Dataset schema:

| Column | Feature type | Stats |
| --- | --- | --- |
| lang | stringclasses | 3 distinct values |
| file_path | stringlengths | lengths 5–150 |
| repo_name | stringlengths | lengths 6–110 |
| commit | stringlengths | length 40 |
| file_code | stringlengths | lengths 1.52k–18.9k |
| prefix | stringlengths | lengths 82–16.5k |
| suffix | stringlengths | lengths 0–15.1k |
| middle | stringlengths | lengths 121–8.18k |
| strategy | stringclasses | 8 distinct values |
| context_items | listlengths | 0–100 items |
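For reference, a minimal sketch of how one row of this schema could be represented in Rust. The field names and types follow the table above, and the `ContextItem` shape mirrors the keys visible in the `context_items` entries of the example row below (`content`, `file_path`, `rank`, `score`). The JSON Lines layout, the `serde`/`serde_json` crates, and the names `Row`, `ContextItem`, and `rows.jsonl` are assumptions for illustration, not a statement about how the dataset is actually stored.

```rust
use serde::Deserialize;

/// One retrieved context snippet, mirroring the keys visible in `context_items`.
#[derive(Debug, Deserialize)]
struct ContextItem {
    content: String,
    file_path: String,
    rank: u32,
    score: f64,
}

/// One dataset row, following the schema table above.
#[derive(Debug, Deserialize)]
struct Row {
    lang: String,
    file_path: String,
    repo_name: String,
    commit: String,
    file_code: String,
    prefix: String,
    suffix: String,
    middle: String,
    strategy: String,
    context_items: Vec<ContextItem>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical JSON Lines file; the actual storage format of the dataset may differ.
    let data = std::fs::read_to_string("rows.jsonl")?;
    for line in data.lines() {
        let row: Row = serde_json::from_str(line)?;
        println!(
            "{} {} ({} context items)",
            row.repo_name,
            row.file_path,
            row.context_items.len()
        );
    }
    Ok(())
}
```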
Example row 1. Fields appear in schema order; the three unlabelled blocks between file_code and strategy are prefix, suffix, and middle (see the reassembly note after the context_items array).

lang: Rust
file_path: src/web/websocket.rs
repo_name: alecdwm/webtron
commit: ba90a9e5d7d388dbe93228eceb1cfc656016288c

file_code:
use futures::sink::{Sink, SinkExt}; use futures::stream::{Stream, StreamExt}; use log::{debug, error, trace, warn}; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::select; use tokio::sync::mpsc; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::Mutex; use tokio::time; use warp::reply::Reply; use warp::ws::{Message, WebSocket, Ws}; use crate::server::{ClientId, MessageIn, MessageOut}; const PING_RATE_SECONDS: u64 = 15; pub fn websocket( ws: Ws, ip_address: Option<SocketAddr>, server_tx: Sender<MessageIn>, ) -> impl Reply { let ip_address = ip_address.map(|ip_address| format!("{}", ip_address)); ws.on_upgrade(|websocket| handle_websocket(websocket, ip_address, server_tx)) } async fn handle_websocket( websocket: WebSocket, ip_address: Option<String>, mut server_tx: Sender<MessageIn>, ) { let id = ClientId::default(); let (messages_tx, messages_rx) = mpsc::channel::<MessageOut>(100); let (ws_tx, ws_rx) = websocket.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); if let Err(error) = server_tx .send(MessageIn::connect(id, ip_address, messages_tx)) .await { error!("Failed to send new client to server: {}", error); return; } let in_task = tokio::spawn(handle_in(id, ws_rx, server_tx.clone())); let out_task = tokio::spawn(handle_out(messages_rx, ws_tx.clone())); let ping_task = tokio::spawn(handle_ping(ws_tx)); if let Err(error) = select! { out = in_task => out, out = out_task => out, out = ping_task => out, } { error!("Failure occurred while handling websocket: {}", error); } if let Err(error) = server_tx.send(MessageIn::disconnect(id)).await { error!("Failed to send client disconnect to server: {}", error); } } async fn handle_in( id: ClientId, mut rx: impl Stream<Item = Result<Message, warp::Error>> + Unpin, mut tx: Sender<MessageIn>, ) { debug!("Websocket handler (in) created"); while let Some(message) = rx.next().await { let message = match message { Ok(message) => message, Err(error) => { error!("Error occurred in incoming message: {}", error); break; } }; if message.is_close() { trace!("Close received: {:?}", message); break; } let text = match message.to_str() { Ok(text) => text, Err(()) => { trace!("Non-text message received: {:?}", message); continue; } }; trace!("Text message received: {}", text); let message = match MessageIn::from_json(id, text) { Ok(message) => message, Err(error) => { warn!("Failed to parse incoming message ({}): {}", text, error); continue; } }; tx.send(message) .await .unwrap_or_else(|error| error!("Failed to send incoming message to server: {}", error)) } debug!("Websocket handler (in) closed"); } async fn handle_out(mut rx: Receiver<MessageOut>, tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (out) created"); while let Some(message) = rx.recv().await { let text = match message.to_json() { Ok(text) => text, Err(error) => { error!( "Failed to serialize outgoing message: ({:?}): {}", message, error ); continue; } }; if tx.lock().await.send(Message::text(text)).await.is_err() { error!("Failed to send outgoing message") } } debug!("Websocket handler (out) closed"); } async fn handle_ping(tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (ping) created"); let mut interval = time::interval(Duration::from_secs(PING_RATE_SECONDS)); loop { interval.tick().await; if tx .lock() .await .send(Message::ping(Vec::new())) .await .is_err() { error!("Failed to send outgoing ping"); break; } } debug!("Websocket handler (ping) closed"); }
use futures::sink::{Sink, SinkExt}; use futures::stream::{Stream, StreamExt}; use log::{debug, error, trace, warn}; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::select; use tokio::sync::mpsc; use tokio::sync::mpsc::{Receiver, Sender}; use tokio::sync::Mutex; use tokio::time; use warp::reply::Reply; use warp::ws::{Message, WebSocket, Ws}; use crate::server::{ClientId, MessageIn, MessageOut}; const PING_RATE_SECONDS: u64 = 15; pub fn websocket( ws: Ws, ip_address: Option<SocketAddr>, server_tx: Sender<MessageIn>, ) -> impl Reply { let ip_address = ip_address.map(|ip_address| format!("{}", ip_address)); ws.on_upgrade(|websocket| handle_websocket(websocket, ip_address, server_tx)) } async fn handle_websocket( websocket: WebSocket, ip_address: Option<String>, mut server_tx: Sender<MessageIn>, ) { let id = ClientId::default(); let (messages_tx, messages_rx) = mpsc::channel::<MessageOut>(100); let (ws_tx, ws_rx) = websocket.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); if let Err(error) = server_tx .send(MessageIn::connect(id, ip_address, messages_tx)) .await { error!("Failed to send new client to server: {}", error); return; } let in_task = tokio::spawn(handle_in(id, ws_rx, server_tx.clone())); let out_task = tokio::spawn(handle_out(messages_rx, ws_tx.clone())); let ping_task = tokio::spawn(handle_ping(ws_tx)); if let Err(error) = select! { out = in_task => out, out = out_task => out, out = ping_task => out, } { error!("Failure occurred while handling websocket: {}", error); } if let Err(error) = server_tx.send(MessageIn::disconnect(id)).await { error!("Failed to send client disconnect to server: {}", error); } } async fn handle_in( id: ClientId, mut rx: impl Stream<Item = Result<Message, warp::Error>> + Unpin, mut tx: Sender<MessageIn>, ) { debug!("Websocket handler (in) created"); while let Some(message) = rx.next().await { let message = match message { Ok(message) => message, Err(error) => { error!("Error occurred in incoming message: {}", error); break; } }; if message.is_close() { trace!("Close received: {:?}", message); break; } let text = match message.to_str() { Ok(text) => text, Err(()) => { trace!("Non-text message received: {:?}", message); continue; } }; trace!("Text message received: {}", text); let message = match Messag
async fn handle_out(mut rx: Receiver<MessageOut>, tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (out) created"); while let Some(message) = rx.recv().await { let text = match message.to_json() { Ok(text) => text, Err(error) => { error!( "Failed to serialize outgoing message: ({:?}): {}", message, error ); continue; } }; if tx.lock().await.send(Message::text(text)).await.is_err() { error!("Failed to send outgoing message") } } debug!("Websocket handler (out) closed"); } async fn handle_ping(tx: Arc<Mutex<impl Sink<Message> + Unpin>>) { debug!("Websocket handler (ping) created"); let mut interval = time::interval(Duration::from_secs(PING_RATE_SECONDS)); loop { interval.tick().await; if tx .lock() .await .send(Message::ping(Vec::new())) .await .is_err() { error!("Failed to send outgoing ping"); break; } } debug!("Websocket handler (ping) closed"); }
eIn::from_json(id, text) { Ok(message) => message, Err(error) => { warn!("Failed to parse incoming message ({}): {}", text, error); continue; } }; tx.send(message) .await .unwrap_or_else(|error| error!("Failed to send incoming message to server: {}", error)) } debug!("Websocket handler (in) closed"); }
strategy: function_block-function_prefixed

context_items:
[ { "content": "pub fn embed() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {\n\n let index = warp::path::end().and_then(serve_index);\n\n let path = warp::path::tail().and_then(serve_path);\n\n\n\n index.or(path)\n\n}\n\n\n\nasync fn serve_index() -> Result<impl Reply, Rejection> {\n\n serve(\"index.html\")\n\n}\n\n\n\nasync fn serve_path(path: Tail) -> Result<impl Reply, Rejection> {\n\n serve(path.as_str())\n\n}\n\n\n", "file_path": "src/web/embed.rs", "rank": 0, "score": 170769.03430442675 }, { "content": "pub fn proxy(target: &str) -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {\n\n let target: Uri = target.parse().expect(\"Invalid proxy target\");\n\n let target_host = match (target.host(), target.port()) {\n\n (Some(host), Some(port)) => HeaderValue::from_str(&format!(\"{}:{}\", host, port)).ok(),\n\n (Some(host), None) => HeaderValue::from_str(host).ok(),\n\n _ => None,\n\n }\n\n .expect(\"Failed to parse target host\");\n\n\n\n let target = warp::any().map(move || (target.clone(), target_host.clone()));\n\n let empty_query = warp::any().map(move || \"\".to_owned());\n\n\n\n let ws = warp::ws()\n\n .and(warp::path::tail())\n\n .and(warp::query::raw().or(empty_query).unify())\n\n .and(warp::header::headers_cloned())\n\n .and(warp::addr::remote())\n\n .and(target.clone())\n\n .and_then(proxy_ws);\n\n\n", "file_path": "src/web/proxy.rs", "rank": 1, "score": 160977.53989705804 }, { "content": "///\n\n/// Returns a string representing the source hierarchy of an error.\n\n/// Format:\n\n///\n\n/// `\"$error: $source: $source_2: $source_3: etc\"`\n\n///\n\npub fn get_error_chain(error: Error) -> String {\n\n error\n\n .chain()\n\n .map(|f| format!(\"{}\", f))\n\n .collect::<Vec<_>>()\n\n .join(\": \")\n\n}\n", "file_path": "src/lib.rs", "rank": 3, "score": 137404.9613671886 }, { "content": "pub fn calculate_spawnpoints(player_ids: Vec<PlayerId>) -> Vec<(PlayerId, ArenaPoint, Direction)> {\n\n let mut spawnpoints: Vec<(PlayerId, ArenaPoint, Direction)> = Vec::new();\n\n let mut spawnpoints_used: Vec<usize> = Vec::new();\n\n\n\n for player_id in player_ids {\n\n // check if no spawnpoints remain\n\n if spawnpoints_used.len() >= SPAWNPOINTS.len() {\n\n error!(\"No spawnpoints remain!\");\n\n break;\n\n }\n\n\n\n // randomly select an available spawnpoint\n\n let mut selected_spawnpoint = OsRng.next_u32() as usize % SPAWNPOINTS.len();\n\n while spawnpoints_used\n\n .iter()\n\n .any(|spawnpoint| *spawnpoint == selected_spawnpoint)\n\n {\n\n selected_spawnpoint = OsRng.next_u32() as usize % SPAWNPOINTS.len();\n\n }\n\n spawnpoints_used.push(selected_spawnpoint);\n", "file_path": "src/server/arena/util.rs", "rank": 4, "score": 119423.91543928068 }, { "content": "fn insert_websocket_upgrade_headers(headers: &mut HeaderMap) {\n\n headers.insert(CONNECTION, HeaderValue::from_static(\"upgrade\"));\n\n headers.insert(UPGRADE, HeaderValue::from_static(\"websocket\"));\n\n}\n", "file_path": "src/web/proxy.rs", "rank": 5, "score": 105777.97423522169 }, { "content": "fn serve(path: &str) -> Result<impl Reply, Rejection> {\n\n let asset = Asset::get(path).ok_or_else(warp::reject::not_found)?;\n\n let mime = mime_guess::from_path(path).first_or_octet_stream();\n\n\n\n let mut res = Response::new(asset.into());\n\n res.headers_mut().insert(\n\n \"content-type\",\n\n HeaderValue::from_str(mime.as_ref())\n\n .map_err(|_| warp::reject::custom(InternalServerError))?,\n\n );\n\n\n\n Ok(res)\n\n}\n", "file_path": "src/web/embed.rs", "rank": 6, "score": 101123.82985581837 
}, { "content": "const uniqueIds = {}\n", "file_path": "client/src/hooks/useUniqueId.js", "rank": 7, "score": 86570.71657079435 }, { "content": "export const SEND = 'SEND'\n", "file_path": "client/src/actions/socket.js", "rank": 8, "score": 81431.48123044703 }, { "content": "export const RECEIVE_SOCKET_MESSAGE = 'RECEIVE_SOCKET_MESSAGE'\n", "file_path": "client/src/actions/index.js", "rank": 9, "score": 80165.86089484129 }, { "content": "fn remove_hop_by_hop_headers(headers: &mut HeaderMap) {\n\n for header in HOP_BY_HOP_HEADERS {\n\n headers.remove(*header);\n\n }\n\n}\n", "file_path": "src/web/proxy.rs", "rank": 10, "score": 78762.3631040527 }, { "content": "import { useRef } from 'react'\n\n\n\nconst uniqueIds = {}\n\n\n\nexport default function useUniqueId() {\n\n const idRef = useRef(null)\n\n if (idRef.current !== null) return idRef.current\n\n\n\n idRef.current = generateId()\n\n while (uniqueIds[idRef.current] !== undefined) {\n\n idRef.current = generateId()\n\n }\n\n uniqueIds[idRef.current] = true\n\n\n\n return idRef.current\n\n}\n\n\n\nfunction generateId() {\n\n return parseInt(Math.random() * Math.pow(10, 8)).toString(16)\n\n}\n", "file_path": "client/src/hooks/useUniqueId.js", "rank": 11, "score": 67760.39064879592 }, { "content": "fn append_x_forwarded_for_header(remote_addr: Option<SocketAddr>, headers: &mut HeaderMap) {\n\n if let Some(remote_addr) = remote_addr {\n\n if let Ok(remote_addr) = HeaderValue::from_str(&remote_addr.ip().to_string()) {\n\n match headers.entry(X_FORWARDED_FOR) {\n\n Entry::Vacant(entry) => {\n\n entry.insert(remote_addr);\n\n }\n\n Entry::Occupied(mut entry) => {\n\n let existing = entry.get_mut();\n\n if let Ok(updated) = HeaderValue::from_bytes(\n\n &[existing.as_bytes(), b\", \", remote_addr.as_bytes()].concat(),\n\n ) {\n\n *existing = updated;\n\n } else {\n\n *existing = remote_addr;\n\n }\n\n }\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/web/proxy.rs", "rank": 12, "score": 66913.14650631208 }, { "content": " ArenaState(Box<Arena>),\n\n ArenaStatePatch(Vec<ArenaUpdate>),\n\n }\n\n\n\n impl Message {\n\n pub fn to_json(&self) -> Result<String, serde_json::error::Error> {\n\n serde_json::to_string(self)\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Client to server messages\n\n///\n\npub mod incoming {\n\n use anyhow::Error;\n\n use serde_derive::Deserialize;\n\n use tokio::sync::mpsc::Sender;\n\n\n\n use crate::server::{ArenaId, ClientId, Direction, MessageOut, Player};\n", "file_path": "src/server/messages.rs", "rank": 13, "score": 65786.79767498042 }, { "content": " arena_id: Option<ArenaId>,\n\n },\n\n\n\n Start,\n\n Turn(Direction),\n\n }\n\n\n\n impl Message {\n\n pub fn connect(\n\n client_id: ClientId,\n\n ip_address: Option<String>,\n\n tx: Sender<MessageOut>,\n\n ) -> Self {\n\n Self {\n\n client_id,\n\n payload: MessagePayload::Connect(ip_address, tx),\n\n }\n\n }\n\n\n\n pub fn disconnect(client_id: ClientId) -> Self {\n", "file_path": "src/server/messages.rs", "rank": 14, "score": 65784.2386146478 }, { "content": "pub use incoming::Message as MessageIn;\n\npub use incoming::MessagePayload as MessageInPayload;\n\npub use outgoing::Message as MessageOut;\n\n\n\n///\n\n/// Server to client messages\n\n///\n\npub mod outgoing {\n\n use serde_derive::Serialize;\n\n\n\n use crate::server::{Arena, ArenaId, ArenaOverview, ArenaUpdate, PlayerId};\n\n\n\n ///\n\n /// Outgoing messages\n\n ///\n\n #[derive(Debug, Clone, Serialize)]\n\n pub enum Message {\n\n ArenaList(Vec<ArenaOverview>),\n\n ArenaJoined(ArenaId, PlayerId),\n\n\n", "file_path": 
"src/server/messages.rs", "rank": 15, "score": 65782.59682425654 }, { "content": "\n\n ///\n\n /// Incoming messages\n\n ///\n\n #[derive(Debug)]\n\n pub struct Message {\n\n pub client_id: ClientId,\n\n pub payload: MessagePayload,\n\n }\n\n\n\n #[derive(Debug, Deserialize)]\n\n pub enum MessagePayload {\n\n #[serde(skip)]\n\n Connect(Option<String>, Sender<MessageOut>),\n\n #[serde(skip)]\n\n Disconnect,\n\n\n\n GetArenaList,\n\n Join {\n\n player: Player,\n", "file_path": "src/server/messages.rs", "rank": 16, "score": 65781.52826507641 }, { "content": " Self {\n\n client_id,\n\n payload: MessagePayload::Disconnect,\n\n }\n\n }\n\n\n\n pub fn from_json(client_id: ClientId, json: &str) -> Result<Self, Error> {\n\n Ok(Self {\n\n client_id,\n\n payload: serde_json::from_str(&json)?,\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/server/messages.rs", "rank": 17, "score": 65778.93947093602 }, { "content": "fn format_uri(target: &Uri, path: &str, query: String) -> String {\n\n let query = if !query.is_empty() {\n\n format!(\"?{}\", query)\n\n } else {\n\n query\n\n };\n\n format!(\"{}{}{}\", target, path, query)\n\n}\n", "file_path": "src/web/proxy.rs", "rank": 18, "score": 60898.98900744937 }, { "content": "fn main() {\n\n let debug = env::var(\"PROFILE\").expect(\"PROFILE env variable required\") == \"debug\";\n\n\n\n if debug {\n\n return;\n\n };\n\n\n\n let out = Command::new(\"yarn\")\n\n .current_dir(\"client\")\n\n .arg(\"install\")\n\n .status()\n\n .expect(\"Failed to fetch client dependencies\");\n\n assert!(out.success());\n\n\n\n let out = Command::new(\"yarn\")\n\n .current_dir(\"client\")\n\n .arg(\"build\")\n\n .status()\n\n .expect(\"Failed to build client\");\n\n assert!(out.success());\n\n}\n", "file_path": "build.rs", "rank": 19, "score": 54829.28162817341 }, { "content": "const createWebpackOverridePlugin = (overrideWebpackConfig) => ({ plugin: { overrideWebpackConfig } })\n", "file_path": "client/craco.config.js", "rank": 20, "score": 52484.3471515144 }, { "content": "export const RECEIVE_ARENA_STATE = 'RECEIVE_ARENA_STATE'\n", "file_path": "client/src/actions/index.js", "rank": 21, "score": 52368.46723593852 }, { "content": "export const RECEIVE_ARENA_JOINED = 'RECEIVE_ARENA_JOINED'\n", "file_path": "client/src/actions/index.js", "rank": 22, "score": 52368.46723593852 }, { "content": "export const receiveArenaState = createSimpleAction(RECEIVE_ARENA_STATE, 'state')\n", "file_path": "client/src/actions/index.js", "rank": 23, "score": 52368.46723593852 }, { "content": "export const RECEIVE_ARENA_LIST = 'RECEIVE_ARENA_LIST'\n", "file_path": "client/src/actions/index.js", "rank": 24, "score": 52368.46723593852 }, { "content": "export const StoreContext = createContext(null)\n", "file_path": "client/src/hooks/useStore.js", "rank": 25, "score": 52071.84901577442 }, { "content": "export const RECEIVE_ARENA_STATE_PATCH = 'RECEIVE_ARENA_STATE_PATCH'\n", "file_path": "client/src/actions/index.js", "rank": 26, "score": 51055.72359498222 }, { "content": "export const receiveArenaStatePatch = createSimpleAction(RECEIVE_ARENA_STATE_PATCH, 'statePatch')\n", "file_path": "client/src/actions/index.js", "rank": 27, "score": 51055.72359498222 }, { "content": "export const StoreDispatchContext = createContext(null)\n", "file_path": "client/src/hooks/useStoreDispatch.js", "rank": 28, "score": 49525.073801442726 }, { "content": "export default function createReducer(initialState, handlers) {\n\n return function reducer(state = initialState, action) {\n\n if 
(Object.prototype.hasOwnProperty.call(handlers, action.type)) {\n\n return handlers[action.type](state, action)\n\n } else if (Object.prototype.hasOwnProperty.call(handlers, 'default')) {\n\n return handlers['default'](state, action)\n\n } else {\n\n return state\n\n }\n\n }\n\n}\n", "file_path": "client/src/utils/createReducer.js", "rank": 29, "score": 45469.65806870087 }, { "content": "import { createContext, useContext } from 'react'\n\n\n\nexport const StoreContext = createContext(null)\n\n\n\nexport default function useStore() {\n\n const store = useContext(StoreContext)\n\n return store\n\n}\n", "file_path": "client/src/hooks/useStore.js", "rank": 30, "score": 45112.29153555287 }, { "content": "export default function createSimpleAction(type, ...argNames) {\n\n return function (...args) {\n\n const action = { type }\n\n argNames.forEach((argName, index) => {\n\n action[argName] = args[index]\n\n })\n\n return action\n\n }\n\n}\n", "file_path": "client/src/utils/createSimpleAction.js", "rank": 31, "score": 44478.869210683944 }, { "content": "import { preloadImages } from 'actions'\n\nimport useStoreDispatch from 'hooks/useStoreDispatch'\n\nimport { useEffect, useRef } from 'react'\n\n\n\nexport default function usePreloadImages(urls = []) {\n\n const dispatch = useStoreDispatch()\n\n\n\n // only want to preload on first render\n\n const urlsRef = useRef(urls)\n\n\n\n useEffect(() => {\n\n dispatch(preloadImages(urlsRef.current))\n\n }, [dispatch])\n\n}\n", "file_path": "client/src/hooks/usePreloadImages.js", "rank": 32, "score": 44129.289733658865 }, { "content": "import { useReducer } from 'react'\n\n\n\nexport default function useForceUpdate() {\n\n const [, forceUpdate] = useReducer((x) => (x + 1) % 2, 0)\n\n return forceUpdate\n\n}\n", "file_path": "client/src/hooks/useForceUpdate.js", "rank": 33, "score": 44129.289733658865 }, { "content": "import useForceUpdate from 'hooks/useForceUpdate'\n\nimport React, { forwardRef, useCallback, useEffect, useRef } from 'react'\n\n\n\n// Usage examples:\n\n//\n\n// const BoldText = useClassName('bold', 'span')\n\n// return <BoldText>This text can be styled boldly!</BoldText>\n\n//\n\n// const BoldText = useClassName('bold', 'span')\n\n// return <BoldText className=\"paragraph\">This text can _still_ be styled boldly!</BoldText>\n\n//\n\n// const ArticleContainer = useClassName('article-container')\n\n// const Title = useClassName('article-title')\n\n// const Body = useClassName('article-body')\n\n// return (\n\n// <ArticleContainer>\n\n// <Title>Lorem Ipsum</Title>\n\n// <Body>\n\n// lorem ipsum dolar sit amet...\n\n// </Body>\n\n// </Container>\n\n// )\n\n//\n\nexport default function useClassName(staticClassName, WrappedComponent = 'div') {\n\n // Resolve staticClassName to a string.\n\n // We will be doing an equality check against this string later on.\n\n // If we don't resolve it, and it therefore possibly remains as an array,\n\n // that equality check will always fail and we will be wasting render cycles.\n\n staticClassName = resolveClassName(staticClassName)\n\n\n\n // Store staticClassName in a ref so we don't need to remount the\n\n // WithClassName wrapper component when the staticClassName changes.\n\n const staticClassNameRef = useRef(staticClassName)\n\n\n\n // When staticClassName changes, update the value stored inside staticClassNameRef.\n\n staticClassNameRef.current = staticClassName\n\n\n\n // When staticClassName changes, trigger a forceUpdate in the wrapper component (if available).\n\n useEffect(() => {\n\n if 
(!forceUpdateRef.current) return\n\n forceUpdateRef.current()\n\n }, [staticClassName])\n\n\n\n // Store forceUpdate in a ref so we can use it outside of the\n\n // WithClassName wrapper component.\n\n const forceUpdateRef = useRef()\n\n\n\n // useCallback memoizes the WithClassName component using the `WrappedComponent` variable.\n\n // (if WrappedComponent doesn't change, the callback function is only created once).\n\n //\n\n // This stops React from replacing the entire DOM structure below and including\n\n // the WithClassName wrapper component when the WrappedComponent hasn't changed.\n\n //\n\n // eslint-disable-next-line react-hooks/exhaustive-deps\n\n const WithClassName = useCallback(\n\n // eslint-disable-next-line react/display-name\n\n forwardRef(\n\n // eslint-disable-next-line react/prop-types\n\n function WithClassName({ className: propsClassName, ...props }, ref) {\n\n const forceUpdate = useForceUpdate()\n\n\n\n // Provide the forceUpdate function to the useClassName effect hook...\n\n forceUpdateRef.current = forceUpdate\n\n\n\n // ...but remove it when we unmount this component, so that the useEffect\n\n // hook cannot call setState on an unmounted component.\n\n useEffect(() => {\n\n return () => {\n\n // Don't do anything if the forceUpdateRef.current value has changed\n\n // since we set it.\n\n if (forceUpdateRef.current !== forceUpdate) return\n\n\n\n forceUpdateRef.current = undefined\n\n }\n\n }, [forceUpdate])\n\n\n\n // Resolve propsClassName to a string.\n\n propsClassName = resolveClassName(propsClassName)\n\n\n\n // Join staticClassName with the value passed to WithClassName's className prop.\n\n // Allows for use-cases such as:\n\n //\n\n // const Item = useClassName('item')\n\n // return <Item className=\"blue\">A blue item!</Item>\n\n //\n\n const className = resolveClassName([staticClassNameRef.current, propsClassName])\n\n\n\n return React.createElement(WrappedComponent, { ...props, className, ref })\n\n },\n\n ),\n\n [WrappedComponent],\n\n )\n\n\n\n return WithClassName\n\n}\n\n\n\n// Used to resolve className arrays into className strings.\n\n// Falsy values inside the className array will be filtered out.\n\n// Some usage examples:\n\n//\n\n// resolveClassName(['one', 'two', 'three']) -> 'one two three'\n\n//\n\n// resolveClassName('four') -> 'four'\n\n//\n\n// const portraitUI = false\n\n// const errorUI = true\n\n// resolveClassName([\n\n// 'item-wrapper'\n\n// portraitUI && 'portrait-ui'\n\n// errorUI && 'error-ui'\n\n// ]) -> 'item-wrapper error-ui'\n\n//\n\nexport function resolveClassName(className) {\n\n if (!Array.isArray(className)) return className\n\n return className.filter(Boolean).join(' ')\n\n}\n", "file_path": "client/src/hooks/useClassName.js", "rank": 34, "score": 44129.289733658865 }, { "content": "import { useCallback, useEffect, useRef, useState } from 'react'\n\n\n\nexport default function useCursorBlink() {\n\n const [cursorBlink, setCursorBlink] = useState(true)\n\n const cursorBlinkTimeout = useRef()\n\n\n\n // avoid setState on unmounted components\n\n useEffect(() => () => (cursorBlinkTimeout.current = null), [])\n\n\n\n const resetCursorBlink = useCallback(() => {\n\n if (cursorBlinkTimeout.current) {\n\n window.clearTimeout(cursorBlinkTimeout.current)\n\n }\n\n setCursorBlink(false)\n\n cursorBlinkTimeout.current = window.setTimeout(() => {\n\n // avoid setState on unmounted components\n\n if (cursorBlinkTimeout.current === null) return\n\n\n\n setCursorBlink(true)\n\n }, 600)\n\n }, [])\n\n\n\n return [cursorBlink, 
resetCursorBlink]\n\n}\n", "file_path": "client/src/hooks/useCursorBlink.js", "rank": 35, "score": 44129.289733658865 }, { "content": "import { useCallback, useReducer, useRef } from 'react'\n\n\n\nexport default function useThunkReducer(reducer, initialState) {\n\n const [state, dispatch] = useReducer(reducer, initialState)\n\n\n\n const stateRef = useRef(state)\n\n stateRef.current = state\n\n\n\n const getState = useCallback(() => stateRef.current, [])\n\n const thunkDispatch = useCallback(\n\n (action) => (typeof action === 'function' ? action(thunkDispatch, getState) : dispatch(action)),\n\n [getState, dispatch],\n\n )\n\n\n\n return [state, thunkDispatch]\n\n}\n", "file_path": "client/src/hooks/useThunkReducer.js", "rank": 36, "score": 44129.289733658865 }, { "content": "import { useEffect } from 'react'\n\n\n\nexport default function useEventListener(event, callback, element = window) {\n\n useEffect(() => {\n\n element.addEventListener(event, callback)\n\n\n\n return () => element.removeEventListener(event, callback)\n\n }, [event, callback, element])\n\n}\n", "file_path": "client/src/hooks/useEventListener.js", "rank": 37, "score": 44129.289733658865 }, { "content": "import { createContext, useContext } from 'react'\n\n\n\nexport const StoreDispatchContext = createContext(null)\n\n\n\nexport default function useStoreDispatch() {\n\n const dispatch = useContext(StoreDispatchContext)\n\n return dispatch\n\n}\n", "file_path": "client/src/hooks/useStoreDispatch.js", "rank": 38, "score": 44129.289733658865 }, { "content": "import useRequestAnimationFrame from 'hooks/useRequestAnimationFrame'\n\nimport useStore from 'hooks/useStore'\n\nimport { useCallback, useRef, useState } from 'react'\n\n\n\nexport default function useInterpolatedLightcyclePosition(position, direction, speed, dead) {\n\n const {\n\n arena: { started },\n\n } = useStore()\n\n\n\n const basePosition = useRef(position)\n\n const basePositionSetAt = useRef(Date.now())\n\n\n\n const latestPosition = useRef(position)\n\n latestPosition.current = position\n\n\n\n const [interpolatedPosition, setInterpolatedPosition] = useState(position)\n\n\n\n const interpolate = useCallback(() => {\n\n if (!started || started.valueOf() > Date.now()) {\n\n if (started) basePositionSetAt.current = started.valueOf()\n\n return setInterpolatedPosition(latestPosition.current)\n\n }\n\n if (dead) return setInterpolatedPosition(latestPosition.current)\n\n\n\n if (basePosition.current !== latestPosition.current) {\n\n basePosition.current = latestPosition.current\n\n basePositionSetAt.current = Date.now()\n\n }\n\n\n\n const deltaTime = (Date.now() - basePositionSetAt.current) / 1000\n\n\n\n setInterpolatedPosition([\n\n basePosition.current[0] + (direction === 'left' ? -1 : direction === 'right' ? 1 : 0) * speed * deltaTime,\n\n basePosition.current[1] + (direction === 'down' ? -1 : direction === 'up' ? 
1 : 0) * speed * deltaTime,\n\n ])\n\n }, [started, direction, speed, dead])\n\n\n\n useRequestAnimationFrame(interpolate)\n\n\n\n return interpolatedPosition\n\n}\n", "file_path": "client/src/hooks/useInterpolatedLightcyclePosition.js", "rank": 39, "score": 43188.21379762077 }, { "content": "import { getArenaList } from 'actions'\n\nimport useStoreDispatch from 'hooks/useStoreDispatch'\n\nimport { useEffect } from 'react'\n\n\n\nexport default function useArenaListPolling() {\n\n const dispatch = useStoreDispatch()\n\n\n\n useEffect(() => {\n\n let timeout = null\n\n const getArenaListTimeout = () => {\n\n dispatch(getArenaList())\n\n timeout = window.setTimeout(getArenaListTimeout, 2000)\n\n }\n\n getArenaListTimeout()\n\n\n\n return () => window.clearTimeout(timeout)\n\n }, [dispatch])\n\n}\n", "file_path": "client/src/hooks/useArenaListPolling.js", "rank": 40, "score": 43188.21379762077 }, { "content": "import { useEffect, useRef } from 'react'\n\n\n\nexport default function useRequestAnimationFrame(callback) {\n\n const callbackRef = useRef(null)\n\n callbackRef.current = callback\n\n\n\n useEffect(() => {\n\n const handleAnimationFrame = () => {\n\n if (callbackRef.current !== callback) return\n\n callback()\n\n requestAnimationFrame(handleAnimationFrame)\n\n }\n\n requestAnimationFrame(handleAnimationFrame)\n\n }, [callback])\n\n\n\n useEffect(() => () => (callbackRef.current = null), [])\n\n}\n", "file_path": "client/src/hooks/useRequestAnimationFrame.js", "rank": 41, "score": 43188.21379762077 }, { "content": "import { useEffect } from 'react'\n\n\n\nexport default function useAddWebtronLoadedClass() {\n\n useEffect(() => {\n\n const webtron = document.getElementById('webtron')\n\n webtron.classList.add('loaded')\n\n }, [])\n\n}\n", "file_path": "client/src/hooks/useAddWebtronLoadedClass.js", "rank": 42, "score": 42286.437472501304 }, { "content": "import { useCallback, useEffect } from 'react'\n\n\n\nexport default function usePreventWebtronContextMenu() {\n\n const preventDefault = useCallback((e) => e.preventDefault(), [])\n\n\n\n useEffect(() => {\n\n const webtron = document.getElementById('webtron')\n\n webtron.addEventListener('contextmenu', preventDefault)\n\n\n\n return () => webtron.removeEventListener('contextmenu', preventDefault)\n\n }, [preventDefault])\n\n}\n", "file_path": "client/src/hooks/usePreventWebtronContextMenu.js", "rank": 43, "score": 42286.437472501304 }, { "content": "use warp::http::StatusCode;\n\nuse warp::reject::Reject;\n\nuse warp::{Rejection, Reply};\n\n\n\n#[derive(Debug)]\n\npub struct InternalServerError;\n\nimpl Reject for InternalServerError {}\n\n\n\n#[derive(Debug)]\n\npub struct BadGateway;\n\nimpl Reject for BadGateway {}\n\n\n\npub async fn handle_rejection(error: Rejection) -> Result<impl Reply, Rejection> {\n\n if error.is_not_found() {\n\n return Ok(warp::reply::with_status(\"Not Found\", StatusCode::NOT_FOUND));\n\n }\n\n\n\n if let Some(InternalServerError) = error.find() {\n\n return Ok(warp::reply::with_status(\n\n \"Internal Server Error\",\n", "file_path": "src/web/errors.rs", "rank": 44, "score": 33953.60037919404 }, { "content": " StatusCode::INTERNAL_SERVER_ERROR,\n\n ));\n\n }\n\n\n\n if let Some(BadGateway) = error.find() {\n\n return Ok(warp::reply::with_status(\n\n \"Bad Gateway\",\n\n StatusCode::BAD_GATEWAY,\n\n ));\n\n }\n\n\n\n Err(error)\n\n}\n", "file_path": "src/web/errors.rs", "rank": 45, "score": 33944.11549843501 }, { "content": " self.process_messages().await;\n\n self.update(UPDATE_RATE_MILLISECONDS as f64 / 
1000.0);\n\n self.send_updates().await;\n\n }\n\n }\n\n\n\n pub async fn process_messages(&mut self) {\n\n while let Ok(message) = self.message_queue.try_recv() {\n\n self.handle_message(message.client_id, message.payload)\n\n .await\n\n .unwrap_or_else(|error| {\n\n error!(\n\n \"Failed to process incoming message: {}\",\n\n get_error_chain(error)\n\n )\n\n })\n\n }\n\n }\n\n\n\n pub fn update(&mut self, delta_time: f64) {\n", "file_path": "src/server.rs", "rank": 53, "score": 32921.44932528406 }, { "content": "#[derive(Debug)]\n\npub struct Server {\n\n message_queue: Receiver<MessageIn>,\n\n clients: HashMap<ClientId, Client>,\n\n arenas: HashMap<ArenaId, Arena>,\n\n}\n\n\n\nimpl Server {\n\n pub fn new(message_queue: Receiver<MessageIn>) -> Self {\n\n Self {\n\n message_queue,\n\n clients: Default::default(),\n\n arenas: Default::default(),\n\n }\n\n }\n\n\n\n pub async fn start(mut self) {\n\n let mut interval = time::interval(Duration::from_millis(UPDATE_RATE_MILLISECONDS));\n\n loop {\n\n interval.tick().await;\n", "file_path": "src/server.rs", "rank": 54, "score": 32921.29376465011 }, { "content": " arena\n\n .updates\n\n .iter()\n\n .skip(client.updates_sent_so_far)\n\n .cloned()\n\n .collect(),\n\n ))\n\n .await\n\n {\n\n error!(\"Failed to send ArenaStatePatch to client: {}\", error);\n\n continue;\n\n }\n\n client.updates_sent_so_far = arena.updates.len();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Server {\n\n pub fn new_arena(&mut self, name: &str) -> ArenaId {\n", "file_path": "src/server.rs", "rank": 55, "score": 32919.56255636767 }, { "content": " pub async fn handle_message(\n\n &mut self,\n\n client_id: ClientId,\n\n payload: MessageInPayload,\n\n ) -> Result<(), Error> {\n\n match payload {\n\n MessageInPayload::Connect(ip_address, tx) => {\n\n info!(\"Client connected: {}\", client_id);\n\n self.clients.insert(\n\n client_id,\n\n Client {\n\n id: client_id,\n\n ip_address,\n\n tx,\n\n player: None,\n\n arena: None,\n\n updates_sent_so_far: 0,\n\n },\n\n );\n\n }\n", "file_path": "src/server.rs", "rank": 56, "score": 32919.42420346885 }, { "content": "\n\n client\n\n .tx\n\n .send(MessageOut::ArenaList(arena_list))\n\n .await\n\n .with_context(|| anyhow!(\"Failed to send ArenaList to client {}\", client_id))?;\n\n }\n\n MessageInPayload::Join { player, arena_id } => {\n\n self.client_part_arena(client_id).unwrap_or_else(|error| {\n\n warn!(\"Failed to remove client from their arena: {}\", error)\n\n });\n\n\n\n let arena_id = match arena_id {\n\n Some(arena_id) => {\n\n if self.arenas.contains_key(&arena_id) {\n\n arena_id\n\n } else {\n\n self.new_arena(&player.name)\n\n }\n\n }\n", "file_path": "src/server.rs", "rank": 57, "score": 32918.23924060787 }, { "content": " None => continue,\n\n };\n\n\n\n if client.updates_sent_so_far == 0 {\n\n if let Err(error) = client\n\n .tx\n\n .send(MessageOut::ArenaState(Box::from(arena.clone())))\n\n .await\n\n {\n\n error!(\"Failed to send ArenaState to client: {}\", error);\n\n continue;\n\n }\n\n client.updates_sent_so_far = arena.updates.len();\n\n continue;\n\n }\n\n\n\n if client.updates_sent_so_far < arena.updates.len() {\n\n if let Err(error) = client\n\n .tx\n\n .send(MessageOut::ArenaStatePatch(\n", "file_path": "src/server.rs", "rank": 58, "score": 32917.92880283506 }, { "content": " MessageInPayload::Disconnect => {\n\n info!(\"Client disconnected: {}\", client_id);\n\n\n\n self.client_part_arena(client_id).unwrap_or_else(|error| {\n\n warn!(\"Failed to remove client from their arena: {}\", error)\n\n });\n\n\n\n 
self.clients\n\n .remove(&client_id)\n\n .ok_or_else(|| anyhow!(\"Failed to remove client {}\", client_id))?;\n\n }\n\n\n\n MessageInPayload::GetArenaList => {\n\n let arena_list: Vec<ArenaOverview> =\n\n self.arenas.values().map(ArenaOverview::from).collect();\n\n\n\n let client = self\n\n .clients\n\n .get_mut(&client_id)\n\n .ok_or_else(|| anyhow!(\"Client {} not found\", client_id))?;\n", "file_path": "src/server.rs", "rank": 59, "score": 32913.05982051731 }, { "content": "mod arena;\n\nmod messages;\n\nmod primitives;\n\n\n\nuse anyhow::{anyhow, Context as ResultContext, Error};\n\nuse log::{error, info, warn};\n\nuse std::collections::HashMap;\n\nuse std::time::Duration;\n\nuse tokio::sync::mpsc::Receiver;\n\nuse tokio::time;\n\n\n\npub use arena::{Arena, ArenaInput, ArenaOverview, ArenaUpdate};\n\npub use messages::{MessageIn, MessageOut};\n\npub use primitives::*;\n\n\n\nuse crate::get_error_chain;\n\nuse messages::MessageInPayload;\n\n\n\nconst UPDATE_RATE_MILLISECONDS: u64 = 50; // 1000 / 50 = 20 updates per second\n\n\n", "file_path": "src/server.rs", "rank": 60, "score": 32913.05322586594 }, { "content": " self.arenas.retain(|_, arena| {\n\n arena.update(delta_time);\n\n\n\n // discard arena if all players have left\n\n !arena.players.is_empty()\n\n })\n\n }\n\n\n\n pub async fn send_updates(&mut self) {\n\n let clients = &mut self.clients;\n\n let arenas = &self.arenas;\n\n\n\n for client in clients.values_mut() {\n\n let arena = match client.arena {\n\n Some(arena) => arena,\n\n None => continue,\n\n };\n\n\n\n let arena = match arenas.get(&arena) {\n\n Some(arena) => arena,\n", "file_path": "src/server.rs", "rank": 61, "score": 32912.92931868729 }, { "content": " client.player = Some(player_id);\n\n client.arena = Some(arena_id);\n\n\n\n client\n\n .tx\n\n .send(MessageOut::ArenaJoined(arena.id, player_id))\n\n .await\n\n .with_context(|| {\n\n anyhow!(\"Failed to send ArenaJoined to client {}\", client_id)\n\n })?;\n\n\n\n info!(\"Player {} joined arena {}\", player_id, arena_id);\n\n }\n\n\n\n MessageInPayload::Start => {\n\n self.client_input(client_id, ArenaInput::Start)?;\n\n }\n\n MessageInPayload::Turn(direction) => {\n\n self.client_input(client_id, ArenaInput::Turn(direction))?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 62, "score": 32911.7088269524 }, { "content": " .get_mut(&arena_id)\n\n .with_context(|| anyhow!(\"Arena {} not found\", arena_id))?;\n\n\n\n let player_id = client\n\n .player\n\n .with_context(|| anyhow!(\"Client {} has no player\", client_id))?;\n\n\n\n arena.process_input(player_id, input);\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn client_part_arena(&mut self, client_id: ClientId) -> Result<(), Error> {\n\n let client = self\n\n .clients\n\n .get_mut(&client_id)\n\n .with_context(|| anyhow!(\"Client {} not found\", client_id))?;\n\n\n\n client.updates_sent_so_far = 0;\n\n\n", "file_path": "src/server.rs", "rank": 63, "score": 32909.55101510175 }, { "content": " let arena_id = client\n\n .arena\n\n .with_context(|| anyhow!(\"Client {} not in an arena\", client_id))?;\n\n\n\n let arena = self\n\n .arenas\n\n .get_mut(&arena_id)\n\n .with_context(|| anyhow!(\"Arena {} not found\", arena_id))?;\n\n\n\n let player_id = client\n\n .player\n\n .with_context(|| anyhow!(\"Client {} has no player\", client_id))?;\n\n\n\n arena.remove_player(player_id);\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Server {\n", "file_path": "src/server.rs", "rank": 64, "score": 32909.113788659066 }, { "content": " None => 
self.new_arena(&player.name),\n\n };\n\n\n\n let arena = self\n\n .arenas\n\n .get_mut(&arena_id)\n\n .ok_or_else(|| anyhow!(\"Arena {} not found\", arena_id))?;\n\n\n\n if arena.players.len() >= arena.max_players {\n\n return Err(anyhow!(\"Arena {} is full\", arena_id));\n\n }\n\n\n\n let player_id = player.id;\n\n\n\n let client = self\n\n .clients\n\n .get_mut(&client_id)\n\n .ok_or_else(|| anyhow!(\"Client {} not found\", client_id))?;\n\n\n\n arena.add_player(player);\n", "file_path": "src/server.rs", "rank": 65, "score": 32908.94473498074 }, { "content": " let arena = Arena::with_name(name);\n\n let id = arena.id;\n\n\n\n self.arenas.insert(id, arena);\n\n\n\n id\n\n }\n\n\n\n pub fn client_input(&mut self, client_id: ClientId, input: ArenaInput) -> Result<(), Error> {\n\n let client = self\n\n .clients\n\n .get(&client_id)\n\n .with_context(|| anyhow!(\"Client {} not found\", client_id))?;\n\n\n\n let arena_id = client\n\n .arena\n\n .with_context(|| anyhow!(\"Client {} not in an arena\", client_id))?;\n\n\n\n let arena = self\n\n .arenas\n", "file_path": "src/server.rs", "rank": 66, "score": 32908.81942621783 }, { "content": "use euclid::{Point2D, Vector2D};\n\nuse lyon_geom::LineSegment;\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse tokio::sync::mpsc::Sender;\n\n\n\nuse crate::new_id_type;\n\nuse crate::server::MessageOut;\n\n\n\nnew_id_type!(ClientId);\n\nnew_id_type!(PlayerId);\n\nnew_id_type!(ArenaId);\n\n\n\n/// The euclidian space in which ArenaVectors and ArenaPoints operate.\n\npub struct ArenaSpace;\n\n/// Represents a direction in the ArenaSpace\n\npub type ArenaVector = Vector2D<f64, ArenaSpace>;\n\n/// Represents a position in the ArenaSpace\n\npub type ArenaPoint = Point2D<f64, ArenaSpace>;\n\n/// Represents a line between two points in the ArenaSpace\n\npub type ArenaLine = LineSegment<f64>;\n", "file_path": "src/server/primitives.rs", "rank": 67, "score": 31881.230006938447 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Client {\n\n pub id: ClientId,\n\n pub ip_address: Option<String>,\n\n pub tx: Sender<MessageOut>,\n\n pub player: Option<PlayerId>,\n\n pub arena: Option<ArenaId>,\n\n pub updates_sent_so_far: usize,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, Hash, PartialEq, Serialize, Deserialize)]\n\npub struct Player {\n\n #[serde(skip_serializing, skip_deserializing)]\n\n pub id: PlayerId,\n\n pub name: String,\n\n pub color: PlayerColor,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/server/primitives.rs", "rank": 68, "score": 31878.24148850606 }, { "content": "mod entities;\n\nmod input;\n\nmod updates;\n\nmod util;\n\n\n\nuse chrono::{DateTime, Duration as OldDuration, Utc};\n\nuse log::{error, trace};\n\nuse rand_core::{OsRng, RngCore};\n\nuse serde_derive::Serialize;\n\nuse std::collections::HashMap;\n\nuse std::mem;\n\n\n\npub use self::entities::*;\n\npub use self::input::*;\n\npub use self::updates::*;\n\npub use self::util::*;\n\n\n\nuse crate::server::{ArenaId, ArenaLine, ArenaPoint, Direction, Player, PlayerId};\n\n\n\nconst ARENA_WIDTH: f64 = 560.0;\n", "file_path": "src/server/arena.rs", "rank": 69, "score": 31878.103456069057 }, { "content": " Left,\n\n Right,\n\n Down,\n\n}\n\n\n\nimpl Direction {\n\n pub fn as_velocity(self) -> ArenaVector {\n\n match self {\n\n Direction::Up => ArenaVector::new(0.0, 1.0),\n\n Direction::Down => ArenaVector::new(0.0, -1.0),\n\n Direction::Left => ArenaVector::new(-1.0, 0.0),\n\n Direction::Right => ArenaVector::new(1.0, 0.0),\n\n }\n\n 
}\n\n\n\n pub fn is_opposite(self, to: Direction) -> bool {\n\n match self {\n\n Direction::Up => to == Direction::Down,\n\n Direction::Left => to == Direction::Right,\n\n Direction::Right => to == Direction::Left,\n\n Direction::Down => to == Direction::Up,\n\n }\n\n }\n\n}\n", "file_path": "src/server/primitives.rs", "rank": 70, "score": 31875.478364446135 }, { "content": " for id in self.lightribbons.keys() {\n\n let latest_point = match self.lightcycles.get(id) {\n\n Some(lightcycle) => {\n\n if lightcycle.dead {\n\n continue;\n\n };\n\n\n\n lightcycle.position\n\n }\n\n None => {\n\n error!(\n\n \"Failed to update lightribbon position: No lightcycle with id {}\",\n\n id\n\n );\n\n continue;\n\n }\n\n };\n\n\n\n self.updates\n\n .push(ArenaUpdate::UpdateLightribbonReplaceLatestPoint(\n", "file_path": "src/server/arena.rs", "rank": 71, "score": 31875.104942064012 }, { "content": " pub fn process_input(&mut self, player_id: PlayerId, input_event: ArenaInput) {\n\n input_event\n\n .process_into_updates(self, player_id)\n\n .drain(..)\n\n .for_each(|update| self.updates.push(update));\n\n }\n\n\n\n pub fn update(&mut self, delta_time: f64) {\n\n // apply process_input updates\n\n self.apply_updates();\n\n\n\n let started = match self.started {\n\n Some(started) => started,\n\n None => return,\n\n };\n\n\n\n let now = Utc::now();\n\n if now < started {\n\n return;\n\n }\n", "file_path": "src/server/arena.rs", "rank": 72, "score": 31874.617891375496 }, { "content": " pub lightribbons: HashMap<PlayerId, Lightribbon>,\n\n\n\n #[serde(skip)]\n\n pub updates: Vec<ArenaUpdate>,\n\n #[serde(skip)]\n\n updates_applied_so_far: usize,\n\n}\n\n\n\nimpl Arena {\n\n pub fn with_name(name: &str) -> Self {\n\n Self {\n\n name: name.to_uppercase(),\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn add_player(&mut self, player: Player) {\n\n self.updates.push(ArenaUpdate::AddPlayer(player.id, player));\n\n }\n\n\n", "file_path": "src/server/arena.rs", "rank": 73, "score": 31874.052397294356 }, { "content": " pub fn remove_player(&mut self, player_id: PlayerId) {\n\n self.updates.push(ArenaUpdate::RemovePlayer(player_id));\n\n }\n\n\n\n pub fn clear_updates(&mut self) {\n\n self.updates.clear();\n\n self.updates_applied_so_far = 0;\n\n }\n\n\n\n pub fn apply_updates(&mut self) -> &mut Self {\n\n let updates = mem::take(&mut self.updates);\n\n for update in updates.iter().skip(self.updates_applied_so_far) {\n\n update.apply(self);\n\n self.updates_applied_so_far += 1;\n\n }\n\n self.updates = updates;\n\n\n\n self\n\n }\n\n\n", "file_path": "src/server/arena.rs", "rank": 74, "score": 31873.871392767687 }, { "content": " continue 'next_lightcycle;\n\n }\n\n\n\n // test for collisions with other lightcycles\n\n if self\n\n .lightcycles\n\n .iter()\n\n .filter(|(_, other_lightcycle)| !other_lightcycle.dead)\n\n .filter(|(other_id, _)| id != *other_id)\n\n .any(|(_, other_lightcycle)| lightcycle.position == other_lightcycle.position)\n\n {\n\n self.updates\n\n .push(ArenaUpdate::UpdateLightcycleApplyDeath(*id));\n\n continue 'next_lightcycle;\n\n }\n\n }\n\n self\n\n }\n\n\n\n fn update_lightribbon_positions(&mut self) -> &mut Self {\n", "file_path": "src/server/arena.rs", "rank": 75, "score": 31873.597155038817 }, { "content": " continue;\n\n };\n\n\n\n self.updates.push(ArenaUpdate::UpdateLightcyclePosition(\n\n *id,\n\n lightcycle.position\n\n + lightcycle.direction.as_velocity() * lightcycle.speed * delta_time,\n\n ))\n\n }\n\n self\n\n }\n\n\n\n fn calculate_lightcycle_collisions(&mut self, delta_time: 
f64) -> &mut Self {\n\n 'next_lightcycle: for (id, lightcycle) in self.lightcycles.iter() {\n\n if lightcycle.dead {\n\n continue 'next_lightcycle;\n\n };\n\n\n\n let last_position = lightcycle.position\n\n - lightcycle.direction.as_velocity() * lightcycle.speed * delta_time;\n", "file_path": "src/server/arena.rs", "rank": 76, "score": 31872.519982358182 }, { "content": "\n\n if let (Some((player_id, _)), None) = (alive_lightcycles.next(), alive_lightcycles.next()) {\n\n self.updates.push(ArenaUpdate::SetWinner(Some(*player_id)));\n\n }\n\n\n\n self\n\n }\n\n\n\n fn test_round_end(&mut self) -> &mut Self {\n\n if self.lightcycles.values().all(|lightcycle| lightcycle.dead) {\n\n self.updates.push(ArenaUpdate::End);\n\n }\n\n self\n\n }\n\n}\n\n\n\nimpl Default for Arena {\n\n fn default() -> Self {\n\n Self {\n\n id: Default::default(),\n", "file_path": "src/server/arena.rs", "rank": 77, "score": 31872.431495797813 }, { "content": "pub struct ArenaOverview {\n\n id: ArenaId,\n\n name: String,\n\n max_players: usize,\n\n started: Option<DateTime<Utc>>,\n\n players: HashMap<PlayerId, Player>,\n\n}\n\n\n\nimpl From<&Arena> for ArenaOverview {\n\n fn from(arena: &Arena) -> Self {\n\n Self {\n\n id: arena.id,\n\n name: arena.name.clone(),\n\n max_players: arena.max_players,\n\n started: arena.started,\n\n players: arena.players.clone(),\n\n }\n\n }\n\n}\n", "file_path": "src/server/arena.rs", "rank": 78, "score": 31871.75316493841 }, { "content": "const ARENA_HEIGHT: f64 = 560.0;\n\nconst ARENA_MAX_PLAYERS: usize = 8;\n\nconst ARENA_START_TIMER_SECONDS: i64 = 1;\n\nconst LIGHTCYCLE_SPEED: f64 = 55.0;\n\n// const LIGHTCYCLE_BRAKE_SPEED: f64 = 40.0;\n\n// const LIGHTCYCLE_BOOST_SPEED: f64 = 70.0;\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub struct Arena {\n\n pub id: ArenaId,\n\n pub name: String,\n\n pub width: f64,\n\n pub height: f64,\n\n pub max_players: usize,\n\n\n\n pub started: Option<DateTime<Utc>>,\n\n pub winner: Option<PlayerId>,\n\n\n\n pub players: HashMap<PlayerId, Player>,\n\n pub lightcycles: HashMap<PlayerId, Lightcycle>,\n", "file_path": "src/server/arena.rs", "rank": 79, "score": 31871.579419421152 }, { "content": " *id,\n\n latest_point,\n\n ));\n\n }\n\n self\n\n }\n\n\n\n fn test_win_condition(&mut self) -> &mut Self {\n\n if self.winner.is_some() {\n\n return self;\n\n }\n\n\n\n if self.lightcycles.iter().count() <= 1 {\n\n return self;\n\n }\n\n\n\n let mut alive_lightcycles = self\n\n .lightcycles\n\n .iter()\n\n .filter(|(_, lightcycle)| !lightcycle.dead);\n", "file_path": "src/server/arena.rs", "rank": 80, "score": 31870.778417825568 }, { "content": "#[serde(rename_all = \"lowercase\")]\n\npub enum PlayerColor {\n\n Blue,\n\n Green,\n\n Orange,\n\n Purple,\n\n Red,\n\n White,\n\n}\n\n\n\nimpl Default for PlayerColor {\n\n fn default() -> Self {\n\n PlayerColor::White\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Direction {\n\n Up,\n", "file_path": "src/server/primitives.rs", "rank": 81, "score": 31870.080795188132 }, { "content": " if let Some(intersection) = travelled.intersection(&line) {\n\n self.updates.push(ArenaUpdate::UpdateLightcyclePosition(\n\n *id,\n\n ArenaPoint::from_untyped(intersection),\n\n ));\n\n self.updates\n\n .push(ArenaUpdate::UpdateLightcycleApplyDeath(*id));\n\n continue 'next_lightcycle;\n\n }\n\n }\n\n }\n\n\n\n // test for arena bounds collisions\n\n if lightcycle.position.x < 0.0\n\n || lightcycle.position.y < 0.0\n\n || lightcycle.position.x 
> self.width\n\n || lightcycle.position.y > self.height\n\n {\n\n self.updates\n\n .push(ArenaUpdate::UpdateLightcycleApplyDeath(*id));\n", "file_path": "src/server/arena.rs", "rank": 82, "score": 31869.716464773363 }, { "content": "\n\n self.update_lightcycle_positions(delta_time)\n\n .apply_updates()\n\n .calculate_lightcycle_collisions(delta_time)\n\n .apply_updates()\n\n .update_lightribbon_positions()\n\n .apply_updates()\n\n .test_win_condition()\n\n .apply_updates()\n\n .test_round_end()\n\n .apply_updates();\n\n }\n\n\n\n //\n\n // update helpers\n\n //\n\n\n\n fn update_lightcycle_positions(&mut self, delta_time: f64) -> &mut Self {\n\n for (id, lightcycle) in self.lightcycles.iter() {\n\n if lightcycle.dead {\n", "file_path": "src/server/arena.rs", "rank": 83, "score": 31869.47114100047 }, { "content": "\n\n let travelled = ArenaLine {\n\n from: last_position.to_untyped(),\n\n to: lightcycle.position.to_untyped(),\n\n };\n\n\n\n // test for lightribbon collisions\n\n for lightribbon in self.lightribbons.values() {\n\n for line in lightribbon.points.windows(2) {\n\n let line = ArenaLine {\n\n from: line[0].to_untyped(),\n\n to: line[1].to_untyped(),\n\n };\n\n\n\n if travelled.overlaps_segment(&line) {\n\n self.updates\n\n .push(ArenaUpdate::UpdateLightcycleApplyDeath(*id));\n\n continue 'next_lightcycle;\n\n }\n\n\n", "file_path": "src/server/arena.rs", "rank": 84, "score": 31869.013523377216 }, { "content": " name: Default::default(),\n\n width: ARENA_WIDTH,\n\n height: ARENA_HEIGHT,\n\n max_players: ARENA_MAX_PLAYERS,\n\n\n\n started: None,\n\n winner: None,\n\n\n\n players: Default::default(),\n\n lightcycles: Default::default(),\n\n lightribbons: Default::default(),\n\n\n\n updates: Default::default(),\n\n updates_applied_so_far: 0,\n\n }\n\n }\n\n}\n\n\n\n/// ArenaOverview represents an overview of an Arena for the arena selection screen.\n\n#[derive(Debug, Clone, Serialize)]\n", "file_path": "src/server/arena.rs", "rank": 85, "score": 31867.374086553893 }, { "content": "use super::*;\n\n\n\n#[derive(Debug, Clone, Hash, PartialEq)]\n\npub enum ArenaInput {\n\n Start,\n\n Turn(Direction),\n\n}\n\n\n\nimpl ArenaInput {\n\n pub fn process_into_updates(self, arena: &Arena, player_id: PlayerId) -> Vec<ArenaUpdate> {\n\n match self {\n\n ArenaInput::Start => {\n\n if arena.started.is_some() {\n\n trace!(\n\n \"Refusing to start arena that is already started: {}\",\n\n arena.id\n\n );\n\n return vec![];\n\n }\n\n\n", "file_path": "src/server/arena/input.rs", "rank": 86, "score": 30908.996784834584 }, { "content": "\n\n ArenaUpdate::UpdateLightcyclePosition(player_id, position) => {\n\n let lightcycle = match arena.lightcycles.get_mut(player_id) {\n\n Some(lightcycle) => lightcycle,\n\n None => {\n\n error!(\"Lightcycle {} not found\", player_id);\n\n return arena;\n\n }\n\n };\n\n\n\n lightcycle.position = *position;\n\n }\n\n ArenaUpdate::UpdateLightcycleDirection(player_id, direction) => {\n\n let lightcycle = match arena.lightcycles.get_mut(player_id) {\n\n Some(lightcycle) => lightcycle,\n\n None => {\n\n error!(\"Lightcycle {} not found\", player_id);\n\n return arena;\n\n }\n\n };\n", "file_path": "src/server/arena/updates.rs", "rank": 87, "score": 30906.57459529298 }, { "content": "\n\n lightcycle.direction = *direction;\n\n }\n\n ArenaUpdate::UpdateLightcycleApplyDeath(player_id) => {\n\n let lightcycle = match arena.lightcycles.get_mut(player_id) {\n\n Some(lightcycle) => lightcycle,\n\n None => {\n\n error!(\"Lightcycle {} not found\", player_id);\n\n return arena;\n\n 
}\n\n };\n\n\n\n lightcycle.dead = true;\n\n }\n\n\n\n ArenaUpdate::UpdateLightribbonAppendPoint(player_id, point) => {\n\n let lightribbon = match arena.lightribbons.get_mut(player_id) {\n\n Some(lightribbon) => lightribbon,\n\n None => {\n\n error!(\"Lightribbon {} not found\", player_id);\n", "file_path": "src/server/arena/updates.rs", "rank": 88, "score": 30906.40906396349 }, { "content": " RemoveLightcycle(PlayerId),\n\n RemoveLightribbon(PlayerId),\n\n}\n\n\n\nimpl ArenaUpdate {\n\n pub fn apply<'s, 'arena>(&'s self, arena: &'arena mut Arena) -> &'arena mut Arena {\n\n match self {\n\n ArenaUpdate::AddPlayer(player_id, player) => {\n\n arena.players.insert(*player_id, player.clone());\n\n }\n\n ArenaUpdate::AddLightcycle(player_id, lightcycle) => {\n\n arena.lightcycles.insert(*player_id, *lightcycle);\n\n }\n\n ArenaUpdate::AddLightribbon(player_id, lightribbon) => {\n\n arena.lightribbons.insert(*player_id, lightribbon.clone());\n\n }\n\n\n\n ArenaUpdate::Start(start_at) => arena.started = Some(*start_at),\n\n ArenaUpdate::End => arena.started = None,\n\n ArenaUpdate::SetWinner(winner) => arena.winner = *winner,\n", "file_path": "src/server/arena/updates.rs", "rank": 89, "score": 30905.298506143357 }, { "content": "\n\n // add the selected spawnpoint to our spawnpoints vector\n\n let selected_spawnpoint = SPAWNPOINTS[selected_spawnpoint];\n\n spawnpoints.push((\n\n player_id,\n\n ArenaPoint::new(selected_spawnpoint.0, selected_spawnpoint.1),\n\n selected_spawnpoint.2,\n\n ));\n\n }\n\n\n\n spawnpoints\n\n}\n", "file_path": "src/server/arena/util.rs", "rank": 90, "score": 30904.901639455256 }, { "content": " return arena;\n\n }\n\n };\n\n\n\n lightribbon.points.push(*point);\n\n }\n\n ArenaUpdate::UpdateLightribbonReplaceLatestPoint(player_id, latest_point) => {\n\n let lightribbon = match arena.lightribbons.get_mut(player_id) {\n\n Some(lightribbon) => lightribbon,\n\n None => {\n\n error!(\"Lightribbon {} not found\", player_id);\n\n return arena;\n\n }\n\n };\n\n\n\n lightribbon.points.pop();\n\n lightribbon.points.push(*latest_point);\n\n }\n\n\n\n ArenaUpdate::RemovePlayer(player_id) => {\n", "file_path": "src/server/arena/updates.rs", "rank": 91, "score": 30904.5395540905 }, { "content": "use super::*;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Serialize)]\n\npub struct Lightcycle {\n\n pub position: ArenaPoint,\n\n pub direction: Direction,\n\n pub speed: f64,\n\n pub dead: bool,\n\n}\n\n\n\nimpl Default for Lightcycle {\n\n fn default() -> Self {\n\n Self {\n\n position: ArenaPoint::origin(),\n\n direction: Direction::Up,\n\n speed: LIGHTCYCLE_SPEED,\n\n dead: Default::default(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Serialize)]\n\npub struct Lightribbon {\n\n pub points: Vec<ArenaPoint>,\n\n}\n", "file_path": "src/server/arena/entities.rs", "rank": 92, "score": 30903.22572136875 }, { "content": " },\n\n ));\n\n },\n\n );\n\n\n\n // begin countdown\n\n updates.push(ArenaUpdate::Start(\n\n Utc::now() + OldDuration::seconds(ARENA_START_TIMER_SECONDS),\n\n ));\n\n\n\n updates\n\n }\n\n\n\n ArenaInput::Turn(direction) => {\n\n let lightcycle = match arena.lightcycles.get(&player_id) {\n\n Some(lightcycle) => lightcycle,\n\n None => {\n\n error!(\"Lightcycle {} not found\", player_id);\n\n return vec![];\n\n }\n", "file_path": "src/server/arena/input.rs", "rank": 93, "score": 30902.81873676136 }, { "content": "use super::*;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize)]\n\npub enum ArenaUpdate {\n\n AddPlayer(PlayerId, Player),\n\n 
AddLightcycle(PlayerId, Lightcycle),\n\n AddLightribbon(PlayerId, Lightribbon),\n\n\n\n Start(DateTime<Utc>),\n\n End,\n\n SetWinner(Option<PlayerId>),\n\n\n\n UpdateLightcyclePosition(PlayerId, ArenaPoint),\n\n UpdateLightcycleDirection(PlayerId, Direction),\n\n UpdateLightcycleApplyDeath(PlayerId),\n\n\n\n UpdateLightribbonAppendPoint(PlayerId, ArenaPoint),\n\n UpdateLightribbonReplaceLatestPoint(PlayerId, ArenaPoint),\n\n\n\n RemovePlayer(PlayerId),\n", "file_path": "src/server/arena/updates.rs", "rank": 94, "score": 30902.473395173645 }, { "content": " };\n\n\n\n match arena.started {\n\n None => {\n\n trace!(\"Refusing to turn lightcycle in stopped arena\");\n\n return vec![];\n\n }\n\n Some(started) => {\n\n let now = Utc::now();\n\n if now < started {\n\n trace!(\"Refusing to turn lightcycle before arena has started\");\n\n return vec![];\n\n }\n\n }\n\n }\n\n\n\n if lightcycle.dead {\n\n trace!(\"Refusing to turn dead lightcycle\");\n\n return vec![];\n\n }\n", "file_path": "src/server/arena/input.rs", "rank": 95, "score": 30902.056851290752 }, { "content": "\n\n if lightcycle.direction.is_opposite(direction) {\n\n trace!(\"Refusing to turn lightcycle in opposite direction\");\n\n return vec![];\n\n }\n\n\n\n return vec![\n\n ArenaUpdate::UpdateLightribbonAppendPoint(player_id, lightcycle.position),\n\n ArenaUpdate::UpdateLightcycleDirection(player_id, direction),\n\n ];\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/server/arena/input.rs", "rank": 96, "score": 30901.402690274732 }, { "content": " .keys()\n\n .copied()\n\n .for_each(|id| updates.push(ArenaUpdate::RemoveLightribbon(id)));\n\n\n\n // add new lightcycles and lightribbons\n\n let player_ids = arena.players.keys().copied().collect();\n\n calculate_spawnpoints(player_ids).drain(..).for_each(\n\n |(player_id, spawn_position, spawn_direction)| {\n\n updates.push(ArenaUpdate::AddLightcycle(\n\n player_id,\n\n Lightcycle {\n\n position: spawn_position,\n\n direction: spawn_direction,\n\n ..Default::default()\n\n },\n\n ));\n\n updates.push(ArenaUpdate::AddLightribbon(\n\n player_id,\n\n Lightribbon {\n\n points: vec![spawn_position, spawn_position],\n", "file_path": "src/server/arena/input.rs", "rank": 97, "score": 30901.214973208436 }, { "content": " let mut updates = Vec::with_capacity(\n\n 1 + arena.lightcycles.len()\n\n + arena.lightribbons.len()\n\n + (arena.players.len() * 2)\n\n + 1,\n\n );\n\n\n\n // remove existing winner\n\n updates.push(ArenaUpdate::SetWinner(None));\n\n\n\n // remove existing lightcycles\n\n arena\n\n .lightcycles\n\n .keys()\n\n .copied()\n\n .for_each(|id| updates.push(ArenaUpdate::RemoveLightcycle(id)));\n\n\n\n // remove existing lightribbons\n\n arena\n\n .lightribbons\n", "file_path": "src/server/arena/input.rs", "rank": 98, "score": 30900.730940707006 }, { "content": " arena.players.remove(player_id);\n\n }\n\n ArenaUpdate::RemoveLightcycle(player_id) => {\n\n arena.lightcycles.remove(player_id);\n\n }\n\n ArenaUpdate::RemoveLightribbon(player_id) => {\n\n arena.lightribbons.remove(player_id);\n\n }\n\n }\n\n\n\n arena\n\n }\n\n}\n", "file_path": "src/server/arena/updates.rs", "rank": 99, "score": 30899.87861019152 } ]
Rust
src/stats/mod.rs
hhandika/yap
95f0b06770b958afef12105c8088274684bbdae8
mod fasta; mod fastq; mod math; mod output; mod qscores; mod sequence; use std::path::PathBuf; use std::sync::mpsc::channel; use rayon::prelude::*; use walkdir::WalkDir; use crate::stats::sequence::{FastaStats, FastqStats}; pub fn process_wildcard(entries: &[&str], iscsv: bool, fastq: bool) { let files: Vec<PathBuf> = entries.iter().map(PathBuf::from).collect(); par_process_files(&files, iscsv, fastq) } pub fn process_walkdir(path: &str, iscsv: bool, fastq: bool) { let entries = tranverse_dir(path, fastq); par_process_files(&entries, iscsv, fastq) } fn par_process_files(entries: &[PathBuf], iscsv: bool, fastq: bool) { if fastq { par_process_fastq(&entries, iscsv); } else { par_process_fasta(&entries, iscsv); } } fn tranverse_dir(path: &str, fastq: bool) -> Vec<PathBuf> { let mut entries = Vec::new(); WalkDir::new(path) .into_iter() .filter_map(|ok| ok.ok()) .filter(|e| e.file_type().is_file()) .for_each(|e| { if fastq { let files = String::from(e.path().to_string_lossy()); match_fastq(&files, &mut entries); } else { let files = String::from(e.path().to_string_lossy()); match_fasta(&files, &mut entries); } }); entries } fn match_fastq(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fastq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq") => entries.push(PathBuf::from(files)), s if s.ends_with("fq") => entries.push(PathBuf::from(files)), _ => (), }; } fn match_fasta(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fasta.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa") => entries.push(PathBuf::from(files)), _ => (), }; } fn par_process_fastq(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fastq::process_fastq(&recs)).unwrap(); }); let mut all_reads: Vec<FastqStats> = receiver.iter().collect(); output::write_fastq(&mut all_reads, iscsv); } fn par_process_fasta(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fasta::process_fasta(&recs)).unwrap(); }); let mut all_reads: Vec<FastaStats> = receiver.iter().collect(); output::write_fasta(&mut all_reads, iscsv); } #[cfg(test)] mod tests { use super::*; #[test] fn tranverse_dir_test() { let input = "test_files/stats"; let files = tranverse_dir(&input, true); assert_eq!(4, files.len()) } #[test] fn match_fasta_test() { let input = vec!["test.fasta", "test.fas", "test.fa", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fasta(&e, &mut entries); }); assert_eq!(4, entries.len()); } #[test] fn match_fastq_test() { let input = vec!["test.fq", "test.fastq", "test.fq.gz", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fastq(&e, &mut entries); }); assert_eq!(3, entries.len()); } }
mod fasta; mod fastq; mod math; mod output; mod qscores; mod sequence; use std::path::PathBuf; use std::sync::mpsc::channel; use rayon::prelude::*; use walkdir::WalkDir; use crate::stats::sequence::{FastaStats, FastqStats}; pub fn process_wildcard(entries: &[&str], iscsv: bool, fastq: bool) { let files: Vec<PathBuf> = entries.iter().map(PathBuf::from).collect(); par_process_files(&files, iscsv, fastq) } pub fn process_walkdir(path: &str, iscsv: bool, fastq: bool) { let entries = tranverse_dir(path, fastq); par_process_files(&entries, iscsv, fastq) } fn par_process_files(entries: &[PathBuf], iscsv: bool, fastq: bool) { if fastq { par_process_fastq(&entries, iscsv); } else { par_process_fasta(&entries, iscsv); } } fn tranverse_dir(path: &str, fastq: bool) -> Vec<PathBuf> { let mut entries = Vec::new(); WalkDir::new(path) .into_iter() .filter_map(|ok| ok.ok()) .filter(|e| e.file_type().is_file()) .for_each(|e| { if fastq { let files = String::from(e.path().to_string_lossy()); match_fastq(&files, &mut entries); } else { let files = String::from(e.path().to_string_lossy()); match_fasta(&files, &mut entries); } }); entries } fn match_fastq(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fastq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fq.gz") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fq.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with("fastq") => entries.push(PathBuf::from(files)), s if s.ends_with("fq") => entries.push(PathBuf::from(files)), _ => (), }; } fn match_fasta(files: &str, entries: &mut Vec<PathBuf>) { match files { s if s.ends_with(".fasta.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gz") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa.gzip") => entries.push(PathBuf::from(files)), s if s.ends_with(".fasta") => entries.push(PathBuf::from(files)), s if s.ends_with(".fas") => entries.push(PathBuf::from(files)), s if s.ends_with(".fa") => entries.push(PathBuf::from(files)), _ => (), }; } fn par_process_fastq(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel(); files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fastq::process_fastq(&recs)).unwrap(); }); let mut all_reads: Vec<FastqStats> = receiver.iter().collect(); output::write_fastq(&mut all_reads, iscsv); } fn par_process_fasta(files: &[PathBuf], iscsv: bool) { let (sender, receiver) = channel();
#[cfg(test)] mod tests { use super::*; #[test] fn tranverse_dir_test() { let input = "test_files/stats"; let files = tranverse_dir(&input, true); assert_eq!(4, files.len()) } #[test] fn match_fasta_test() { let input = vec!["test.fasta", "test.fas", "test.fa", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fasta(&e, &mut entries); }); assert_eq!(4, entries.len()); } #[test] fn match_fastq_test() { let input = vec!["test.fq", "test.fastq", "test.fq.gz", "test.fa.gz"]; let mut entries = Vec::new(); input.iter().for_each(|e| { match_fastq(&e, &mut entries); }); assert_eq!(3, entries.len()); } }
files.into_par_iter().for_each_with(sender, |s, recs| { s.send(fasta::process_fasta(&recs)).unwrap(); }); let mut all_reads: Vec<FastaStats> = receiver.iter().collect(); output::write_fasta(&mut all_reads, iscsv); }
function_block-function_prefix_line
[ { "content": "pub fn write_fasta(stats: &mut [FastaStats], iscsv: bool) {\n\n stats.sort_by(|a, b| a.seqname.cmp(&b.seqname));\n\n\n\n println!(\"\\n\\x1b[1mResults:\\x1b[0m\");\n\n stats.iter().for_each(|recs| {\n\n write_fasta_console(&recs);\n\n });\n\n println!(\"Total files: {}\", stats.len());\n\n if iscsv {\n\n write_fasta_csv(stats);\n\n }\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 3, "score": 239897.72061516106 }, { "content": "pub fn write_fastq(results: &mut [FastqStats], iscsv: bool) {\n\n results.sort_by(|a, b| a.seqname.cmp(&b.seqname));\n\n\n\n println!(\"\\n\\x1b[1mResults:\\x1b[0m\");\n\n results.iter().for_each(|recs| {\n\n write_fastq_console(&recs);\n\n });\n\n println!(\"Total files: {}\", results.len());\n\n\n\n if iscsv {\n\n write_fastq_csv(results);\n\n }\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 4, "score": 239672.2444731408 }, { "content": "pub fn dryrun(input: &str) {\n\n let dirs = parser::parse_seqdir(input);\n\n let samples = finder::find_cleaned_fastq(&dirs);\n\n utils::get_system_info().unwrap();\n\n print_dryrun(&samples).unwrap();\n\n}\n\n\n", "file_path": "src/assembly/mod.rs", "rank": 10, "score": 155963.18284632795 }, { "content": "pub fn auto_dryrun(path: &str, dirname: &str) {\n\n let samples = finder::auto_find_cleaned_fastq(path, dirname);\n\n utils::get_system_info().unwrap();\n\n print_dryrun(&samples).unwrap();\n\n}\n\n\n", "file_path": "src/assembly/mod.rs", "rank": 11, "score": 154495.25230426114 }, { "content": "fn write_fasta_header<W: Write>(line: &mut W, path: bool) {\n\n if path {\n\n write!(line, \"Path,\").unwrap();\n\n }\n\n writeln!(\n\n line,\n\n \"Sequence_names,\\\n\n Contig_counts,\\\n\n Total_sequence_length,\\\n\n GC_counts,\\\n\n GC-content,\\\n\n N_counts,\\\n\n N-content,\\\n\n Min_contig_length,\\\n\n Max_contig_length,\\\n\n Mean_contig_length,\\\n\n Median_contig_length,\\\n\n Stdev_contig_length,\\\n\n N50,\\\n\n N75,\\\n\n N90,\\\n\n No_contigs_>750bp,\\\n\n No_contigs_>1000bp,\\\n\n No_contigs_>1500bp\"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 12, "score": 144806.49900274852 }, { "content": "fn write_fastq_header<W: Write>(line: &mut W, path: bool) {\n\n if path {\n\n write!(line, \"Path,\").unwrap();\n\n }\n\n writeln!(\n\n line,\n\n \"Sequence names,\\\n\n Read counts,\\\n\n Total sequence length,\\\n\n GC counts,\\\n\n GC-content,\\\n\n N counts,\\\n\n N-content,\\\n\n Min read length,\\\n\n Max read length,\\\n\n Mean read length,\\\n\n Median read length,\\\n\n Stdev read length,\\\n\n Mean q-score,\\\n\n Low base < 20,\\\n\n Low q-score ratio\"\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 13, "score": 144655.92914801228 }, { "content": "pub fn dry_run(input: &Path, is_id: bool, is_rename: bool) {\n\n let reads: Vec<RawSeq> = parser::parse_input(input, is_id, is_rename);\n\n let stdout = io::stdout();\n\n let mut handle = io::BufWriter::new(stdout);\n\n\n\n writeln!(handle).unwrap();\n\n reads.iter().for_each(|r| {\n\n writeln!(handle, \"\\x1b[0;32mID\\t\\t: {}\\x1b[0m\", r.id).unwrap();\n\n writeln!(handle, \"Read 1\\t\\t: {}\", r.read_1.to_string_lossy()).unwrap();\n\n writeln!(handle, \"Read 2\\t\\t: {}\", r.read_2.to_string_lossy()).unwrap();\n\n\n\n match r.adapter_i7.as_ref() {\n\n Some(i7) => {\n\n writeln!(handle, \"Adapter i5\\t: {}\", r.adapter_i5.as_ref().unwrap()).unwrap();\n\n writeln!(handle, \"Adapter i7\\t: {}\", i7).unwrap();\n\n }\n\n None => {\n\n if r.auto_idx {\n\n writeln!(handle, 
\"Adapter\\t\\t: AUTO-DETECT\").unwrap();\n\n } else {\n", "file_path": "src/qc/mod.rs", "rank": 14, "score": 143644.6724394858 }, { "content": "fn write_fasta_contents<W: Write>(seq: &FastaStats, line: &mut W, path: bool) {\n\n if path {\n\n write!(line, \"{},\", seq.path).unwrap();\n\n }\n\n writeln!(\n\n line,\n\n \"{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\",\n\n seq.seqname,\n\n seq.contig_counts,\n\n seq.total_bp,\n\n seq.total_gc,\n\n seq.gc_content,\n\n seq.total_n,\n\n seq.n_content,\n\n seq.min,\n\n seq.max,\n\n seq.mean,\n\n seq.median,\n\n seq.sd,\n\n seq.n50,\n\n seq.n75,\n\n seq.n90,\n\n seq.con750,\n\n seq.con1000,\n\n seq.con1500,\n\n )\n\n .unwrap();\n\n}\n", "file_path": "src/stats/output.rs", "rank": 15, "score": 140258.9069824025 }, { "content": "fn write_fastq_contents<W: Write>(seq: &FastqStats, line: &mut W, path: bool) {\n\n if path {\n\n write!(line, \"{},\", seq.path).unwrap();\n\n }\n\n writeln!(\n\n line,\n\n \"{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\",\n\n seq.seqname,\n\n seq.read_count,\n\n seq.total_bp,\n\n seq.total_gc,\n\n seq.gc_content,\n\n seq.total_n,\n\n seq.n_content,\n\n seq.min_reads,\n\n seq.max_reads,\n\n seq.mean_reads,\n\n seq.median_reads,\n\n seq.sd_reads,\n\n seq.mean_qscores,\n\n seq.sum_low_bases,\n\n seq.low_bases_ratio,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 16, "score": 140054.59450812935 }, { "content": "pub fn auto_find_cleaned_fastq(path: &str, dirname: &str) -> Vec<SeqReads> {\n\n let mut entries = Vec::new();\n\n\n\n WalkDir::new(path)\n\n .into_iter()\n\n .filter_map(|ok| ok.ok())\n\n .filter(|e| e.file_type().is_dir())\n\n .for_each(|e| {\n\n let dir = e.path().to_string_lossy();\n\n if dir.contains(dirname) {\n\n let target = None;\n\n get_files(&dir, &mut entries, target);\n\n }\n\n });\n\n\n\n entries\n\n}\n\n\n", "file_path": "src/assembly/finder.rs", "rank": 17, "score": 134909.47518260157 }, { "content": "fn get_files(dir: &str, entries: &mut Vec<SeqReads>, target: Option<String>) {\n\n let mut files = SeqReads::new(&dir);\n\n let fastq = files.glob_fastq();\n\n files.match_reads(&fastq);\n\n files.get_id(target);\n\n\n\n if !files.read_1.as_os_str().is_empty() {\n\n entries.push(files);\n\n }\n\n}\n\n\n\npub struct SeqReads {\n\n pub dir: PathBuf,\n\n pub id: String,\n\n pub read_1: PathBuf,\n\n pub read_2: PathBuf,\n\n pub singleton: Option<PathBuf>,\n\n}\n\n\n\nimpl SeqReads {\n", "file_path": "src/assembly/finder.rs", "rank": 18, "score": 134037.2945872789 }, { "content": "pub fn print_header(text: &str) {\n\n let header = format!(\"Processing {}\", text);\n\n let length = 78;\n\n let sym = '=';\n\n let mut header = PrettyHeader::new(&header, sym, length);\n\n header.print_header().unwrap();\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 130706.45038570545 }, { "content": "pub fn parse_cli(version: &str) {\n\n let args = get_args(version);\n\n match args.subcommand() {\n\n (\"new\", Some(init_matches)) => new_input(init_matches),\n\n (\"assembly\", Some(assembly_matches)) => match_assembly_cli(assembly_matches, version),\n\n (\"qc\", Some(qc_matches)) => Fastp::match_cli(qc_matches, version),\n\n (\"check\", Some(_)) => checker::check_dependencies().unwrap(),\n\n (\"stats\", Some(stats_matches)) => match_stats_cli(stats_matches, version),\n\n _ => unreachable!(),\n\n };\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 20, "score": 130706.45038570545 }, { "content": "fn is_insert_missing(adapter: &str) -> bool {\n\n adapter.contains('*')\n\n}\n\n\n", 
"file_path": "src/qc/parser.rs", "rank": 21, "score": 128333.28099373468 }, { "content": "pub fn insert_tag(seq: &str, ins: &str) -> String {\n\n let insert = ins.to_uppercase();\n\n let trans = translate_dna(&insert);\n\n check_tag(&insert);\n\n seq.replace(\"*\", &trans).to_uppercase()\n\n}\n\n\n", "file_path": "src/qc/tag.rs", "rank": 22, "score": 127031.44331295404 }, { "content": "fn get_insert_single(seq: &mut RawSeq, i5: &str, i7: &str, insert: &str) {\n\n let adapter_i7 = i7.to_uppercase();\n\n if is_insert_missing(i5) {\n\n let adapter_i5 = tag::insert_tag(i5, insert);\n\n seq.get_adapter_dual(&adapter_i5, &adapter_i7);\n\n } else {\n\n panic!(\"INVALID COLUMNS FOR {}!\", seq.id);\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 23, "score": 120371.56526912123 }, { "content": "fn find_files(path: &str) -> Vec<PathBuf> {\n\n let patterns = format!(\"{}/*\", path);\n\n\n\n glob(&patterns)\n\n .unwrap()\n\n .filter_map(|ok| ok.ok())\n\n .collect()\n\n}\n\n\n", "file_path": "src/assembly/cleaner.rs", "rank": 24, "score": 120015.971557701 }, { "content": "fn get_adapter_dual(seq: &mut RawSeq, i5: &str, i7: &str) {\n\n let adapter_i5 = i5.to_uppercase();\n\n if is_insert_missing(&adapter_i5) {\n\n let adapter_i5 = tag::insert_tag(i5, i7);\n\n seq.get_adapter_single(&adapter_i5);\n\n } else {\n\n let adapter_i7 = i7.to_uppercase();\n\n seq.get_adapter_dual(&adapter_i5, &adapter_i7);\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 25, "score": 118801.24312456354 }, { "content": "fn get_insert_dual(seq: &mut RawSeq, i5: &str, i7: &str, insert_i5: &str, insert_i7: &str) {\n\n let i5 = tag::insert_tag(i5, insert_i5);\n\n let i7 = tag::insert_tag(i7, insert_i7);\n\n seq.get_adapter_dual(&i5, &i7);\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 26, "score": 117199.11278980176 }, { "content": "pub fn process_input(\n\n input: &str,\n\n threads: &Option<usize>,\n\n outdir: &Option<PathBuf>,\n\n args: &Option<String>,\n\n) {\n\n let dirs = parser::parse_seqdir(input);\n\n let samples = finder::find_cleaned_fastq(&dirs);\n\n runner::assemble_reads(&samples, threads, outdir, args);\n\n}\n\n\n", "file_path": "src/assembly/mod.rs", "rank": 27, "score": 117058.3091279043 }, { "content": "pub fn process_input(\n\n input: &Path,\n\n is_id: bool,\n\n is_rename: bool,\n\n params: &Option<String>,\n\n outdir: &Option<PathBuf>,\n\n) {\n\n let reads: Vec<RawSeq> = parser::parse_input(input, is_id, is_rename);\n\n runner::clean_reads(&reads, params, outdir);\n\n}\n", "file_path": "src/qc/mod.rs", "rank": 28, "score": 117058.3091279043 }, { "content": "#[inline(always)]\n\nfn is_unzip_fasta(input: &Path) -> bool {\n\n let ext = input.extension().unwrap();\n\n\n\n ext == \"fasta\" || ext == \"fas\" || ext == \"fa\"\n\n}\n\n\n", "file_path": "src/stats/fasta.rs", "rank": 29, "score": 115442.5797679783 }, { "content": "#[inline(always)]\n\nfn is_gz_fasta(input: &Path) -> bool {\n\n let ext = input.extension().unwrap();\n\n ext == \"gz\" || ext == \"gzip\"\n\n}\n\n\n", "file_path": "src/stats/fasta.rs", "rank": 30, "score": 115442.5797679783 }, { "content": "fn is_unzip_fastq(input: &Path) -> bool {\n\n let ext = input.extension().unwrap();\n\n\n\n ext == \"fastq\" || ext == \"fq\"\n\n}\n\n\n", "file_path": "src/stats/fastq.rs", "rank": 31, "score": 115191.04884435658 }, { "content": "pub fn auto_process_input(\n\n path: &str,\n\n dirname: &str,\n\n threads: &Option<usize>,\n\n outdir: &Option<PathBuf>,\n\n args: &Option<String>,\n\n) {\n\n let samples = 
finder::auto_find_cleaned_fastq(path, dirname);\n\n runner::assemble_reads(&samples, threads, outdir, args);\n\n}\n\n\n", "file_path": "src/assembly/mod.rs", "rank": 32, "score": 113687.29273660423 }, { "content": "pub fn parse_seqdir(input: &str) -> Vec<SeqDirs> {\n\n let file = File::open(input).unwrap();\n\n let buff = BufReader::new(file);\n\n\n\n let mut seqdir = Vec::new();\n\n buff.lines()\n\n .filter_map(|ok| ok.ok())\n\n .skip(1)\n\n .for_each(|line| {\n\n let mut sample = SeqDirs::new();\n\n\n\n if line.contains(',') {\n\n sample.parse_csv(&line);\n\n } else if line.contains(':') {\n\n sample.parse_ini(&line);\n\n } else {\n\n panic!(\n\n \"INVALID INPUT FORMAT. \\\n\n LOOKING FOR ',' or ':' FOUND {}\",\n\n line\n", "file_path": "src/assembly/parser.rs", "rank": 33, "score": 113183.4264388434 }, { "content": "pub fn parse_input(input: &Path, is_id: bool, is_rename: bool) -> Vec<RawSeq> {\n\n let file = File::open(input).expect(\"CAN'T OPEN INPUT FILE.\");\n\n let buff = BufReader::new(file);\n\n let mut raw_seqs = Vec::new();\n\n let mut lcounts: usize = 0;\n\n\n\n let ext = input.extension().unwrap().to_string_lossy();\n\n if ext == \"conf\" {\n\n parse_input_ini(buff, &mut raw_seqs, &mut lcounts);\n\n } else if ext == \"csv\" {\n\n parse_input_csv(buff, input, &mut raw_seqs, &mut lcounts, is_id, is_rename)\n\n } else {\n\n panic!(\n\n \"{:?} IS INVALID INPUT FILES. THE EXTENSION MUST BE .conf OR .csv.\",\n\n input\n\n );\n\n }\n\n\n\n println!(\"Total samples: {}\", lcounts);\n\n raw_seqs\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 34, "score": 112563.06008261356 }, { "content": "pub fn process_fasta(input: &Path) -> FastaStats {\n\n let file = File::open(input).unwrap();\n\n if is_gz_fasta(input) {\n\n let read = BufReader::new(file);\n\n let decom = MultiGzDecoder::new(read);\n\n parse_fasta(decom, input)\n\n } else if is_unzip_fasta(input) {\n\n parse_fasta(file, input)\n\n } else {\n\n panic!(\"INVALID FASTA\");\n\n }\n\n}\n\n\n", "file_path": "src/stats/fasta.rs", "rank": 35, "score": 112282.23979199844 }, { "content": "pub fn process_fastq(input: &Path) -> FastqStats {\n\n if is_gunzip(input) {\n\n parse_gunzip_fastq(input)\n\n } else if is_unzip_fastq(input) {\n\n parse_unzip_fastq(input)\n\n } else {\n\n panic!(\"INVALID FASTQ.\");\n\n }\n\n}\n\n\n", "file_path": "src/stats/fastq.rs", "rank": 36, "score": 112006.22793095847 }, { "content": "fn get_adapter_single(seq: &mut RawSeq, adapters: &str) {\n\n let i5 = adapters.to_uppercase();\n\n if is_insert_missing(&i5) {\n\n panic!(\"INSERT MISSING!\");\n\n } else {\n\n seq.get_adapter_single(&i5);\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 37, "score": 111632.59687463005 }, { "content": "fn split_line(lines: &str, csv: bool) -> Vec<String> {\n\n assert!(\n\n lines.contains(',') || lines.contains(':'),\n\n \"INVALID INPUT LINE FORMAT\"\n\n );\n\n let mut sep = ',';\n\n if !csv {\n\n sep = ':';\n\n }\n\n let seqs = lines.split(sep).map(|e| e.trim().to_string()).collect();\n\n seqs\n\n}\n\n\n\npub struct RawSeq {\n\n pub id: String,\n\n pub dir: PathBuf,\n\n pub read_1: PathBuf,\n\n pub read_2: PathBuf,\n\n pub adapter_i5: Option<String>,\n\n pub adapter_i7: Option<String>,\n", "file_path": "src/qc/parser.rs", "rank": 38, "score": 111041.10403102048 }, { "content": "#[inline(always)]\n\nfn is_gunzip(input: &Path) -> bool {\n\n input.extension().unwrap() == \"gz\"\n\n}\n\n\n", "file_path": "src/stats/fastq.rs", "rank": 39, "score": 109092.65900024881 }, { "content": "fn match_stats_cli(args: 
&ArgMatches, version: &str) {\n\n let mut stats = Stats::new();\n\n println!(\"Starting YAP-stats v{}...\\n\", version);\n\n let value = args.value_of(\"format\").expect(\"IS NOT A VALID FILE PATH\");\n\n match value {\n\n \"fastq\" => stats.match_fastq(args),\n\n \"fasta\" => stats.match_fasta(args),\n\n _ => unreachable!(\"Please specify the allowed values\"),\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 40, "score": 107639.79108646346 }, { "content": "fn match_assembly_cli(args: &ArgMatches, version: &str) {\n\n let mut spades = Spades::new();\n\n println!(\"Starting YAP-assembly v{}...\\n\", version);\n\n match args.subcommand() {\n\n (\"auto\", Some(clean_matches)) => spades.run_auto(clean_matches),\n\n (\"conf\", Some(assembly_matches)) => spades.run(assembly_matches),\n\n (\"clean\", Some(clean_matches)) => spades.clean_files(clean_matches),\n\n _ => unreachable!(),\n\n };\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 41, "score": 107639.79108646346 }, { "content": "fn get_args(version: &str) -> ArgMatches {\n\n App::new(\"YAP\")\n\n .version(version)\n\n .about(\"A cli app for phylogenomics\")\n\n .author(\"Heru Handika <[email protected]>\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(App::new(\"check\").about(\"Checks dependencies\"))\n\n .subcommand(\n\n App::new(\"new\")\n\n .about(\"Find sequences and generate input files\")\n\n .arg(\n\n Arg::with_name(\"dir\")\n\n .short(\"d\")\n\n .long(\"dir\")\n\n .help(\"Specify input directory\")\n\n .takes_value(true)\n\n .default_value(\"raw_reads\")\n\n .value_name(\"DIR\"),\n\n )\n\n .arg(\n", "file_path": "src/cli.rs", "rank": 42, "score": 105870.50621327876 }, { "content": "pub fn median(vec: &[u32]) -> f64 {\n\n let sorted_vec = sort_vector_asc(&vec);\n\n let n = sorted_vec.len();\n\n let midpoint = n / 2;\n\n\n\n let med;\n\n if n % 2 == 0 {\n\n med = (sorted_vec[midpoint - 1] + sorted_vec[midpoint]) as f64 / 2.0\n\n } else {\n\n med = sorted_vec[midpoint] as f64\n\n }\n\n\n\n med\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 43, "score": 103415.35441277914 }, { "content": "pub fn clean_spades_files(path: &Path) {\n\n let io = io::stdout();\n\n let mut handle = io::BufWriter::new(io);\n\n writeln!(handle, \"\\x1b[0;33mRemoved files and directories:\\x1b[0m\").unwrap();\n\n WalkDir::new(path).into_iter()\n\n .filter_map(|ok| ok.ok())\n\n .filter(|e| e.path().ends_with(\"spades.log\"))\n\n .for_each(|e| {\n\n let path = e.path().parent().unwrap().to_string_lossy();\n\n let contents = find_files(&path);\n\n remove_contents(&contents, &mut handle);\n\n });\n\n writeln!(handle).unwrap();\n\n}\n\n\n", "file_path": "src/assembly/cleaner.rs", "rank": 44, "score": 102779.61032392489 }, { "content": "fn parse_fasta<R: Read>(file: R, input: &Path) -> FastaStats {\n\n let stdout = io::stdout();\n\n let mut stdbuf = io::BufWriter::new(stdout);\n\n\n\n write!(stdbuf, \"Processing {:?}\\t\", input.file_name().unwrap()).unwrap();\n\n\n\n let mut contig_counts: u32 = 0;\n\n let mut contigs: Vec<SeqReads> = Vec::new();\n\n\n\n let file = Fasta::new(file);\n\n\n\n file.into_iter().for_each(|recs| {\n\n contig_counts += 1;\n\n let reads = SeqReads::get_seq_stats(&recs.as_bytes());\n\n contigs.push(reads);\n\n });\n\n writeln!(stdbuf, \"\\x1b[0;32mDONE!\\x1b[0m\").unwrap();\n\n FastaStats::get_stats(input, &contig_counts, &contigs)\n\n}\n\n\n", "file_path": "src/stats/fasta.rs", "rank": 45, "score": 102228.06457682792 }, { "content": "fn check_reads(reads: &[PathBuf], id: &str) {\n\n match reads.len() 
{\n\n 0 => panic!(\n\n \"CANNOT FIND FILE {}. \\\n\n USE THE --id FLAG IF YOU USE THE FILE ID.\",\n\n id\n\n ),\n\n 2 => (),\n\n _ => panic!(\"REQUIRED TWO READS FOR {}. FOUND: {:?}\", id, reads),\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 46, "score": 97483.89665835221 }, { "content": "pub fn stdev(vec: &[u32], mean: &f64) -> f64 {\n\n let var = variance(vec, mean);\n\n var.sqrt()\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 47, "score": 94401.09512543544 }, { "content": "fn write_fasta_console(contigs: &FastaStats) {\n\n let stdout = io::stdout();\n\n let mut buff = io::BufWriter::new(stdout);\n\n\n\n writeln!(buff, \"\\x1b[0;32mFile {:?}\\x1b[0m\", contigs.seqname).unwrap();\n\n\n\n writeln!(\n\n buff,\n\n \"No. of contigs\\t\\t: {}\",\n\n contigs.contig_counts.to_formatted_string(&Locale::en)\n\n )\n\n .unwrap();\n\n\n\n writeln!(\n\n buff,\n\n \"Total GC count\\t\\t: {}\",\n\n contigs.total_gc.to_formatted_string(&Locale::en)\n\n )\n\n .unwrap();\n\n\n", "file_path": "src/stats/output.rs", "rank": 48, "score": 90793.91133839465 }, { "content": "fn write_fasta_csv(stats: &[FastaStats]) {\n\n let fname = \"YAP-fasta-summary.csv\";\n\n let output = File::create(&fname).expect(\"FILE EXISTS.\");\n\n let mut line = LineWriter::new(output);\n\n let path = !stats[0].path.is_empty();\n\n\n\n write_fasta_header(&mut line, path);\n\n\n\n stats\n\n .iter()\n\n .for_each(|seq| write_fasta_contents(seq, &mut line, path));\n\n println!(\"The result is saved as {}\", fname);\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 49, "score": 90793.91133839465 }, { "content": "pub fn assemble_reads(\n\n reads: &[SeqReads],\n\n threads: &Option<usize>,\n\n outdir: &Option<PathBuf>,\n\n args: &Option<String>,\n\n) {\n\n let dir = get_outdir(&outdir);\n\n check_dir_exists(&dir);\n\n let contig_dir = dir.join(\"contig_symlinks\");\n\n fs::create_dir_all(&contig_dir).unwrap();\n\n println!(\"\\x1b[0;33mTotal samples: {}\\n\\x1b[0m\", reads.len());\n\n reads.iter().for_each(|r| {\n\n let mut run = Runner::new(&dir, &contig_dir, r, threads, args);\n\n run.run_spades();\n\n });\n\n}\n\n\n", "file_path": "src/assembly/runner.rs", "rank": 50, "score": 90648.99713579574 }, { "content": "fn write_fastq_csv(all_reads: &[FastqStats]) {\n\n let fname = \"YAP-fastq-summary.csv\";\n\n let output = File::create(&fname).expect(\"FILE EXISTS.\");\n\n let mut line = LineWriter::new(output);\n\n let path = !all_reads[0].path.is_empty();\n\n\n\n write_fastq_header(&mut line, path);\n\n\n\n all_reads\n\n .iter()\n\n .for_each(|seq| write_fastq_contents(seq, &mut line, path));\n\n println!(\"The result is saved as {}\", fname);\n\n}\n\n\n", "file_path": "src/stats/output.rs", "rank": 51, "score": 90549.4427750835 }, { "content": "fn write_fastq_console(all_reads: &FastqStats) {\n\n let stdout = io::stdout();\n\n let mut buff = io::BufWriter::new(stdout);\n\n\n\n writeln!(buff, \"\\x1b[0;32mFile {:?}\\x1b[0m\", &all_reads.seqname).unwrap();\n\n\n\n writeln!(\n\n buff,\n\n \"No. 
of reads\\t\\t: {}\",\n\n &all_reads.read_count.to_formatted_string(&Locale::en)\n\n )\n\n .unwrap();\n\n\n\n writeln!(\n\n buff,\n\n \"Total GC count\\t\\t: {}\",\n\n &all_reads.total_gc.to_formatted_string(&Locale::en)\n\n )\n\n .unwrap();\n\n\n", "file_path": "src/stats/output.rs", "rank": 52, "score": 90549.4427750835 }, { "content": "pub fn find_cleaned_fastq(dirs: &[SeqDirs]) -> Vec<SeqReads> {\n\n let mut entries = Vec::new();\n\n\n\n dirs.iter()\n\n .for_each(|s| get_files(&s.dir, &mut entries, Some(s.id.clone())));\n\n entries\n\n}\n\n\n", "file_path": "src/assembly/finder.rs", "rank": 53, "score": 89081.98683201657 }, { "content": "pub fn check_dependencies() -> Result<()> {\n\n let stdout = io::stdout();\n\n let mut handle = io::BufWriter::new(stdout);\n\n utils::get_system_info().unwrap();\n\n writeln!(handle, \"Dependencies:\")?;\n\n check_fastp(&mut handle)?;\n\n check_spades(&mut handle)?;\n\n writeln!(handle)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/checker.rs", "rank": 54, "score": 87876.02891754461 }, { "content": "fn check_tag(insert: &str) {\n\n insert.chars()\n\n .for_each(| dna | \n\n match dna {\n\n 'A' | 'G' | 'T' | 'C' => (),\n\n _ => panic!(\"INVALID TAG DNA SEQUENCES\")\n\n }\n\n )\n\n}\n\n\n", "file_path": "src/qc/tag.rs", "rank": 55, "score": 85852.97888342084 }, { "content": "pub fn set_spinner() -> ProgressBar {\n\n let spin = ProgressBar::new_spinner();\n\n spin.enable_steady_tick(150);\n\n spin.set_style(\n\n ProgressStyle::default_spinner()\n\n .tick_chars(\"🌑🌒🌓🌔🌕🌖🌗🌘\")\n\n .template(\"{spinner} {msg}\"),\n\n );\n\n spin\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 56, "score": 85657.5920577306 }, { "content": "pub fn get_system_info() -> Result<()> {\n\n let sysinfo = sysinfo::System::new_all();\n\n let io = io::stdout();\n\n let mut handle = io::BufWriter::new(io);\n\n\n\n let total_ram = sysinfo.get_total_memory();\n\n let gb = 1048576;\n\n\n\n writeln!(handle, \"\\x1b[0;33mSystem Information\\x1b[0m\")?;\n\n\n\n writeln!(\n\n handle,\n\n \"Operating system\\t: {} {}\",\n\n get_os_name(&sysinfo),\n\n get_os_version(&sysinfo)\n\n )?;\n\n\n\n writeln!(\n\n handle,\n\n \"Kernel version\\t\\t: {}\",\n", "file_path": "src/utils.rs", "rank": 57, "score": 85657.5920577306 }, { "content": "fn remove_contents<W: Write>(contents: &[PathBuf], handle: &mut W) {\n\n contents.iter()\n\n .for_each(|e| {\n\n if e.is_file() {\n\n match e.to_string_lossy() {\n\n p if p.ends_with(\"/contigs.fasta\") => (),\n\n p if p.ends_with(\"/scaffolds.fasta\") => (),\n\n p if p.ends_with(\"/spades.log\") => (),\n\n p if p.ends_with(\"/warnings.log\") => (),\n\n _ => {\n\n fs::remove_file(e).unwrap();\n\n writeln!(handle, \"{}\", e.to_string_lossy()).unwrap();\n\n },\n\n }\n\n } \n\n if e.is_dir() {\n\n fs::remove_dir_all(e).unwrap();\n\n writeln!(handle, \"{}\", e.to_string_lossy()).unwrap();\n\n } \n\n });\n\n\n\n}", "file_path": "src/assembly/cleaner.rs", "rank": 58, "score": 85070.25574967837 }, { "content": "fn translate_dna(insert: &str) -> String {\n\n let libs = get_dna_libs();\n\n let dna = insert.to_uppercase();\n\n\n\n let mut translate = String::new();\n\n\n\n dna.chars()\n\n .for_each(|b| {\n\n let base = libs.get(&b).unwrap();\n\n translate.push(*base);\n\n });\n\n\n\n translate\n\n}\n\n\n", "file_path": "src/qc/tag.rs", "rank": 59, "score": 81396.48467309562 }, { "content": "pub fn print_formatted_duration(duration: u64) {\n\n let time = parse_duration(duration);\n\n println!(\"Execution time (HH:MM:SS): {}\", time);\n\n}\n\n\n", "file_path": "src/utils.rs", 
"rank": 60, "score": 81212.81119942639 }, { "content": "pub fn clean_reads(reads: &[RawSeq], params: &Option<String>, outdir: &Option<PathBuf>) {\n\n let dir = get_outdir(outdir);\n\n check_dir_exists(&dir);\n\n reads.iter().for_each(|read| {\n\n let mut run = Runner::new(&dir, read, params);\n\n\n\n if read.adapter_i7.as_ref().is_some() {\n\n // Check if i7 contains sequence\n\n run.dual_idx = true;\n\n }\n\n\n\n run.process_reads();\n\n });\n\n\n\n println!();\n\n}\n\n\n", "file_path": "src/qc/runner.rs", "rank": 61, "score": 76512.98915182133 }, { "content": "fn get_adapters(seq: &mut RawSeq, adapters: &[String]) {\n\n match adapters.len() {\n\n 1 => seq.get_adapter_auto(),\n\n 2 => get_adapter_single(seq, &adapters[1]),\n\n 3 => get_adapter_dual(seq, &adapters[1], &adapters[2]),\n\n 4 => get_insert_single(seq, &adapters[1], &adapters[2], &adapters[3]),\n\n 5 => get_insert_dual(seq, &adapters[1], &adapters[2], &adapters[3], &adapters[4]),\n\n _ => panic!(\n\n \"Unexpected cvs columns. It should be \\\n\n 2 columns for single index and 3 column for \\\n\n dual index. The app received {} columns\",\n\n adapters.len()\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 62, "score": 72734.63116321876 }, { "content": "fn check_spades<W: Write>(handle: &mut W) -> Result<()> {\n\n let out = Command::new(\"spades.py\").arg(\"--version\").output();\n\n match out {\n\n Ok(out) => writeln!(\n\n handle,\n\n \"[OK]\\t{}\",\n\n str::from_utf8(&out.stdout).unwrap().trim()\n\n )?,\n\n Err(_) => writeln!(handle, \"\\x1b[0;41m[NOT FOUND]\\x1b[0m\\tSPAdes\")?,\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/checker.rs", "rank": 63, "score": 72639.5779869448 }, { "content": "fn check_fastp<W: Write>(handle: &mut W) -> Result<()> {\n\n let out = Command::new(\"fastp\").arg(\"--version\").output();\n\n\n\n match out {\n\n Ok(out) => writeln!(\n\n handle,\n\n \"[OK]\\t{}\",\n\n str::from_utf8(&out.stderr).unwrap().trim()\n\n )?,\n\n Err(_) => writeln!(handle, \"\\x1b[0;41m[NOT FOUND]\\x1b[0m\\tfastp\")?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/checker.rs", "rank": 64, "score": 72639.5779869448 }, { "content": "fn new_input(matches: &ArgMatches) {\n\n let path = matches.value_of(\"dir\").expect(\"IS NOT A VALID FILE PATH\");\n\n let len = matches\n\n .value_of(\"len\")\n\n .unwrap()\n\n .parse::<usize>()\n\n .expect(\"NOT AN INTEGER\");\n\n let sep = matches\n\n .value_of(\"sep\")\n\n .unwrap()\n\n .parse::<char>()\n\n .expect(\"SEPARATOR SHOULD BE A SINGLE CHARACTER\");\n\n let iscsv = matches.is_present(\"csv\");\n\n let mut init = Init::new(path, len, sep, iscsv);\n\n\n\n init.initialize_input_file();\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 65, "score": 71657.22425098195 }, { "content": "fn get_adapter_rename(seq: &mut RawSeq, adapters: &[String]) {\n\n match adapters.len() {\n\n 1 => panic!(\"MISSING AN OUTPUT NAME COLUMN\"),\n\n 2 => {\n\n seq.get_output_name(&adapters[1]);\n\n seq.get_adapter_auto();\n\n }\n\n\n\n 3 => {\n\n seq.get_output_name(&adapters[1]);\n\n get_adapter_single(seq, &adapters[2]);\n\n }\n\n\n\n 4 => {\n\n seq.get_output_name(&adapters[1]);\n\n get_adapter_dual(seq, &adapters[2], &adapters[3]);\n\n }\n\n 5 => {\n\n seq.get_output_name(&adapters[1]);\n\n get_insert_single(seq, &adapters[2], &adapters[3], &adapters[4]);\n\n }\n\n 6 => {\n\n seq.get_output_name(&adapters[1]);\n\n get_insert_dual(seq, &adapters[2], &adapters[3], &adapters[4], &adapters[5]);\n\n }\n\n\n\n _ => panic!(\"TOO MANY COLUMN. 
SIX MAX FOR RENAMING\"),\n\n }\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 66, "score": 71246.04831201537 }, { "content": "fn parse_input_ini<R: BufRead>(buff: R, raw_seqs: &mut Vec<RawSeq>, lcount: &mut usize) {\n\n buff.lines()\n\n .filter_map(|ok| ok.ok())\n\n .skip(1)\n\n .for_each(|line| {\n\n let mut seq = RawSeq::new();\n\n let line = split_line(&line, false);\n\n let id = String::from(&line[0]);\n\n let path = PathBuf::from(&line[1]);\n\n let iscsv = false;\n\n let is_id = false;\n\n let reads = ReadFinder::get(&path, &id, is_id, iscsv);\n\n check_reads(&reads, &id);\n\n seq.get_id(&id);\n\n seq.get_reads(&reads);\n\n seq.get_adapter_auto();\n\n let is_rename = false;\n\n seq.get_dir(is_id, is_rename);\n\n raw_seqs.push(seq);\n\n *lcount += 1;\n\n });\n\n}\n\n\n", "file_path": "src/qc/parser.rs", "rank": 67, "score": 70399.3281020771 }, { "content": "fn parse_gunzip_fastq(input: &Path) -> FastqStats {\n\n let file = File::open(input).unwrap();\n\n let reader = BufReader::new(file);\n\n let decompressor = MultiGzDecoder::new(reader);\n\n let buff = BufReader::new(decompressor);\n\n\n\n parse_fastq(buff, input)\n\n}\n\n\n", "file_path": "src/stats/fastq.rs", "rank": 68, "score": 69752.21262105234 }, { "content": "fn parse_unzip_fastq(input: &Path) -> FastqStats {\n\n let file = File::open(input).unwrap();\n\n let buff = BufReader::new(file);\n\n parse_fastq(buff, input)\n\n}\n\n\n", "file_path": "src/stats/fastq.rs", "rank": 69, "score": 69752.21262105234 }, { "content": "fn parse_fastq<R: BufRead>(buff: R, input: &Path) -> FastqStats {\n\n let stdout = io::stdout();\n\n let mut outbuff = io::BufWriter::new(stdout);\n\n\n\n write!(outbuff, \"Processing {:?}\\t\", &input.file_name().unwrap()).unwrap();\n\n\n\n let mut reads: u32 = 0;\n\n let mut sq_per_read: Vec<SeqReads> = Vec::new();\n\n let mut qscores: Vec<QScore> = Vec::new();\n\n\n\n buff.lines()\n\n .filter_map(|ok| ok.ok())\n\n .filter(|recs| !recs.is_empty())\n\n .enumerate()\n\n .for_each(|(idx, recs)| match idx % 4 {\n\n 0 => {\n\n if !&recs.starts_with('@') {\n\n panic!(\n\n \"{:?} IS INVALID FASTQ. 
\\\n\n LOOKING FOR '@' FOUND '{}' at line {}\",\n", "file_path": "src/stats/fastq.rs", "rank": 70, "score": 61553.911516790045 }, { "content": "fn get_outdir(outdir: &Option<PathBuf>) -> PathBuf {\n\n match outdir {\n\n Some(dir) => dir.clone(),\n\n None => PathBuf::from(\"assemblies\"),\n\n }\n\n}\n\n\n", "file_path": "src/assembly/runner.rs", "rank": 71, "score": 61047.192452962365 }, { "content": "fn get_outdir(outdir: &Option<PathBuf>) -> PathBuf {\n\n match outdir {\n\n Some(dir) => dir.clone(),\n\n None => PathBuf::from(\"clean_reads\"),\n\n }\n\n}\n\n\n", "file_path": "src/qc/runner.rs", "rank": 72, "score": 61047.192452962365 }, { "content": "#[inline(always)]\n\nfn sum_of_square(vec: &[f64]) -> f64 {\n\n let d: f64 = vec.iter().map(|val| val.powf(2.0)).sum();\n\n d\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 73, "score": 59306.153678952374 }, { "content": "fn cumsum(vec: &[u32]) -> Vec<u32> {\n\n let mut csum = Vec::new();\n\n let mut sum = 0;\n\n vec.iter().for_each(|v| {\n\n sum += v;\n\n csum.push(sum);\n\n });\n\n csum\n\n}\n\n\n\npub struct NStats {\n\n sorted_contigs: Vec<u32>,\n\n csum_contigs: Vec<u32>,\n\n sum_contigs: u32,\n\n pub n50: u32,\n\n pub n75: u32,\n\n pub n90: u32,\n\n}\n\n\n\nimpl NStats {\n", "file_path": "src/stats/math.rs", "rank": 74, "score": 58545.52087217284 }, { "content": "fn print_dryrun(dirs: &[SeqReads]) -> Result<()> {\n\n let out = io::stdout();\n\n let mut handle = io::BufWriter::new(out);\n\n\n\n writeln!(handle, \"\\x1b[0;33mTotal samples: {}\\n\\x1b[0m\", dirs.len())?;\n\n dirs.iter().for_each(|e| {\n\n writeln!(handle, \"\\x1b[0;32mID\\t\\t: {}\\x1b[0m\", e.id).unwrap();\n\n writeln!(handle, \"Dir\\t\\t: {}\", e.dir.to_string_lossy()).unwrap();\n\n writeln!(handle, \"Read 1\\t\\t: {}\", e.read_1.to_string_lossy()).unwrap();\n\n writeln!(handle, \"Read 2\\t\\t: {}\", e.read_2.to_string_lossy()).unwrap();\n\n\n\n if e.singleton.is_some() {\n\n writeln!(\n\n handle,\n\n \"Singleton\\t: {}\",\n\n e.singleton.as_ref().unwrap().to_string_lossy()\n\n )\n\n .unwrap();\n\n }\n\n\n\n writeln!(handle).unwrap();\n\n });\n\n Ok(())\n\n}\n", "file_path": "src/assembly/mod.rs", "rank": 75, "score": 57310.87622641886 }, { "content": "fn variance(vec: &[u32], mean: &f64) -> f64 {\n\n let d_mean = dev_mean(vec, mean);\n\n let n = vec.len() as f64 - 1.0;\n\n sum_of_square(&d_mean) / n\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 76, "score": 55991.479647599896 }, { "content": "fn sort_vec_desc(vec: &[u32]) -> Vec<u32> {\n\n let mut sorted_vec = vec.to_vec();\n\n sorted_vec.sort_by_key(|v| Reverse(*v));\n\n\n\n sorted_vec\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 77, "score": 54928.61459184575 }, { "content": "#[inline]\n\nfn sort_vector_asc(vec: &[u32]) -> Vec<u32> {\n\n let mut sorted_vec = vec.to_vec();\n\n sorted_vec.sort_unstable();\n\n\n\n sorted_vec\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 78, "score": 54928.61459184575 }, { "content": "#[inline(always)]\n\nfn dev_mean(vec: &[u32], mean: &f64) -> Vec<f64> {\n\n vec.iter().map(|&val| val as f64 - *mean).collect()\n\n}\n\n\n", "file_path": "src/stats/math.rs", "rank": 79, "score": 52049.06732991791 }, { "content": "fn main() {\n\n let version = crate_version!();\n\n let time = Instant::now();\n\n cli::parse_cli(version);\n\n let duration = time.elapsed();\n\n\n\n if duration.as_secs() < 60 {\n\n println!(\"Execution time: {:?}\", duration);\n\n } else {\n\n utils::print_formatted_duration(duration.as_secs());\n\n }\n\n}\n", "file_path": "src/main.rs", 
"rank": 80, "score": 43966.68389969084 }, { "content": "fn parse_duration(duration: u64) -> String {\n\n let sec = (duration % 60) as u32;\n\n let min = ((duration / 60) % 60) as u32;\n\n let hours = ((duration / 60) / 60) as u32;\n\n let time = NaiveTime::from_hms(hours, min, sec);\n\n time.format(\"%H:%M:%S\").to_string()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 81, "score": 35970.44764260522 }, { "content": "fn check_dir_exists(dir: &Path) {\n\n if dir.exists() {\n\n panic!(\"{:?} DIR EXISTS. PLEASE RENAME OR REMOVE IT\", dir);\n\n } else {\n\n fs::create_dir_all(dir).expect(\"CAN'T CREATE ASSEMBLY DIR\");\n\n }\n\n}\n\n\n", "file_path": "src/assembly/runner.rs", "rank": 82, "score": 35853.16947715019 }, { "content": "fn check_dir_exists(dir: &Path) {\n\n if dir.exists() {\n\n panic!(\"{:?} DIR EXISTS. PLEASE RENAME OR REMOVE IT\", dir);\n\n } else {\n\n // if not create one\n\n fs::create_dir_all(dir).expect(\"CAN'T CREATE CLEAN READ DIR\");\n\n }\n\n}\n\n\n", "file_path": "src/qc/runner.rs", "rank": 83, "score": 35853.16947715019 }, { "content": "fn get_kernel_version(sysinfo: &System) -> String {\n\n match sysinfo.get_kernel_version() {\n\n Some(i) => i,\n\n None => String::from(\"UNKNOWN\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 84, "score": 34986.59266054722 }, { "content": "fn get_os_version(sysinfo: &System) -> String {\n\n match sysinfo.get_os_version() {\n\n Some(i) => i,\n\n None => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 85, "score": 34986.59266054722 }, { "content": "fn get_os_name(sysinfo: &System) -> String {\n\n match sysinfo.get_name() {\n\n Some(i) => i,\n\n None => String::from(\"UNKNOWN\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 86, "score": 34986.59266054722 }, { "content": "fn parse_input_csv<R: BufRead>(\n\n buff: R,\n\n input: &Path,\n\n raw_seqs: &mut Vec<RawSeq>,\n\n lcount: &mut usize,\n\n is_id: bool,\n\n is_rename: bool,\n\n) {\n\n buff.lines()\n\n .filter_map(|ok| ok.ok())\n\n .skip(1)\n\n .for_each(|line| {\n\n let mut seq = RawSeq::new();\n\n let lines = split_line(&line, true);\n\n let id = String::from(&lines[0]);\n\n let iscsv = true;\n\n let reads = ReadFinder::get(&input, &id, is_id, iscsv);\n\n check_reads(&reads, &id);\n\n seq.get_id(&id);\n\n seq.get_reads(&reads);\n", "file_path": "src/qc/parser.rs", "rank": 87, "score": 34948.297047972614 }, { "content": "fn get_dna_libs() -> HashMap<char, char> {\n\n let dna = String::from(\"ATGC\");\n\n let comp = String::from(\"TACG\");\n\n\n\n let mut trans = HashMap::new();\n\n\n\n dna.chars()\n\n .zip(comp.chars())\n\n .for_each(|(b, c)| {\n\n trans.insert(b,c);\n\n });\n\n \n\n trans\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/qc/tag.rs", "rank": 88, "score": 33246.68646519988 }, { "content": "\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn qscore_test () {\n\n let p = String::from(\"II!)\");\n\n let q = String::from(\"II\");\n\n\n\n let q_score = QScore::analyze_qscores(q.as_bytes());\n\n let p_score = QScore::analyze_qscores(p.as_bytes());\n\n\n\n assert_eq!(2, q_score.q_len);\n\n assert_eq!(40.0, q_score.mean_q);\n\n assert_eq!(0, q_score.low_bases);\n\n assert_eq!(2, p_score.low_bases);\n\n }\n", "file_path": "src/stats/qscores.rs", "rank": 89, "score": 30999.3230858398 }, { "content": "//! Heru Handika\n\n//! 
Only support Illumina 1.8 Quality Scores\n\n\n\npub struct QScore {\n\n pub q_len: u32,\n\n pub mean_q: f64,\n\n pub low_bases: u32,\n\n pub sum: u32,\n\n}\n\n\n\nimpl QScore {\n\n pub fn analyze_qscores(q_line: &[u8]) -> Self {\n\n let q_scores = q_line.iter()\n\n .map(|scr| \n\n { if *scr < 75 {\n\n *scr as u32 - 33\n\n } else {\n\n panic!(\"UNSUPPORTED Q-SCORE ENCODING!\");\n\n }\n\n })\n", "file_path": "src/stats/qscores.rs", "rank": 90, "score": 30998.16612990822 }, { "content": "\n\n #[test]\n\n fn decode_qscores_test() {\n\n let q = String::from(\"II\");\n\n let qs = QScore::analyze_qscores(q.as_bytes());\n\n\n\n // let res = vec![40, 40];\n\n\n\n assert_eq!(80, qs.sum);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"UNSUPPORTED Q-SCORE ENCODING!\")]\n\n fn decode_panic_qscore() {\n\n let p = String::from(\"II!)K\");\n\n\n\n QScore::analyze_qscores(&p.as_bytes());\n\n }\n\n}\n", "file_path": "src/stats/qscores.rs", "rank": 91, "score": 30996.083437558435 }, { "content": " .collect::<Vec<u32>>(); \n\n\n\n let mut q = Self {\n\n q_len: q_scores.iter().count() as u32,\n\n low_bases: q_scores.iter()\n\n .filter(|&x| *x < 20)\n\n .count() as u32,\n\n sum: q_scores.iter().sum(),\n\n mean_q: 0.0\n\n };\n\n\n\n q.mean();\n\n\n\n q\n\n }\n\n\n\n fn mean(&mut self) {\n\n self.mean_q = self.sum as f64 / self.q_len as f64\n\n }\n\n}\n", "file_path": "src/stats/qscores.rs", "rank": 92, "score": 30995.2876956253 }, { "content": "\n\n pub fn get_n75(&mut self) {\n\n let n75_len = self.n_len(0.75);\n\n let idx = self.get_n_idx(n75_len);\n\n self.n75 = self.sorted_contigs[idx];\n\n }\n\n\n\n pub fn get_n90(&mut self) {\n\n let n90_len = self.n_len(0.9);\n\n let idx = self.get_n_idx(n90_len);\n\n self.n90 = self.sorted_contigs[idx];\n\n }\n\n\n\n fn get_n_idx(&mut self, n: u32) -> usize {\n\n self.csum_contigs.iter().position(|i| *i >= n).unwrap()\n\n }\n\n\n\n fn n_len(&mut self, i: f64) -> u32 {\n\n let n = self.sum_contigs as f64 * i;\n\n\n", "file_path": "src/stats/math.rs", "rank": 93, "score": 30754.54916255806 }, { "content": " pub fn new(contigs: &[u32]) -> Self {\n\n let mut nstats = Self {\n\n sorted_contigs: sort_vec_desc(contigs),\n\n csum_contigs: Vec::new(),\n\n sum_contigs: contigs.iter().sum::<u32>(),\n\n n50: 0,\n\n n75: 0,\n\n n90: 0,\n\n };\n\n\n\n nstats.csum_contigs = cumsum(&nstats.sorted_contigs);\n\n\n\n nstats\n\n }\n\n\n\n pub fn get_n50(&mut self) {\n\n let n50_len = self.n_len(0.5);\n\n let idx = self.get_n_idx(n50_len);\n\n self.n50 = self.sorted_contigs[idx];\n\n }\n", "file_path": "src/stats/math.rs", "rank": 94, "score": 30753.72466039577 }, { "content": " n as u32\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use assert_approx_eq::assert_approx_eq;\n\n\n\n #[test]\n\n fn median_test() {\n\n let odd: Vec<u32> = vec![1, 4, 3, 5, 6];\n\n let even: Vec<u32> = vec![1, 4, 3, 5, 6, 6, 8, 10];\n\n assert_eq!(4.0, median(&odd));\n\n assert_eq!(5.5, median(&even));\n\n }\n\n #[test]\n\n fn var_test() {\n\n let data: Vec<u32> = vec![1, 4, 3, 5, 6, 6, 8, 10];\n\n let mean = 5.375;\n", "file_path": "src/stats/math.rs", "rank": 95, "score": 30753.43543439488 }, { "content": "//! Heru Handika\n\n//! 
Module for statistics\n\n\n\nuse std::cmp::Reverse;\n\n\n\n#[inline]\n", "file_path": "src/stats/math.rs", "rank": 96, "score": 30751.804864895075 }, { "content": " }\n\n\n\n #[test]\n\n fn sorted_vec_desc_test() {\n\n let a = vec![1, 2, 3];\n\n let res = vec![3, 2, 1];\n\n\n\n assert_eq!(res, sort_vec_desc(&a));\n\n }\n\n\n\n #[test]\n\n fn n50_stats_test() {\n\n let contigs = vec![2, 3, 4, 5, 6, 7, 8, 9, 10];\n\n let mut seq = NStats::new(&contigs);\n\n seq.get_n50();\n\n seq.get_n90();\n\n seq.get_n75();\n\n\n\n assert_eq!(8, seq.n50);\n\n assert_eq!(6, seq.n75);\n\n assert_eq!(4, seq.n90);\n\n }\n\n}\n", "file_path": "src/stats/math.rs", "rank": 97, "score": 30750.476969237316 }, { "content": " let exp = 7.982143;\n\n let res = variance(&data, &mean);\n\n assert_approx_eq!(exp, res, 6f64);\n\n }\n\n\n\n #[test]\n\n fn stdev_test() {\n\n let data: Vec<u32> = vec![1, 4, 3, 5, 6, 6, 8, 10];\n\n let mean = 5.375;\n\n\n\n let exp = 2.825269;\n\n let res = stdev(&data, &mean);\n\n assert_approx_eq!(exp, res, 6f64);\n\n }\n\n\n\n #[test]\n\n fn csum_test() {\n\n let a = vec![1, 2, 3];\n\n let res = vec![1, 3, 6];\n\n assert_eq!(res, cumsum(&a));\n", "file_path": "src/stats/math.rs", "rank": 98, "score": 30748.80917926822 }, { "content": "//! Heru Handika\n\n//! Modules to process sequencing data\n\n\n\nuse std::path::Path;\n\n\n\nuse crate::stats::math::{self, NStats};\n\nuse crate::stats::qscores::QScore;\n\n\n\npub struct SeqReads {\n\n pub seq_len: u32,\n\n pub gc_count: u32,\n\n pub n_count: u32,\n\n}\n\n\n\nimpl SeqReads {\n\n pub fn get_seq_stats(reads: &[u8]) -> Self {\n\n let mut seq = Self {\n\n seq_len: reads.iter().count() as u32,\n\n gc_count: 0,\n\n n_count: 0,\n", "file_path": "src/stats/sequence.rs", "rank": 99, "score": 30711.6496150497 } ]
Rust
examples/echo_server.rs
over-codes/oc-http
79051db0857225d437e499ef52782591a3ceca28
use std::{ error::Error }; use std::time::Duration; use log::{warn}; use env_logger::Env; use async_std::{ task, io::{ BufReader, BufWriter, }, net::{ TcpListener, }, }; use futures::{ prelude::*, AsyncRead, AsyncWrite, }; use oc_http::{ cookies::{Cookies, Cookie}, }; #[async_std::main] async fn main() -> Result<(), Box<dyn Error>> { env_logger::Builder::from_env(Env::default().default_filter_or("info")).init(); let listener = TcpListener::bind("127.0.0.1:8080").await?; let _local_addr = listener.local_addr()?; let mut incoming = listener.incoming(); while let Some(stream) = incoming.next().await { if let Ok(stream) = stream { task::spawn(handle_request(stream)); } } Ok(()) } async fn handle_request<S>(socket: S) where S: AsyncRead + AsyncWrite + Clone + Unpin { let mut reader = BufReader::new(socket.clone()); let mut writer = BufWriter::new(socket); let mut buf = vec![0; 65536]; let request = match oc_http::http(&mut reader, &mut buf).await { Ok(req) => req, Err(err) => { warn!("Error {}", err); return; }, }; let mut cookies = Cookies::new(&request); if request.path == "/echo" && request.method == "GET" { get_echo(&mut writer).await; } else if request.path == "/echo" && request.method == "POST" { post_echo(&mut reader, &mut writer).await; if let Some(_c) = cookies.get("Who") { writer.write(format!("You are a fool of a took!").as_bytes()).await.unwrap(); } } else { let mut res = oc_http::Response{ code: 404, reason: "NOT FOUND", headers: vec!(), }; cookies.add_cookie(Cookie::new("Who", "You fool!")); cookies.write_cookies(&mut res); oc_http::respond(&mut writer, res).await.unwrap(); } writer.flush().await.unwrap(); } async fn get_echo<S>(mut stream: &mut S) where S: AsyncWrite + Unpin { oc_http::respond(&mut stream, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); stream.write(b" <html> <body> <form method=\"POST\"> <input name=\"input\"></inpout> <input type=\"submit\"></input> </form> </body> </html> ").await.unwrap(); } async fn post_echo<W, R>(reader: &mut R, mut writer: &mut W) where W: AsyncWrite + Unpin, R: AsyncRead + Unpin, { oc_http::respond(&mut writer, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); let mut buf = vec![0; 10]; while let Ok(Ok(count)) = async_std::future::timeout(Duration::from_millis(10), reader.read(&mut buf)).await { if count == 0 { break; } writer.write_all(&buf[..count]).await.unwrap(); writer.flush().await.unwrap(); } }
use std::{ error::Error }; use std::time::Duration; use log::{warn}; use env_logger::Env; use async_std::{ task, io::{ BufReader, BufWriter, }, net::{ TcpListener, }, }; use futures::{ prelude::*, AsyncRead, AsyncWrite, }; use oc_http::{ cookies::{Cookies, Cookie}, }; #[async_std::main] async fn main() -> Result<(), Box<dyn Error>> { env_logger::Builder::from_env(Env::default().default_filter_or("info")).init(); let listener = TcpListener::bind("127.0.0.1:8080").await?; let _local_addr = listener.local_addr()?; let mut incoming = listener.incoming(); while let Some(stream) = incoming.next().await { if let Ok(stream) = stream { task::spawn(handle_request(stream)); } } Ok(()) } async fn handle_request<S>(socket: S) where S: AsyncRead + AsyncWrite + Clone + Unpin { let mut reader = BufReader::new(socket.clone()); let mut writer = BufWriter::new(socket); let mut buf = vec![0; 65536]; let request = match oc_http::http(&mut reader, &mut buf).await { Ok(req) => req, Err(err) => { warn!("Error {}", err); return; }, }; let mut cookies = Cookies::new(&request); if request.path == "/echo" && request.method == "GET" { get_echo(&mut writer).await; } else if request.path == "/echo" && request.method == "POS
async fn get_echo<S>(mut stream: &mut S) where S: AsyncWrite + Unpin { oc_http::respond(&mut stream, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); stream.write(b" <html> <body> <form method=\"POST\"> <input name=\"input\"></inpout> <input type=\"submit\"></input> </form> </body> </html> ").await.unwrap(); } async fn post_echo<W, R>(reader: &mut R, mut writer: &mut W) where W: AsyncWrite + Unpin, R: AsyncRead + Unpin, { oc_http::respond(&mut writer, oc_http::Response{ code: 200, reason: "OK", headers: vec!(), }).await.unwrap(); let mut buf = vec![0; 10]; while let Ok(Ok(count)) = async_std::future::timeout(Duration::from_millis(10), reader.read(&mut buf)).await { if count == 0 { break; } writer.write_all(&buf[..count]).await.unwrap(); writer.flush().await.unwrap(); } }
T" { post_echo(&mut reader, &mut writer).await; if let Some(_c) = cookies.get("Who") { writer.write(format!("You are a fool of a took!").as_bytes()).await.unwrap(); } } else { let mut res = oc_http::Response{ code: 404, reason: "NOT FOUND", headers: vec!(), }; cookies.add_cookie(Cookie::new("Who", "You fool!")); cookies.write_cookies(&mut res); oc_http::respond(&mut writer, res).await.unwrap(); } writer.flush().await.unwrap(); }
function_block-function_prefixed
[ { "content": "fn main() {\n\n println!(\"Hello world!\");\n\n}\n\n\n\n/*\n\nuse std::io;\n\nuse std::error::Error;\n\nuse env_logger::Env;\n\nuse async_trait::async_trait;\n\nuse async_std::{\n\n prelude::*,\n\n sync::Arc,\n\n net::{\n\n TcpListener,\n\n },\n\n};\n\n\n\n#[async_std::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\")).init();\n", "file_path": "examples/secure_server.rs", "rank": 0, "score": 57544.44730135905 }, { "content": "fn read_header_internal(input: &[u8]) -> IResult<&[u8], WebSocketHeader> {\n\n bits(read_header_internal_bits)(input)\n\n}\n\n\n", "file_path": "src/websocket.rs", "rank": 1, "score": 30309.37161696943 }, { "content": "fn read_header_internal_bits(input: (&[u8], usize)) -> IResult<(&[u8], usize), WebSocketHeader>\n\n{\n\n let (input, fin) = take(1usize)(input)?;\n\n let (input, _rsv1): ((&[u8], usize), u8) = take(1usize)(input)?;\n\n let (input, _rsv2): ((&[u8], usize), u8) = take(1usize)(input)?;\n\n let (input, _rsv3): ((&[u8], usize), u8) = take(1usize)(input)?;\n\n let (input, opcode) = take(4usize)(input)?;\n\n let (input, mask) = take(1usize)(input)?;\n\n let (input, payload_len) = take(7usize)(input)?;\n\n Ok((input, WebSocketHeader{fin, opcode, mask, payload_len, masking_key: vec!()}))\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::error::Error;\n\n use async_std::{\n\n prelude::*,\n\n task,\n\n net::{\n", "file_path": "src/websocket.rs", "rank": 2, "score": 26724.30003865812 }, { "content": " let mut cookies = HashMap::default();\n\n let iter_cookies = req.headers.get(\"Cookie\");\n\n if iter_cookies.is_none() {\n\n return Cookies{\n\n cookies,\n\n cookies_to_set: vec!(),\n\n }\n\n }\n\n for cookie in iter_cookies.unwrap().0.split(|x| *x == b';') {\n\n let cookie = match str::from_utf8(cookie) {\n\n Ok(s) => s,\n\n Err(_) => {\n\n warn!(\"Invalid cookie being ignored!\");\n\n continue;\n\n }\n\n };\n\n let cookie = Cookie::parse_encoded(cookie);\n\n if cookie.is_err() {\n\n warn!(\"Invalid cookie being ignored!\");\n\n continue;\n", "file_path": "src/cookies.rs", "rank": 3, "score": 22092.914549581514 }, { "content": "use std::{\n\n collections::HashMap,\n\n str,\n\n};\n\n\n\nuse log::{warn};\n\npub use cookie::Cookie;\n\n\n\nuse crate::{\n\n Request,\n\n Response,\n\n};\n\n\n\npub struct Cookies<'c> {\n\n cookies: HashMap<String, Cookie<'c>>,\n\n cookies_to_set: Vec<Cookie<'c>>,\n\n}\n\n\n\nimpl<'a> Cookies<'a> {\n\n pub fn new(req: &'a Request) -> Self {\n", "file_path": "src/cookies.rs", "rank": 4, "score": 22091.60068713371 }, { "content": " }\n\n let cookie = cookie.unwrap();\n\n cookies.insert(String::from(cookie.name()), cookie);\n\n }\n\n Cookies {\n\n cookies,\n\n cookies_to_set: vec!(),\n\n }\n\n }\n\n\n\n pub fn get(&self, s: &str) -> Option<&'a Cookie> {\n\n self.cookies.get(s)\n\n }\n\n\n\n pub fn add_cookie(&mut self, cookie: Cookie<'a>) {\n\n self.cookies_to_set.push(cookie.clone());\n\n self.cookies.insert(String::from(cookie.name()), cookie);\n\n }\n\n\n\n pub fn write_cookies(&self, resp: &mut Response) {\n\n for cookie in &self.cookies_to_set {\n\n resp.headers.push((\"Set-Cookie\".into(), Vec::from(format!(\"{}\", cookie.encoded()))));\n\n }\n\n }\n\n}", "file_path": "src/cookies.rs", "rank": 5, "score": 22088.15177456114 }, { "content": "};\n\n\n\n#[async_std::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n // setup the logger\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\")).init();\n\n // 
start the server; this uses standard stdlib-esque tools rather than saving a few\n\n // lines by just sending the ToSocketAddr item.\n\n let listener = TcpListener::bind(\"127.0.0.1:8080\").await?;\n\n let mut incoming = listener.incoming();\n\n // Accepting incoming reqeusts\n\n while let Some(stream) = incoming.next().await {\n\n if let Ok(stream) = stream {\n\n task::spawn(handle_request(stream));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn handle_request<S>(stream: S)\n\nwhere S: AsyncRead + AsyncWrite + Clone + Unpin\n\n{\n\n // parse the http request; prefer using BufWriter/BufReader for performance.\n\n let mut reader = BufReader::new(stream.clone());\n\n let mut writer = BufWriter::new(stream);\n\n // Read the request\n\n match oc_http::http(&mut reader).await {\n\n Ok(req) => req,\n\n Err(err) => {\n\n warn!(\"Error {}\", err);\n\n return;\n\n },\n\n };\n\n oc_http::respond(&mut writer, oc_http::Response{\n\n code: 200,\n\n reason: \"OK\",\n\n headers: vec!(),\n\n }).await.unwrap();\n\n // after sending the HTTP header, we can write anything to the body\n\n writer.write(b\"\n\n<html>\n\n <body>\n\n <h1>Hello world!</h1>\n\n </body>\n\n</html>\n\n \").await.unwrap();\n\n writer.flush().await.unwrap();\n\n}\n", "file_path": "README.md", "rank": 12, "score": 39.02132953454186 }, { "content": " while let Some(stream) = incoming.next().await {\n\n if let Ok(stream) = stream {\n\n task::spawn(handle_request(stream));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn handle_request<S>(stream: S)\n\nwhere S: AsyncRead + AsyncWrite + Clone + Unpin\n\n{\n\n // parse the http request; we'll make a /echo service;\n\n // first get a reader and writer buffer thing to improve\n\n // performance.\n\n let mut reader = BufReader::new(stream.clone());\n\n // Read the response\n\n let mut buf = vec![0; 65536];\n\n let request = match oc_http::http(&mut reader, &mut buf).await {\n\n Ok(req) => req,\n\n Err(err) => {\n", "file_path": "examples/websocket_server.rs", "rank": 13, "score": 34.026937197532476 }, { "content": " while let Some(stream) = incoming.next().race(token.wait()).await {\n\n if let Ok(stream) = stream {\n\n func(stream);\n\n }\n\n }\n\n });\n\n (local_addr, stopper)\n\n }\n\n\n\n #[async_std::test]\n\n async fn test_hello_world() -> Result<(), Box<dyn Error>> {\n\n let (sock, stop) = server(|stream| {\n\n task::spawn(async move {\n\n let mut reader = BufReader::new(stream.clone());\n\n let mut buf = vec![0; 65536];\n\n let request = match crate::http(&mut reader, &mut buf).await {\n\n Ok(req) => req,\n\n Err(_) => {\n\n return;\n\n },\n", "file_path": "src/websocket.rs", "rank": 14, "score": 31.127877337928734 }, { "content": " TcpListener,\n\n },\n\n io::{\n\n BufReader,\n\n BufWriter,\n\n }\n\n };\n\n use super::*;\n\n\n\n #[async_std::test]\n\n async fn test_hello_world() -> Result<(), Box<dyn Error>> {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").await?;\n\n let local_addr = listener.local_addr().unwrap();\n\n let handle = task::spawn(async move {\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n\n let stream = stream.unwrap();\n\n let mut reader = BufReader::new(stream.clone());\n\n let mut writer = BufWriter::new(stream);\n\n let mut buf = vec![0; 64000];\n", "file_path": "src/lib.rs", "rank": 15, "score": 30.941760296213182 }, { "content": "use std::{\n\n error::Error\n\n};\n\nuse log::{warn};\n\n\n\nuse env_logger::Env;\n\nuse async_std::{\n\n task,\n\n io::{\n\n BufReader,\n\n BufWriter,\n\n },\n\n net::{\n\n 
TcpListener,\n\n },\n\n};\n\n\n\nuse futures::{\n\n prelude::*,\n\n AsyncRead,\n", "file_path": "examples/websocket_server.rs", "rank": 16, "score": 29.951169116912727 }, { "content": "use std::{\n\n error::Error\n\n};\n\nuse log::{warn};\n\n\n\nuse env_logger::Env;\n\nuse async_std::{\n\n task,\n\n io::{\n\n BufReader,\n\n BufWriter,\n\n },\n\n net::TcpListener,\n\n};\n\n\n\nuse futures::{\n\n prelude::*,\n\n AsyncRead,\n\n AsyncWrite,\n\n};\n", "file_path": "examples/simple_server.rs", "rank": 17, "score": 29.72552267123848 }, { "content": " assert_eq!(res.status(), 200);\n\n assert_eq!(res.header(\"Content-Type\").unwrap(), \"text/html; charset=utf-8\");\n\n assert_eq!(res.into_string().unwrap(), \"<h1>Hello world!</h1>\");\n\n Ok(())\n\n }\n\n\n\n #[async_std::test]\n\n async fn test_server_parses_headers() -> Result<(), Box<dyn Error>> {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").await?;\n\n let local_addr = listener.local_addr().unwrap();\n\n let handle = task::spawn(async move {\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n\n let stream = stream.unwrap();\n\n let mut reader = BufReader::new(stream.clone());\n\n let mut writer = BufWriter::new(stream);\n\n let mut buf = vec![0; 65536];\n\n let req = http(&mut reader, &mut buf).await.unwrap();\n\n println!(\"req: {:?}\", req);\n\n assert_eq!(req.method, \"GET\");\n", "file_path": "src/lib.rs", "rank": 18, "score": 28.7199625242604 }, { "content": "\n\n#[async_std::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\")).init();\n\n\n\n // start the server; this uses standard stdlib-esque tools rather than saving a few\n\n // lines by just sending the ToSocketAddr item.\n\n let listener = TcpListener::bind(\"127.0.0.1:8080\").await?;\n\n let mut incoming = listener.incoming();\n\n // Accepting incoming reqeusts\n\n while let Some(stream) = incoming.next().await {\n\n if let Ok(stream) = stream {\n\n task::spawn(handle_request(stream));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn handle_request<S>(stream: S)\n\nwhere S: AsyncRead + AsyncWrite + Clone + Unpin\n", "file_path": "examples/simple_server.rs", "rank": 19, "score": 27.466718130659626 }, { "content": "{\n\n // parse the http request; prefer using BufWriter/BufReader for performance.\n\n let mut reader = BufReader::new(stream.clone());\n\n let mut writer = BufWriter::new(stream);\n\n let mut buf = vec![0; 65536];\n\n // Read the request\n\n match oc_http::http(&mut reader, &mut buf).await {\n\n Ok(req) => req,\n\n Err(err) => {\n\n warn!(\"Error {}\", err);\n\n return;\n\n },\n\n };\n\n oc_http::respond(&mut writer, oc_http::Response{\n\n code: 200,\n\n reason: \"OK\",\n\n headers: vec!(),\n\n }).await.unwrap();\n\n // after sending the HTTP header, we can write anything to the body\n\n writer.write(b\"\n\n<html>\n\n <body>\n\n <h1>Hello world!</h1>\n\n </body>\n\n</html>\n\n \").await.unwrap();\n\n writer.flush().await.unwrap();\n\n}", "file_path": "examples/simple_server.rs", "rank": 20, "score": 25.455970905921024 }, { "content": "// body contents (such as a HTML page).\n\npub async fn send_content<S>(writer: &mut S, contents: &[u8]) -> io::Result<()>\n\nwhere S: AsyncWrite + Unpin\n\n{\n\n let mut offset = 0;\n\n while let Ok(count) = writer.write(&contents[offset..]).await {\n\n offset += count;\n\n if offset == contents.len() {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::error::Error;\n\n use 
async_std::{\n\n task,\n\n net::{\n", "file_path": "src/lib.rs", "rank": 21, "score": 23.934964403691325 }, { "content": " }\n\n Ok(lines)\n\n}\n\n\n\n/// Parses a stream for the http request; this does not parse the body at all,\n\n/// so it will remain entirely intact in stream.\n\n/// \n\n/// I strongly recommend you use a BufReader for the input stream. The size of the\n\n/// provided buffer bounds the maximum number/length of the headers, so don't be too\n\n/// stingy with it.\n\npub async fn http<'a, S>(stream: &mut S, buf: &'a mut [u8]) -> std::io::Result<Request<'a>>\n\nwhere S: AsyncRead + Unpin\n\n{\n\n let lines = populate_buffer(stream, buf).await?;\n\n if lines == 0 {\n\n // if the client disconnects before finishing the first line, we might have a problem\n\n return Err(io::ErrorKind::InvalidInput.into());\n\n }\n\n // 1 status line, then a buncha headers\n\n let mut raw_headers = vec![httparse::EMPTY_HEADER; lines - 1];\n", "file_path": "src/lib.rs", "rank": 22, "score": 23.011913733605365 }, { "content": "# Overcodes HTTP Server\n\n\n\nThis is a super simple HTTP server that doesn't get in the way.\n\n\n\nIt doesn't do much, but it doesn't hide much. Additional features are exposed through various\n\nmodules, including:\n\n\n\n- Setting/getting cookies\n\n- Websockets\n\n\n\nThis library is async, but does not dictate whether you use tokio, async-std, or something else.\n\n\n\nMy goal is to write a HTTP library that isn't confusing, doesn't prevent anything (even if it may\n\nrequire a few extra lines to get my goal), and has reasonable performance.\n\n\n\n## Getting started\n\n\n\nIf you're up for it, look at [examples/simple_server.rs], [examples/websocket_server.rs], and [examples/echo_server.rs]. They'll be up-to-date, unlike this doc.\n\n\n\n[examples/echo_server.rs]: https://github.com/over-codes/oc-http/blob/main/examples/echo_server.rs\n\n[examples/websocket_server.rs]: https://github.com/over-codes/oc-http/blob/main/examples/websocket_server.rs\n\n[examples/simple_server.rs]: https://github.com/over-codes/oc-http/blob/main/examples/simple_server.rs\n\n\n\nAdd to Cargo.toml\n\n\n\n```\n\noc_http = \"0.1.0\"\n\n```\n\n\n\nI use async-std because it's easy, plus logging stuff;\n\n\n\n```\n\nasync-std = {version = \"1.8.0\", features = [\"attributes\"]}\n\nlog = \"0.4\"\n\nenv_logger = \"0.8\"\n\n```\n\n\n\nCreate a server:\n\n\n\n```\n\nuse std::error::Error;\n\nuse log::warn;\n\nuse env_logger::Env;\n\nuse async_std::{\n\n task,\n\n io::{\n\n BufReader,\n\n BufWriter,\n\n },\n\n net::TcpListener,\n\n};\n\nuse futures::{\n\n prelude::*,\n\n AsyncRead,\n\n AsyncWrite,\n", "file_path": "README.md", "rank": 23, "score": 22.946030354801476 }, { "content": " TcpListener,\n\n SocketAddr,\n\n TcpStream,\n\n },\n\n io::{\n\n BufReader,\n\n }\n\n };\n\n use websocket::client::ClientBuilder;\n\n \n\n use super::*;\n\n use crate::stopper::Stopper;\n\n\n\n async fn server<F: Fn(TcpStream) + 'static + Send>(func: F) -> (SocketAddr, Stopper) {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").await.unwrap();\n\n let local_addr = listener.local_addr().unwrap();\n\n let (stopper, token) = Stopper::new();\n\n // Accepting incoming reqeusts\n\n task::spawn(async move {\n\n let mut incoming = listener.incoming();\n", "file_path": "src/websocket.rs", "rank": 24, "score": 21.503952036398875 }, { "content": " AsyncWrite,\n\n};\n\n\n\nuse oc_http::websocket::{\n\n self,\n\n WebSocketReader,\n\n WebSocketWriter,\n\n WebSocketError,\n\n};\n\n\n\n#[async_std::main]\n\nasync fn main() -> 
Result<(), Box<dyn Error>> {\n\n env_logger::Builder::from_env(Env::default().default_filter_or(\"info\")).init();\n\n\n\n // start the server; we could reduce this to one line, but then you have to write an entire struct\n\n // to support it (or learn how to use super-dense map reduces)\n\n let listener = TcpListener::bind(\"127.0.0.1:8080\").await?;\n\n let _local_addr = listener.local_addr()?;\n\n let mut incoming = listener.incoming();\n\n // Accepting incoming reqeusts\n", "file_path": "examples/websocket_server.rs", "rank": 25, "score": 21.042327010829286 }, { "content": " MessageType::Text => 0x1,\n\n MessageType::Binary => 0x2,\n\n MessageType::Close => 0x8,\n\n MessageType::Ping => 0x9,\n\n MessageType::Pong => 0xA,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Message{\n\n pub typ: MessageType,\n\n pub contents: Vec<u8>,\n\n}\n\n\n\npub async fn upgrade<'a, S>(req: &Request<'a>, mut stream: S) -> Result<(WebSocketReader<S>, WebSocketWriter<S>), WebSocketError>\n\nwhere S: AsyncRead + AsyncWrite + Clone + Unpin\n\n{\n\n // sanity check that required headers are in place\n\n match req.headers.get(\"Connection\") {\n", "file_path": "src/websocket.rs", "rank": 26, "score": 19.85699336291072 }, { "content": "use async_std::{\n\n io,\n\n net::{\n\n TcpStream,\n\n },\n\n channel::{\n\n Sender,\n\n Receiver,\n\n bounded,\n\n },\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct StopToken {\n\n done: Receiver<u8>,\n\n}\n\n\n\nimpl StopToken {\n\n pub async fn wait(&self) -> Option<io::Result<TcpStream>> {\n\n while let Ok(_) = self.done.recv().await {\n", "file_path": "src/stopper.rs", "rank": 27, "score": 18.59932978997151 }, { "content": " warn!(\"Error {}\", err);\n\n return;\n\n },\n\n };\n\n // make sure it goes to /ws\n\n if request.path == \"/ws\" && request.method == \"GET\" {\n\n let ws = websocket::upgrade(&request, stream).await.unwrap();\n\n handle_websocket(ws.0, ws.1).await;\n\n } else {\n\n let mut writer = BufWriter::new(stream);\n\n oc_http::respond(&mut writer, oc_http::Response{\n\n code: 404,\n\n reason: \"NOT FOUND\",\n\n headers: vec!(),\n\n }).await.unwrap();\n\n writer.flush().await.unwrap();\n\n }\n\n}\n\n\n\nasync fn handle_websocket<S>(mut rdr: WebSocketReader<S>, mut wrt: WebSocketWriter<S>)\n", "file_path": "examples/websocket_server.rs", "rank": 28, "score": 18.11091375538756 }, { "content": " Response{\n\n code: 200,\n\n reason: \"OK\",\n\n headers: vec!(),\n\n }\n\n }\n\n}\n\n\n\n/// populates the provided buffer with bytes from the stream.\n\nasync fn populate_buffer<S>(stream: &mut S, buf: &mut [u8]) -> std::io::Result<usize>\n\nwhere S: AsyncRead + Unpin\n\n{\n\n let mut lines = 0;\n\n let mut i = 0;\n\n let mut j;\n\n let mut last_newline_at = 0;\n\n 'read_loop: loop {\n\n j = i+1;\n\n // read one byte\n\n let count = stream.read(&mut buf[i..j]).await?;\n", "file_path": "src/lib.rs", "rank": 29, "score": 17.81089718462167 }, { "content": " let mut req = httparse::Request::new(&mut raw_headers);\n\n let res = req.parse(buf).or(Err(io::ErrorKind::InvalidInput))?;\n\n match res {\n\n httparse::Status::Complete(_) => {\n\n // sgtm\n\n },\n\n httparse::Status::Partial => {\n\n // this should never happen, since we made sure all headers were read\n\n return Err(io::ErrorKind::InvalidInput.into());\n\n }\n\n }\n\n // Accept any known version (at this time, I've only seen 1.1 and 1.0)\n\n if req.version.unwrap_or(1) > 2 {\n\n // not supported\n\n warn!(\"HTTP/1.{} request rejected; don't support that\", &req.version.unwrap_or(1));\n\n 
return Err(io::ErrorKind::InvalidInput.into());\n\n }\n\n let mut headers: HashMap<&str, (&[u8], Option<Vec<&[u8]>>)> = HashMap::default();\n\n for header in req.headers {\n\n if let Some(existing) = headers.get_mut(header.name) {\n", "file_path": "src/lib.rs", "rank": 30, "score": 17.467200169832154 }, { "content": " Some(header) => {\n\n let mut ok = false;\n\n if let Ok(txt) = std::str::from_utf8(header.0) {\n\n if let Some(_) = txt.find(\"Upgrade\") {\n\n ok = true;\n\n }\n\n }\n\n if !ok {\n\n Err(WebSocketError::ConnectionNotUpgrade)?\n\n }\n\n },\n\n None => Err(WebSocketError::NoConnectionHeader)?,\n\n };\n\n match req.headers.get(\"Upgrade\") {\n\n Some(header) => if header.0 != b\"websocket\" { Err(WebSocketError::UpgradeNotToWebSocket)? },\n\n None => Err(WebSocketError::NoUpgradeHeader)?,\n\n };\n\n match req.headers.get(\"Sec-WebSocket-Version\") {\n\n Some(header) => if header.0 != b\"13\" { Err(WebSocketError::WrongVersion)? },\n\n None => Err(WebSocketError::WrongVersion)?,\n", "file_path": "src/websocket.rs", "rank": 31, "score": 16.658699651961435 }, { "content": " let v = existing.1.get_or_insert(vec!());\n\n v.push(header.value);\n\n } else {\n\n headers.insert(header.name, (header.value, None));\n\n }\n\n }\n\n // Convert the response to a request and return\n\n let request = Request{\n\n method: String::from(req.method.unwrap_or(\"GET\")),\n\n path: String::from(req.path.unwrap_or(\"/\")),\n\n headers,\n\n };\n\n //info!(\"HTTP/1.1 {method} {path}\", method=request.method, path=request.path);\n\n Ok(request)\n\n}\n\n\n\n/// Respond writes the provided response to the stream; this should be called before\n\n/// any part of the body is written. After being called, the body can be written\n\n/// directly to the stream.\n\npub async fn respond<S>(stream: &mut S, response: Response) -> io::Result<()>\n", "file_path": "src/lib.rs", "rank": 32, "score": 16.132911594769652 }, { "content": "}\n\n\n\nimpl<S> WebSocketWriter<S>\n\nwhere S: AsyncWrite + Unpin\n\n{\n\n pub async fn write(&mut self, msg: &Message) -> Result<(), WebSocketError> {\n\n let res = WebSocketHeader{\n\n fin: 1,\n\n opcode: msg.typ.into(),\n\n mask: 0,\n\n payload_len: msg.contents.len() as u64,\n\n masking_key: vec!(),\n\n };\n\n self.stream.write_all(&mut res.to_vec()).await?;\n\n self.stream.write_all(&msg.contents).await?;\n\n self.stream.flush().await?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/websocket.rs", "rank": 33, "score": 15.521071812137702 }, { "content": "use std::{\n\n io,\n\n convert::TryFrom,\n\n fmt,\n\n};\n\n\n\nuse sha1::{Sha1, Digest};\n\nuse crate::{respond, Request, Response};\n\nuse nom::{\n\n IResult,\n\n bits::{\n\n bits,\n\n complete::take,\n\n },\n\n};\n\nuse futures::{\n\n AsyncRead,\n\n AsyncWrite,\n\n AsyncWriteExt,\n\n AsyncReadExt,\n", "file_path": "src/websocket.rs", "rank": 34, "score": 15.499498041269685 }, { "content": " self.buffered_message = Some((typ, contents));\n\n } else if header.fin == 0 {\n\n match &mut self.buffered_message {\n\n Some((_, old)) => {\n\n old.append(&mut contents);\n\n },\n\n None => return Err(WebSocketError::BadOpcode),\n\n }\n\n } else {\n\n let (typ, contents) = self.buffered_message.take().unwrap_or((typ, contents));\n\n return Ok(Message{typ, contents});\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct WebSocketWriter<S>\n\nwhere S: AsyncWrite + Unpin\n\n{\n\n stream: S,\n", "file_path": "src/websocket.rs", "rank": 35, "score": 15.43343474310873 }, { "content": "use std::{\n\n collections::HashMap,\n\n 
io,\n\n};\n\nuse log::{warn};\n\n\n\nuse futures::{\n\n prelude::*,\n\n AsyncWrite,\n\n};\n\n\n\n/// rexport of the urlencoded crate for convenience.\n\npub use form_urlencoded;\n\n\n\npub mod websocket;\n\npub mod cookies;\n\n\n\n\n\n#[cfg(test)]\n\npub mod stopper;\n", "file_path": "src/lib.rs", "rank": 36, "score": 15.000009420109752 }, { "content": " }).await?;\n\n stream.flush().await?;\n\n Ok((WebSocketReader{\n\n stream: stream.clone(),\n\n buffered_message: None,\n\n }, WebSocketWriter{\n\n stream,\n\n }))\n\n}\n\n\n\npub struct WebSocketReader<S>\n\nwhere S: AsyncRead + Unpin\n\n{\n\n stream: S,\n\n buffered_message: Option<(MessageType, Vec<u8>)>,\n\n}\n\n\n\nimpl<S> WebSocketReader<S>\n\nwhere S: AsyncRead + Unpin\n\n{\n", "file_path": "src/websocket.rs", "rank": 37, "score": 14.846958051536298 }, { "content": " let http_server = oc_http::HttpServer::new();\n\n let listener = TcpListener::bind(\"127.0.0.1:8080\").await?;\n\n let server = Arc::new(MyServer{});\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n\n if let Ok(stream) = stream {\n\n http_server.dispatch(server.clone(), stream);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/secure_server.rs", "rank": 38, "score": 14.518634279468722 }, { "content": " let req = http(&mut reader, &mut buf).await.unwrap();\n\n assert_eq!(req.method, \"GET\");\n\n assert_eq!(req.path, \"/\");\n\n // Response\n\n let mut headers = vec!();\n\n headers.push((\"Content-Type\".into(), Vec::from(\"text/html; charset=utf-8\".as_bytes())));\n\n respond(&mut writer, Response{\n\n code: 200,\n\n reason: \"OK\",\n\n headers,\n\n }).await.unwrap();\n\n writer.write_all(b\"<h1>Hello world!</h1>\").await.unwrap();\n\n writer.flush().await.unwrap();\n\n break;\n\n }\n\n });\n\n // Make a simple HTTP request with some other library\n\n let path = format!(\"http://localhost:{}\", local_addr.port());\n\n let res = ureq::get(&path).call();\n\n handle.await;\n", "file_path": "src/lib.rs", "rank": 39, "score": 14.50628232770246 }, { "content": "where S: AsyncRead + AsyncWrite + Clone + Unpin {\n\n loop {\n\n // this will return an error when the socket is closed;\n\n // oc_http::websocket::WebSocketError::ConnectionClosed\n\n let msg = match rdr.recv().await {\n\n Ok(msg) => msg,\n\n Err(WebSocketError::ConnectionClosed) => return,\n\n Err(err) => {\n\n warn!(\"Sadness is {:?}\", err);\n\n return\n\n },\n\n };\n\n wrt.write(&msg).await.unwrap();\n\n }\n\n}", "file_path": "examples/websocket_server.rs", "rank": 40, "score": 14.370174042899844 }, { "content": " ret.extend(&(self.payload_len as u16).to_be_bytes());\n\n ret\n\n });\n\n ret\n\n }\n\n}\n\n\n\n/// handles control message (ping, pong) to make sure the socket stays open\n\npub async fn handle_control<S>(msg: &Message, wrt: &mut WebSocketWriter<S>) -> Result<bool, WebSocketError>\n\nwhere S: AsyncWrite + Unpin\n\n{\n\n match msg.typ {\n\n MessageType::Pong => {\n\n let msg = Message{\n\n typ: MessageType::Pong,\n\n contents: msg.contents.clone(),\n\n };\n\n wrt.write(&msg).await?;\n\n Ok(true)\n\n },\n", "file_path": "src/websocket.rs", "rank": 41, "score": 13.620873967074019 }, { "content": " MessageType::Close => {\n\n Err(WebSocketError::ConnectionClosed)\n\n }\n\n _ => {\n\n Ok(false)\n\n },\n\n }\n\n}\n\n\n\nasync fn read_header<S>(stream: &mut S) -> Result<WebSocketHeader, WebSocketError>\n\nwhere S: AsyncRead + Unpin\n\n{\n\n // fixed-length header size is 2 bytes, followed by optional extended length\n\n // and finally mask\n\n let mut 
header_fixed = vec![0u8; 2];\n\n stream.read_exact(&mut header_fixed).await?;\n\n let (_, mut res) = read_header_internal(&header_fixed)?;\n\n header_fixed[1] &= 0b01111111;\n\n if res.payload_len == 126 {\n\n // read 16 bites, 2 bytes\n", "file_path": "src/websocket.rs", "rank": 42, "score": 13.619527772789278 }, { "content": " };\n\n // get the key we need to hash in the response\n\n let key = match req.headers.get(\"Sec-WebSocket-Key\") {\n\n Some(k) => k.0,\n\n None => Err(WebSocketError::NoKey)?,\n\n };\n\n let mut hasher = Sha1::new();\n\n hasher.update(&key);\n\n // magic string from the interwebs\n\n hasher.update(\"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\");\n\n let result = hasher.finalize();\n\n let mut headers = vec!();\n\n headers.push((\"Upgrade\".into(), Vec::from(\"websocket\")));\n\n headers.push((\"Connection\".into(), Vec::from(\"Upgrade\")));\n\n headers.push((\"Sec-WebSocket-Accept\".into(), base64::encode(&result[..]).into()));\n\n // complete the handshake\n\n respond(&mut stream, Response{\n\n code: 101,\n\n reason: \"Switching Protocols\",\n\n headers,\n", "file_path": "src/websocket.rs", "rank": 43, "score": 12.912145241110874 }, { "content": " fn from(err: io::Error) -> Self {\n\n if err.kind() == io::ErrorKind::UnexpectedEof {\n\n WebSocketError::ConnectionClosed\n\n } else {\n\n WebSocketError::IOError(format!(\"{:?}\", err))\n\n }\n\n }\n\n}\n\n\n\nimpl<E> From<nom::Err<E>> for WebSocketError {\n\n fn from(_err: nom::Err<E>) -> Self {\n\n WebSocketError::ProtocolError\n\n }\n\n}\n\n\n\nimpl fmt::Display for WebSocketError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"problem establishing websocket connection\")\n\n }\n\n}\n", "file_path": "src/websocket.rs", "rank": 44, "score": 11.392250007717456 }, { "content": " pub async fn recv(&mut self) -> Result<Message, WebSocketError> {\n\n loop {\n\n let header = read_header(&mut self.stream).await?;\n\n if header.payload_len > MAX_PAYLOAD_SIZE {\n\n Err(WebSocketError::TooBig)?;\n\n }\n\n // read the body\n\n let mut contents = vec![0u8; header.payload_len as usize];\n\n self.stream.read_exact(&mut contents).await?;\n\n // unmask the value in-place\n\n let len = contents.len();\n\n for i in 0..len {\n\n contents[i] = contents[i] ^ header.masking_key[i % header.masking_key.len()];\n\n }\n\n let typ = MessageType::try_from(header.opcode)?;\n\n if typ.is_control() {\n\n return Ok(Message{contents, typ});\n\n }\n\n // if this is a new fragment chain, start it\n\n if header.fin == 0 && typ != MessageType::Continuation {\n", "file_path": "src/websocket.rs", "rank": 45, "score": 10.911629994781693 }, { "content": " assert_eq!(req.path, \"/\");\n\n assert_eq!(req.headers.len(), 4);\n\n // Response\n\n respond(&mut writer, Response{\n\n code: 200,\n\n reason: \"OK\",\n\n headers: vec!(),\n\n }).await.unwrap();\n\n break;\n\n }\n\n });\n\n // Make a simple HTTP request with some other library\n\n let path = format!(\"http://localhost:{}\", local_addr.port());\n\n ureq::get(&path)\n\n .set(\"Transfer-Encoding\", \"chunked\")\n\n .call();\n\n handle.await;\n\n Ok(())\n\n }\n\n\n\n // TODO: test large messages\n\n}", "file_path": "src/lib.rs", "rank": 46, "score": 10.90025764874608 }, { "content": "where S: AsyncWrite + Unpin\n\n{\n\n let buf = format!(\"HTTP/1.1 {code} {reason}\",\n\n code=format!(\"{}\", response.code),\n\n reason=response.reason,\n\n );\n\n stream.write_all(&buf.as_bytes()).await?;\n\n for (name, value) in &response.headers {\n\n stream.write_all(NEWLINE).await?;\n\n 
stream.write_all(name.as_bytes()).await?;\n\n stream.write_all(b\": \").await?;\n\n stream.write_all(&value).await?;\n\n }\n\n // one to end the last header/status line, and one as required by the protocol\n\n stream.write_all(NEWLINE).await?;\n\n stream.write_all(NEWLINE).await?;\n\n Ok(())\n\n}\n\n\n\n// send_content writes all of the contents to the specified stream. Use this to send\n", "file_path": "src/lib.rs", "rank": 47, "score": 10.600376646131409 }, { "content": "impl TryFrom<u8> for MessageType {\n\n type Error = WebSocketError;\n\n\n\n fn try_from(b: u8) -> Result<Self, Self::Error> {\n\n Ok(match b {\n\n 0x0 => MessageType::Continuation,\n\n 0x1 => MessageType::Text,\n\n 0x2 => MessageType::Binary,\n\n 0x8 => MessageType::Close,\n\n 0x9 => MessageType::Ping,\n\n 0xA => MessageType::Pong,\n\n _ => return Err(WebSocketError::BadOpcode),\n\n })\n\n }\n\n}\n\n\n\nimpl Into<u8> for MessageType {\n\n fn into(self) -> u8 {\n\n match self {\n\n MessageType::Continuation => 0x0,\n", "file_path": "src/websocket.rs", "rank": 48, "score": 9.465410575304347 }, { "content": "};\n\n\n\nconst MAX_PAYLOAD_SIZE: u64 = 16_000;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum WebSocketError {\n\n ConnectionNotUpgrade,\n\n NoConnectionHeader,\n\n NoUpgradeHeader,\n\n UpgradeNotToWebSocket,\n\n WrongVersion,\n\n NoKey,\n\n TooBig,\n\n ProtocolError,\n\n IOError(String),\n\n BadOpcode,\n\n ConnectionClosed,\n\n}\n\n\n\nimpl From<io::Error> for WebSocketError {\n", "file_path": "src/websocket.rs", "rank": 49, "score": 7.107015786563998 }, { "content": " let mut len = [0u8; 2];\n\n stream.read_exact(&mut len).await?;\n\n res.payload_len = u16::from_be_bytes(len) as u64;\n\n } else if res.payload_len == 127 {\n\n // read 64 bits, 8 bytes\n\n let mut len = [0u8; 8];\n\n stream.read_exact(&mut len).await?;\n\n res.payload_len = u64::from_be_bytes(len) as u64;\n\n }\n\n if res.mask != 0 {\n\n let mut mask_key = vec![0u8; 4];\n\n stream.read_exact(&mut mask_key).await?;\n\n res.masking_key = mask_key;\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/websocket.rs", "rank": 50, "score": 6.2873044414106305 }, { "content": " };\n\n let (mut rdr, mut wrt) = upgrade(&request, stream).await.unwrap();\n\n let want_msg = Message{\n\n typ: MessageType::Text,\n\n contents: Vec::from(\"hello world!\"),\n\n };\n\n let msg = rdr.recv().await.unwrap();\n\n assert_eq!(msg, want_msg);\n\n wrt.write(&want_msg).await.unwrap();\n\n });\n\n }).await;\n\n let mut client = ClientBuilder::new(&format!(\"ws://{}/ws\", sock)).unwrap()\n\n .connect(None)\n\n .unwrap();\n\n let msg = websocket::Message::text(\"hello world!\");\n\n client.send_message(&msg).unwrap();\n\n let resp = client.recv_message().unwrap();\n\n assert_eq!(resp, websocket::OwnedMessage::Text(\"hello world!\".to_string()));\n\n stop.shutdown();\n\n Ok(())\n\n }\n\n}", "file_path": "src/websocket.rs", "rank": 51, "score": 5.808792301301229 }, { "content": " // loop until we get an error about the sender being closed\n\n }\n\n None\n\n }\n\n}\n\n\n\npub struct Stopper {\n\n done: Sender<u8>,\n\n}\n\n\n\nimpl Stopper {\n\n pub fn new() -> (Self, StopToken) {\n\n let (s, r) = bounded(1);\n\n (Stopper{\n\n done: s,\n\n }, StopToken{\n\n done: r,\n\n })\n\n }\n\n\n\n pub fn shutdown(&self) {\n\n self.done.close();\n\n }\n\n}", "file_path": "src/stopper.rs", "rank": 52, "score": 3.9132132509005286 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum MessageType {\n\n Continuation,\n\n Text,\n\n Binary,\n\n Close,\n\n Ping,\n\n Pong,\n\n}\n\n\n\nimpl 
MessageType {\n\n pub fn is_control(&self) -> bool {\n\n match self {\n\n MessageType::Ping | MessageType::Pong | MessageType::Close => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/websocket.rs", "rank": 53, "score": 3.891797764529099 }, { "content": "\n\nconst NEWLINE: &[u8] = b\"\\r\\n\";\n\n\n\n#[derive(Debug)]\n\npub struct Request<'a> {\n\n pub method: String,\n\n pub path: String,\n\n // Returns a mapping of header => (first_value, other values)\n\n pub headers: HashMap<&'a str, (&'a [u8], Option<Vec<&'a [u8]>>)>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Response {\n\n pub code: usize,\n\n pub reason: &'static str,\n\n pub headers: Vec<(String, Vec<u8>)>,\n\n}\n\n\n\nimpl Default for Response {\n\n fn default() -> Self {\n", "file_path": "src/lib.rs", "rank": 54, "score": 3.546334841903301 }, { "content": " if count == 0 {\n\n // this will likely only happen if the client disconnects before header is sent\n\n break;\n\n }\n\n // if the byte we read was a newline, extract logic\n\n if buf[i] == b'\\n' {\n\n if i - last_newline_at < 3 {\n\n // we might be at the end; check if last_newline_at..j is a terminal case\n\n let part = &buf[last_newline_at..j];\n\n if part == b\"\\n\\r\\n\" || part == b\"\\n\\n\" {\n\n break 'read_loop;\n\n }\n\n }\n\n lines += 1;\n\n last_newline_at = i;\n\n }\n\n i += 1;\n\n if i == buf.len() {\n\n break 'read_loop;\n\n }\n", "file_path": "src/lib.rs", "rank": 55, "score": 2.7181508120115003 }, { "content": "#[derive(Debug, Clone)]\n\nstruct WebSocketHeader{\n\n fin: u8,\n\n opcode: u8,\n\n mask: u8,\n\n payload_len: u64,\n\n masking_key: Vec<u8>,\n\n}\n\n\n\nimpl WebSocketHeader {\n\n fn to_vec(&self) -> Vec<u8> {\n\n let mut ret = Vec::with_capacity(70);\n\n ret.push((self.fin << 7) | self.opcode);\n\n ret.extend(if self.payload_len < 126 {\n\n vec!(self.payload_len as u8)\n\n } else if self.payload_len < u16::MAX as u64 {\n\n let mut ret = vec!(126u8);\n\n ret.extend(&(self.payload_len as u16).to_be_bytes());\n\n ret\n\n } else {\n\n let mut ret = vec!(127u8);\n", "file_path": "src/websocket.rs", "rank": 56, "score": 2.7068953411878045 } ]
Rust
contracts/mirror_staking/src/contract.rs
jaypersanchez/shade
9b7357c366dceb108a300944d66dbf6deb735c01
use cosmwasm_std::{ from_binary, log, to_binary, Api, Binary, Decimal, Env, Extern, HandleResponse, HandleResult, HumanAddr, InitResponse, MigrateResponse, MigrateResult, Querier, StdError, StdResult, Storage, Uint128, }; use mirror_protocol::staking::{ ConfigResponse, Cw20HookMsg, HandleMsg, InitMsg, MigrateMsg, PoolInfoResponse, QueryMsg, }; use crate::migration::{migrate_config, migrate_pool_infos}; use crate::rewards::{adjust_premium, deposit_reward, query_reward_info, withdraw_reward}; use crate::staking::{ auto_stake, auto_stake_hook, bond, decrease_short_token, increase_short_token, unbond, }; use crate::state::{read_config, read_pool_info, store_config, store_pool_info, Config, PoolInfo}; use cw20::Cw20ReceiveMsg; pub fn init<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: InitMsg, ) -> StdResult<InitResponse> { store_config( &mut deps.storage, &Config { owner: deps.api.canonical_address(&msg.owner)?, mirror_token: deps.api.canonical_address(&msg.mirror_token)?, mint_contract: deps.api.canonical_address(&msg.mint_contract)?, oracle_contract: deps.api.canonical_address(&msg.oracle_contract)?, terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?, base_denom: msg.base_denom, premium_min_update_interval: msg.premium_min_update_interval, }, )?; Ok(InitResponse::default()) } pub fn handle<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, msg: HandleMsg, ) -> StdResult<HandleResponse> { match msg { HandleMsg::Receive(msg) => receive_cw20(deps, env, msg), HandleMsg::UpdateConfig { owner, premium_min_update_interval, } => update_config(deps, env, owner, premium_min_update_interval), HandleMsg::RegisterAsset { asset_token, staking_token, } => register_asset(deps, env, asset_token, staking_token), HandleMsg::Unbond { asset_token, amount, } => unbond(deps, env.message.sender, asset_token, amount), HandleMsg::Withdraw { asset_token } => withdraw_reward(deps, env, asset_token), HandleMsg::AdjustPremium { asset_tokens } => adjust_premium(deps, env, asset_tokens), HandleMsg::IncreaseShortToken { staker_addr, asset_token, amount, } => increase_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::DecreaseShortToken { staker_addr, asset_token, amount, } => decrease_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::AutoStake { assets, slippage_tolerance, } => auto_stake(deps, env, assets, slippage_tolerance), HandleMsg::AutoStakeHook { asset_token, staking_token, staker_addr, prev_staking_token_amount, } => auto_stake_hook( deps, env, asset_token, staking_token, staker_addr, prev_staking_token_amount, ), } } pub fn receive_cw20<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, cw20_msg: Cw20ReceiveMsg, ) -> HandleResult { if let Some(msg) = cw20_msg.msg { let config: Config = read_config(&deps.storage)?; match from_binary(&msg)? { Cw20HookMsg::Bond { asset_token } => { let pool_info: PoolInfo = read_pool_info(&deps.storage, &deps.api.canonical_address(&asset_token)?)?; if pool_info.staking_token != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } bond(deps, env, cw20_msg.sender, asset_token, cw20_msg.amount) } Cw20HookMsg::DepositReward { rewards } => { if config.mirror_token != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } let mut rewards_amount = Uint128::zero(); for (_, amount) in rewards.iter() { rewards_amount += *amount; } if rewards_amount != cw20_msg.amount { return Err(StdError::generic_err("rewards amount miss matched")); } deposit_reward(deps, rewards, rewards_amount) } } } else { Err(StdError::generic_err("data should be given")) } } pub fn update_config<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, owner: Option<HumanAddr>, premium_min_update_interval: Option<u64>, ) -> StdResult<HandleResponse> { let mut config: Config = read_config(&deps.storage)?; if deps.api.canonical_address(&env.message.sender)? != config.owner { return Err(StdError::unauthorized()); } if let Some(owner) = owner { config.owner = deps.api.canonical_address(&owner)?; } if let Some(premium_min_update_interval) = premium_min_update_interval { config.premium_min_update_interval = premium_min_update_interval; } store_config(&mut deps.storage, &config)?; Ok(HandleResponse { messages: vec![], log: vec![log("action", "update_config")], data: None, }) } fn register_asset<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, asset_token: HumanAddr, staking_token: HumanAddr, ) -> HandleResult { let config: Config = read_config(&deps.storage)?; let asset_token_raw = deps.api.canonical_address(&asset_token)?; if config.owner != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } if read_pool_info(&deps.storage, &asset_token_raw).is_ok() { return Err(StdError::generic_err("Asset was already registered")); } store_pool_info( &mut deps.storage, &asset_token_raw, &PoolInfo { staking_token: deps.api.canonical_address(&staking_token)?, total_bond_amount: Uint128::zero(), total_short_amount: Uint128::zero(), reward_index: Decimal::zero(), short_reward_index: Decimal::zero(), pending_reward: Uint128::zero(), short_pending_reward: Uint128::zero(), premium_rate: Decimal::zero(), short_reward_weight: Decimal::zero(), premium_updated_time: 0, }, )?; Ok(HandleResponse { messages: vec![], log: vec![ log("action", "register_asset"), log("asset_token", asset_token.as_str()), ], data: None, }) } pub fn query<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, msg: QueryMsg, ) -> StdResult<Binary> { match msg { QueryMsg::Config {} => to_binary(&query_config(deps)?), QueryMsg::PoolInfo { asset_token } => to_binary(&query_pool_info(deps, asset_token)?), QueryMsg::RewardInfo { staker_addr, asset_token, } => to_binary(&query_reward_info(deps, staker_addr, asset_token)?), } } pub fn query_config<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, ) -> StdResult<ConfigResponse> { let state = read_config(&deps.storage)?; let resp = ConfigResponse { owner: deps.api.human_address(&state.owner)?, mirror_token: deps.api.human_address(&state.mirror_token)?, mint_contract: deps.api.human_address(&state.mint_contract)?, oracle_contract: deps.api.human_address(&state.oracle_contract)?, terraswap_factory: deps.api.human_address(&state.terraswap_factory)?, base_denom: state.base_denom, premium_min_update_interval: state.premium_min_update_interval, }; Ok(resp) } pub fn query_pool_info<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, asset_token: HumanAddr, ) -> StdResult<PoolInfoResponse> { let asset_token_raw = deps.api.canonical_address(&asset_token)?; let pool_info: PoolInfo = read_pool_info(&deps.storage, &asset_token_raw)?; Ok(PoolInfoResponse { asset_token, staking_token: deps.api.human_address(&pool_info.staking_token)?, 
total_bond_amount: pool_info.total_bond_amount, total_short_amount: pool_info.total_short_amount, reward_index: pool_info.reward_index, short_reward_index: pool_info.short_reward_index, pending_reward: pool_info.pending_reward, short_pending_reward: pool_info.short_pending_reward, premium_rate: pool_info.premium_rate, short_reward_weight: pool_info.short_reward_weight, premium_updated_time: pool_info.premium_updated_time, }) } pub fn migrate<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: MigrateMsg, ) -> MigrateResult { migrate_config( &mut deps.storage, deps.api.canonical_address(&msg.mint_contract)?, deps.api.canonical_address(&msg.oracle_contract)?, deps.api.canonical_address(&msg.terraswap_factory)?, msg.base_denom, msg.premium_min_update_interval, )?; migrate_pool_infos(&mut deps.storage)?; Ok(MigrateResponse::default()) }
use cosmwasm_std::{ from_binary, log, to_binary, Api, Binary, Decimal, Env, Extern, HandleResponse, HandleResult, HumanAddr, InitResponse, MigrateResponse, MigrateResult, Querier, StdError, StdResult, Storage, Uint128, }; use mirror_protocol::staking::{ ConfigResponse, Cw20HookMsg, HandleMsg, InitMsg, MigrateMsg, PoolInfoResponse, QueryMsg, }; use crate::migration::{migrate_config, migrate_pool_infos}; use crate::rewards::{adjust_premium, deposit_reward, query_reward_info, withdraw_reward}; use crate::staking::{ auto_stake, auto_stake_hook, bond, decrease_short_token, increase_short_token, unbond, }; use crate::state::{read_config, read_pool_info, store_config, store_pool_info, Config, PoolInfo}; use cw20::Cw20ReceiveMsg; pub fn init<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: InitMsg, ) -> StdResult<InitResponse> { store_config( &mut deps.storage, &Config { owner: deps.api.canonical_address(&msg.owner)?, mirror_token: deps.api.canonical_address(&msg.mirror_token)?, mint_contract: deps.api.canonical_address(&msg.mint_contract)?, oracle_contract: deps.api.canonical_address(&msg.oracle_contract)?, terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?, base_denom: msg.base_denom, premium_min_update_interval: msg.premium_min_update_interval, }, )?; Ok(InitResponse::default()) } pub fn handle<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, msg: HandleMsg, ) -> StdResult<HandleResponse> { match msg { HandleMsg::Receive(msg) => receive_cw20(deps, env, msg), HandleMsg::UpdateConfig { owner, premium_min_update_interval, } => update_config(deps, env, owner, premium_min_update_interval), HandleMsg::RegisterAsset { asset_token, staking_token, } => register_asset(deps, env, asset_token, staking_token), HandleMsg::Unbond { asset_token, amount, } => unbond(deps, env.message.sender, asset_token, amount), HandleMsg::Withdraw { asset_token } => withdraw_reward(deps, env, asset_token), HandleMsg::AdjustPremium { asset_tokens } => adjust_premium(deps, env, asset_tokens), HandleMsg::IncreaseShortToken { staker_addr, asset_token, amount, } => increase_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::DecreaseShortToken { staker_addr, asset_token, amount, } => decrease_short_token(deps, env, staker_addr, asset_token, amount), HandleMsg::AutoStake { assets, slippage_tolerance, } => auto_stake(deps, env, assets, slippage_tolerance), HandleMsg::AutoStakeHook { asset_token, staking_token, staker_addr, prev_staking_token_amount, } => auto_stake_hook( deps, env, asset_token, staking_token, staker_addr, prev_staking_token_amount, ), } } pub fn receive_cw20<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, cw20_msg: Cw20ReceiveMsg, ) -> HandleResult { if let Some(msg) = cw20_msg.msg { let config: Config = read_config(&deps.storage)?; match from_binary(&msg)? { Cw20HookMsg::Bond { asset_token } => { let pool_info: PoolInfo = read_pool_info(&deps.storage, &deps.api.canonical_address(&asset_token)?)?; if pool_info.staking_token != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } bond(deps, env, cw20_msg.sender, asset_token, cw20_msg.amount) } Cw20HookMsg::DepositReward { rewards } => { if config.mirror_token != deps.api.canonical_address(&env.message.sender)? 
{ return Err(StdError::unauthorized()); } let mut rewards_amount = Uint128::zero(); for (_, amount) in rewards.iter() { rewards_amount += *amount; } if rewards_amount != cw20_msg.amount { return Err(StdError::generic_err("rewards amount miss matched")); } deposit_reward(deps, rewards, rewards_amount) } } } else { Err(StdError::generic_err("data should be given")) } } pub fn update_config<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, owner: Option<HumanAddr>, premium_min_update_interval: Option<u64>, ) -> StdResult<HandleResponse> { let mut config: Config = read_config(&deps.storage)?; if deps.api.canonical_address(&env.message.sender)? != config.owner { return Err(StdError::unauthorized()); } if let Some(owner) = owner { config.owner = deps.api.canonical_address(&owner)?; } if let Some(premium_min_update_interval) = premium_min_update_interval { config.premium_min_update_interval = premium_min_update_interval; } store_config(&mut deps.storage, &config)?; Ok(HandleResponse { messages: vec![], log: vec![log("action", "update_config")], data: None, }) } fn register_asset<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, asset_token: HumanAddr, staking_token: HumanAddr, ) -> HandleResult { let config: Config = read_config(&deps.storage)?; let asset_token_raw = deps.api.canonical_address(&asset_token)?; if config.owner != deps.api.canonical_address(&env.message.sender)? { return Err(StdError::unauthorized()); } if read_pool_info(&deps.storage, &asset_token_raw).is_ok() { return Err(StdError::generic_err("Asset was already registered")); } store_pool_info( &mut deps.storage, &asset_token_raw, &PoolInfo { staking_token: deps.api.canonical_address(&staking_token)?, total_bond_amount: Uint128::zero(), total_short_amount: Uint128::zero(), reward_index: Decimal::zero(), short_reward_index: Decimal::zero(), pending_reward: Uint128::zero(), short_pending_reward: Uint128::zero(), premium_rate: Decimal::zero(), short_reward_weight: Decimal::zero(), premium_updated_time: 0, }, )?; Ok(HandleResponse { messages: vec![], log: vec![ log("action", "register_asset"), log("asset_token", asset_token.as_str()), ], data: None, }) } pub fn query<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, msg: QueryMsg, ) -> StdResult<Binary> { match msg { QueryMsg::Config {} => to_binary(&query_config(deps)?), QueryMsg::PoolInfo { asset_token } => to_binary(&query_pool_info(deps, asset_token)?), QueryMsg::RewardInfo { staker_addr, asset_token, } => to_binary(&query_reward_info(deps, staker_addr, asset_token)?), } } pub fn query_config<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, ) -> StdResult<ConfigResponse> { let state = read_config(&deps.storage)?; let resp = ConfigResponse { owner: deps.api.human_address(&state.owner)?,
pub fn query_pool_info<S: Storage, A: Api, Q: Querier>( deps: &Extern<S, A, Q>, asset_token: HumanAddr, ) -> StdResult<PoolInfoResponse> { let asset_token_raw = deps.api.canonical_address(&asset_token)?; let pool_info: PoolInfo = read_pool_info(&deps.storage, &asset_token_raw)?; Ok(PoolInfoResponse { asset_token, staking_token: deps.api.human_address(&pool_info.staking_token)?, total_bond_amount: pool_info.total_bond_amount, total_short_amount: pool_info.total_short_amount, reward_index: pool_info.reward_index, short_reward_index: pool_info.short_reward_index, pending_reward: pool_info.pending_reward, short_pending_reward: pool_info.short_pending_reward, premium_rate: pool_info.premium_rate, short_reward_weight: pool_info.short_reward_weight, premium_updated_time: pool_info.premium_updated_time, }) } pub fn migrate<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, _env: Env, msg: MigrateMsg, ) -> MigrateResult { migrate_config( &mut deps.storage, deps.api.canonical_address(&msg.mint_contract)?, deps.api.canonical_address(&msg.oracle_contract)?, deps.api.canonical_address(&msg.terraswap_factory)?, msg.base_denom, msg.premium_min_update_interval, )?; migrate_pool_infos(&mut deps.storage)?; Ok(MigrateResponse::default()) }
mirror_token: deps.api.human_address(&state.mirror_token)?, mint_contract: deps.api.human_address(&state.mint_contract)?, oracle_contract: deps.api.human_address(&state.oracle_contract)?, terraswap_factory: deps.api.human_address(&state.terraswap_factory)?, base_denom: state.base_denom, premium_min_update_interval: state.premium_min_update_interval, }; Ok(resp) }
function_block-function_prefix_line
[ { "content": "pub fn query_reward_info<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n staker_addr: HumanAddr,\n\n asset_token: Option<HumanAddr>,\n\n) -> StdResult<RewardInfoResponse> {\n\n let staker_addr_raw = deps.api.canonical_address(&staker_addr)?;\n\n\n\n let reward_infos: Vec<RewardInfoResponseItem> = vec![\n\n _read_reward_infos(\n\n &deps.api,\n\n &deps.storage,\n\n &staker_addr_raw,\n\n &asset_token,\n\n false,\n\n )?,\n\n _read_reward_infos(\n\n &deps.api,\n\n &deps.storage,\n\n &staker_addr_raw,\n\n &asset_token,\n", "file_path": "contracts/mirror_staking/src/rewards.rs", "rank": 0, "score": 495850.5976595578 }, { "content": "fn query_state<S: Storage, A: Api, Q: Querier>(deps: &Extern<S, A, Q>) -> StdResult<StateResponse> {\n\n let state: State = state_read(&deps.storage).load()?;\n\n Ok(StateResponse {\n\n poll_count: state.poll_count,\n\n total_share: state.total_share,\n\n total_deposit: state.total_deposit,\n\n pending_voting_rewards: state.pending_voting_rewards,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 1, "score": 494156.1754878346 }, { "content": "pub fn register_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n auction_discount: Decimal,\n\n min_collateral_ratio: Decimal,\n\n ipo_params: Option<IPOParams>,\n\n) -> StdResult<HandleResponse> {\n\n assert_auction_discount(auction_discount)?;\n\n assert_min_collateral_ratio(min_collateral_ratio)?;\n\n\n\n let config: Config = read_config(&deps.storage)?;\n\n\n\n // permission check\n\n if deps.api.canonical_address(&env.message.sender)? != config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n let asset_token_raw = deps.api.canonical_address(&asset_token)?;\n\n if read_asset_config(&deps.storage, &asset_token_raw).is_ok() {\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 2, "score": 490212.17576706526 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&state.owner)?,\n\n mint_contract: deps.api.human_address(&state.mint_contract)?,\n\n base_denom: state.base_denom,\n\n lockup_period: state.lockup_period,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_lock/src/contract.rs", "rank": 3, "score": 490034.25326916843 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&state.owner)?,\n\n oracle: deps.api.human_address(&state.oracle)?,\n\n staking: deps.api.human_address(&state.staking)?,\n\n collector: deps.api.human_address(&state.collector)?,\n\n collateral_oracle: deps.api.human_address(&state.collateral_oracle)?,\n\n terraswap_factory: deps.api.human_address(&state.terraswap_factory)?,\n\n lock: deps.api.human_address(&state.lock)?,\n\n base_denom: state.base_denom,\n\n token_code_id: state.token_code_id,\n\n protocol_fee_rate: Decimal::percent(1),\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 4, "score": 490034.25326916843 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = 
ConfigResponse {\n\n distribution_contract: deps.api.human_address(&state.distribution_contract)?,\n\n terraswap_factory: deps.api.human_address(&state.terraswap_factory)?,\n\n mirror_token: deps.api.human_address(&state.mirror_token)?,\n\n base_denom: state.base_denom,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_collector/src/contract.rs", "rank": 5, "score": 490034.2532691685 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&state.owner)?,\n\n mirror_token: deps.api.human_address(&state.mirror_token)?,\n\n mint_contract: deps.api.human_address(&state.mint_contract)?,\n\n oracle_contract: deps.api.human_address(&state.oracle_contract)?,\n\n terraswap_factory: deps.api.human_address(&state.terraswap_factory)?,\n\n staking_contract: deps.api.human_address(&state.staking_contract)?,\n\n commission_collector: deps.api.human_address(&state.commission_collector)?,\n\n token_code_id: state.token_code_id,\n\n base_denom: state.base_denom,\n\n genesis_time: state.genesis_time,\n\n distribution_schedule: state.distribution_schedule,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 7, "score": 490034.25326916843 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&state.owner)?,\n\n mirror_token: deps.api.human_address(&state.mirror_token)?,\n\n spend_limit: state.spend_limit,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/contract.rs", "rank": 8, "score": 490034.25326916843 }, { "content": "pub fn query_asset_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n asset_token: HumanAddr,\n\n) -> StdResult<AssetConfigResponse> {\n\n let asset_config: AssetConfig =\n\n read_asset_config(&deps.storage, &deps.api.canonical_address(&asset_token)?)?;\n\n\n\n let resp = AssetConfigResponse {\n\n token: deps.api.human_address(&asset_config.token).unwrap(),\n\n auction_discount: asset_config.auction_discount,\n\n min_collateral_ratio: asset_config.min_collateral_ratio,\n\n end_price: asset_config.end_price,\n\n ipo_params: asset_config.ipo_params,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 9, "score": 488951.0035174297 }, { "content": "pub fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let config = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&config.owner)?,\n\n mint_contract: deps.api.human_address(&config.mint_contract)?,\n\n factory_contract: deps.api.human_address(&config.factory_contract)?,\n\n base_denom: config.base_denom,\n\n mirror_oracle: deps.api.human_address(&config.mirror_oracle)?,\n\n anchor_oracle: deps.api.human_address(&config.anchor_oracle)?,\n\n band_oracle: deps.api.human_address(&config.band_oracle)?,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 10, "score": 484028.23317439423 }, { "content": "// withdraw all rewards or single reward depending on asset_token\n\npub fn withdraw_reward<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut 
Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: Option<HumanAddr>,\n\n) -> HandleResult {\n\n let staker_addr = deps.api.canonical_address(&env.message.sender)?;\n\n let asset_token = asset_token.map(|a| deps.api.canonical_address(&a).unwrap());\n\n let normal_reward = _withdraw_reward(&mut deps.storage, &staker_addr, &asset_token, false)?;\n\n let short_reward = _withdraw_reward(&mut deps.storage, &staker_addr, &asset_token, true)?;\n\n\n\n let amount = normal_reward + short_reward;\n\n let config: Config = read_config(&deps.storage)?;\n\n Ok(HandleResponse {\n\n messages: vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.human_address(&config.mirror_token)?,\n\n msg: to_binary(&Cw20HandleMsg::Transfer {\n\n recipient: env.message.sender,\n\n amount,\n\n })?,\n\n send: vec![],\n\n })],\n\n log: vec![log(\"action\", \"withdraw\"), log(\"amount\", amount.to_string())],\n\n data: None,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_staking/src/rewards.rs", "rank": 11, "score": 462576.0101670447 }, { "content": "// deposit_reward must be from reward token contract\n\npub fn deposit_reward<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n rewards: Vec<(HumanAddr, Uint128)>,\n\n rewards_amount: Uint128,\n\n) -> HandleResult {\n\n for (asset_token, amount) in rewards.iter() {\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n let mut pool_info: PoolInfo = read_pool_info(&deps.storage, &asset_token_raw)?;\n\n\n\n // Decimal::from_ratio(1, 5).mul()\n\n // erf(pool_info.premium_rate.0)\n\n // 3.0f64\n\n let total_reward = *amount;\n\n let mut short_reward = total_reward * pool_info.short_reward_weight;\n\n let mut normal_reward = (total_reward - short_reward).unwrap();\n\n\n\n if pool_info.total_bond_amount.is_zero() {\n\n pool_info.pending_reward += normal_reward;\n\n } else {\n\n normal_reward += pool_info.pending_reward;\n", "file_path": "contracts/mirror_staking/src/rewards.rs", "rank": 12, "score": 462559.73777424695 }, { "content": "pub fn unbond<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n staker_addr: HumanAddr,\n\n asset_token: HumanAddr,\n\n amount: Uint128,\n\n) -> HandleResult {\n\n let staker_addr_raw: CanonicalAddr = deps.api.canonical_address(&staker_addr)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n let staking_token: CanonicalAddr = _decrease_bond_amount(\n\n &mut deps.storage,\n\n &staker_addr_raw,\n\n &asset_token_raw,\n\n amount,\n\n false,\n\n )?;\n\n\n\n Ok(HandleResponse {\n\n messages: vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.human_address(&staking_token)?,\n\n msg: to_binary(&Cw20HandleMsg::Transfer {\n", "file_path": "contracts/mirror_staking/src/staking.rs", "rank": 13, "score": 456615.66757323314 }, { "content": "pub fn bond<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n staker_addr: HumanAddr,\n\n asset_token: HumanAddr,\n\n amount: Uint128,\n\n) -> HandleResult {\n\n let staker_addr_raw: CanonicalAddr = deps.api.canonical_address(&staker_addr)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n _increase_bond_amount(\n\n &mut deps.storage,\n\n &staker_addr_raw,\n\n &asset_token_raw,\n\n amount,\n\n false,\n\n )?;\n\n\n\n Ok(HandleResponse {\n\n messages: vec![],\n\n log: vec![\n\n log(\"action\", \"bond\"),\n\n log(\"staker_addr\", staker_addr.as_str()),\n\n log(\"asset_token\", asset_token.as_str()),\n\n log(\"amount\", 
amount.to_string()),\n\n ],\n\n data: None,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_staking/src/staking.rs", "rank": 14, "score": 456607.4224333381 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n let config = Config {\n\n owner: deps.api.canonical_address(&msg.owner)?,\n\n mint_contract: deps.api.canonical_address(&msg.mint_contract)?,\n\n base_denom: msg.base_denom,\n\n lockup_period: msg.lockup_period,\n\n };\n\n\n\n store_config(&mut deps.storage, &config)?;\n\n total_locked_funds_store(&mut deps.storage).save(&Uint128::zero())?;\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_lock/src/contract.rs", "rank": 15, "score": 456465.6322050543 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n store_config(\n\n &mut deps.storage,\n\n &Config {\n\n owner: deps.api.canonical_address(&msg.owner)?,\n\n base_asset: msg.base_asset,\n\n },\n\n )?;\n\n\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 16, "score": 456465.6322050543 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n let config = Config {\n\n owner: deps.api.canonical_address(&msg.owner)?,\n\n oracle: deps.api.canonical_address(&msg.oracle)?,\n\n collector: deps.api.canonical_address(&msg.collector)?,\n\n collateral_oracle: deps.api.canonical_address(&msg.collateral_oracle)?,\n\n staking: deps.api.canonical_address(&msg.staking)?,\n\n terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?,\n\n lock: deps.api.canonical_address(&msg.lock)?,\n\n base_denom: msg.base_denom,\n\n token_code_id: msg.token_code_id,\n\n protocol_fee_rate: msg.protocol_fee_rate,\n\n };\n\n\n\n store_config(&mut deps.storage, &config)?;\n\n store_position_idx(&mut deps.storage, Uint128(1u128))?;\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 17, "score": 456465.63220505434 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n store_config(\n\n &mut deps.storage,\n\n &Config {\n\n distribution_contract: deps.api.canonical_address(&msg.distribution_contract)?,\n\n terraswap_factory: deps.api.canonical_address(&msg.terraswap_factory)?,\n\n mirror_token: deps.api.canonical_address(&msg.mirror_token)?,\n\n base_denom: msg.base_denom,\n\n },\n\n )?;\n\n\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_collector/src/contract.rs", "rank": 19, "score": 456465.6322050543 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: InitMsg,\n\n) -> InitResult {\n\n validate_quorum(msg.quorum)?;\n\n validate_threshold(msg.threshold)?;\n\n\n\n let config = Config {\n\n mirror_token: deps.api.canonical_address(&msg.mirror_token)?,\n\n owner: deps.api.canonical_address(&env.message.sender)?,\n\n quorum: msg.quorum,\n\n threshold: msg.threshold,\n\n voting_period: msg.voting_period,\n\n effective_delay: msg.effective_delay,\n\n expiration_period: msg.expiration_period,\n\n proposal_deposit: msg.proposal_deposit,\n\n voter_weight: msg.voter_weight,\n\n snapshot_period: 
msg.snapshot_period,\n\n };\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 20, "score": 456465.63220505434 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n store_config(\n\n &mut deps.storage,\n\n &Config {\n\n owner: CanonicalAddr::default(),\n\n mirror_token: CanonicalAddr::default(),\n\n mint_contract: CanonicalAddr::default(),\n\n oracle_contract: CanonicalAddr::default(),\n\n terraswap_factory: CanonicalAddr::default(),\n\n staking_contract: CanonicalAddr::default(),\n\n commission_collector: CanonicalAddr::default(),\n\n token_code_id: msg.token_code_id,\n\n base_denom: msg.base_denom,\n\n genesis_time: env.block.time,\n\n distribution_schedule: msg.distribution_schedule,\n\n },\n\n )?;\n\n\n\n store_total_weight(&mut deps.storage, 0u32)?;\n\n store_last_distributed(&mut deps.storage, env.block.time)?;\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 21, "score": 456465.63220505434 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n store_config(\n\n &mut deps.storage,\n\n &Config {\n\n owner: deps.api.canonical_address(&msg.owner)?,\n\n mirror_token: deps.api.canonical_address(&msg.mirror_token)?,\n\n spend_limit: msg.spend_limit,\n\n },\n\n )?;\n\n\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/contract.rs", "rank": 22, "score": 456465.63220505434 }, { "content": "pub fn load_mint_asset_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n contract_addr: &HumanAddr,\n\n asset_token: &CanonicalAddr,\n\n) -> StdResult<(Decimal, Decimal)> {\n\n let res: StdResult<Binary> = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Raw {\n\n contract_addr: HumanAddr::from(contract_addr),\n\n key: Binary::from(concat(\n\n &to_length_prefixed(b\"asset_config\"),\n\n asset_token.as_slice(),\n\n )),\n\n }));\n\n\n\n let res = match res {\n\n Ok(v) => v,\n\n Err(_) => {\n\n return Err(StdError::generic_err(\n\n \"Falied to fetch the mint asset config\",\n\n ));\n\n }\n", "file_path": "contracts/mirror_factory/src/querier.rs", "rank": 23, "score": 456409.82607071754 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n store_config(\n\n &mut deps.storage,\n\n &Config {\n\n owner: deps.api.canonical_address(&msg.owner)?,\n\n mint_contract: deps.api.canonical_address(&msg.mint_contract)?,\n\n factory_contract: deps.api.canonical_address(&msg.factory_contract)?,\n\n base_denom: msg.base_denom,\n\n mirror_oracle: deps.api.canonical_address(&msg.mirror_oracle)?,\n\n anchor_oracle: deps.api.canonical_address(&msg.anchor_oracle)?,\n\n band_oracle: deps.api.canonical_address(&msg.band_oracle)?,\n\n },\n\n )?;\n\n\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 24, "score": 451007.39940277446 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n _msg: InitMsg,\n\n) -> InitResult {\n\n init_last_order_id(&mut deps.storage)?;\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_limit_order/src/contract.rs", "rank": 25, "score": 451007.3994027744 }, { "content": "pub fn deposit_reward<S: 
Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n _env: Env,\n\n _sender: HumanAddr,\n\n amount: Uint128,\n\n) -> HandleResult {\n\n let config = config_read(&deps.storage).load()?;\n\n\n\n let mut polls_in_progress = read_polls(\n\n &deps.storage,\n\n Some(PollStatus::InProgress),\n\n None,\n\n None,\n\n None,\n\n Some(true), // remove hard cap to get all polls\n\n )?;\n\n\n\n if config.voter_weight.is_zero() || polls_in_progress.is_empty() {\n\n return Ok(HandleResponse {\n\n messages: vec![],\n", "file_path": "contracts/mirror_gov/src/staking.rs", "rank": 26, "score": 450875.17346851225 }, { "content": "pub fn update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n oracle: Option<HumanAddr>,\n\n collector: Option<HumanAddr>,\n\n collateral_oracle: Option<HumanAddr>,\n\n terraswap_factory: Option<HumanAddr>,\n\n lock: Option<HumanAddr>,\n\n token_code_id: Option<u64>,\n\n protocol_fee_rate: Option<Decimal>,\n\n) -> StdResult<HandleResponse> {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n\n\n if deps.api.canonical_address(&env.message.sender)? != config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 27, "score": 450838.15722804295 }, { "content": "pub fn update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n token_code_id: Option<u64>,\n\n distribution_schedule: Option<Vec<(u64, u64, Uint128)>>,\n\n) -> HandleResult {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n if config.owner != deps.api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n\n }\n\n\n\n if let Some(distribution_schedule) = distribution_schedule {\n\n config.distribution_schedule = distribution_schedule;\n\n }\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 29, "score": 450838.15722804295 }, { "content": "pub fn update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n mint_contract: Option<HumanAddr>,\n\n base_denom: Option<String>,\n\n lockup_period: Option<u64>,\n\n) -> StdResult<HandleResponse> {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n\n\n if deps.api.canonical_address(&env.message.sender)? 
!= config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n\n }\n\n\n\n if let Some(mint_contract) = mint_contract {\n\n config.mint_contract = deps.api.canonical_address(&mint_contract)?;\n", "file_path": "contracts/mirror_lock/src/contract.rs", "rank": 30, "score": 450838.15722804295 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n quorum: Option<Decimal>,\n\n threshold: Option<Decimal>,\n\n voting_period: Option<u64>,\n\n effective_delay: Option<u64>,\n\n expiration_period: Option<u64>,\n\n proposal_deposit: Option<Uint128>,\n\n voter_weight: Option<Decimal>,\n\n snapshot_period: Option<u64>,\n\n) -> HandleResult {\n\n let api = deps.api;\n\n config_store(&mut deps.storage).update(|mut config| {\n\n if config.owner != api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 31, "score": 450838.15722804295 }, { "content": "pub fn try_register_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n feeder: HumanAddr,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n if config.owner != deps.api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n let asset_token_raw = deps.api.canonical_address(&asset_token)?;\n\n\n\n // check if it is a new asset\n\n if read_feeder(&deps.storage, &asset_token_raw).is_err() {\n\n // new asset, initialize asset price info\n\n store_price(\n\n &mut deps.storage,\n\n &asset_token_raw,\n\n &PriceInfo {\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 32, "score": 450728.1300749423 }, { "content": "pub fn update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n mint_contract: Option<HumanAddr>,\n\n factory_contract: Option<HumanAddr>,\n\n base_denom: Option<String>,\n\n mirror_oracle: Option<HumanAddr>,\n\n anchor_oracle: Option<HumanAddr>,\n\n band_oracle: Option<HumanAddr>,\n\n) -> HandleResult {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n if deps.api.canonical_address(&env.message.sender)? 
!= config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n\n }\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 33, "score": 445595.8278190015 }, { "content": "fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let config: Config = config_read(&deps.storage).load()?;\n\n Ok(ConfigResponse {\n\n owner: deps.api.human_address(&config.owner)?,\n\n mirror_token: deps.api.human_address(&config.mirror_token)?,\n\n quorum: config.quorum,\n\n threshold: config.threshold,\n\n voting_period: config.voting_period,\n\n effective_delay: config.effective_delay,\n\n expiration_period: config.expiration_period,\n\n proposal_deposit: config.proposal_deposit,\n\n voter_weight: config.voter_weight,\n\n snapshot_period: config.snapshot_period,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 35, "score": 432980.14908579504 }, { "content": "fn query_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<ConfigResponse> {\n\n let state = read_config(&deps.storage)?;\n\n let resp = ConfigResponse {\n\n owner: deps.api.human_address(&state.owner)?,\n\n base_asset: state.base_asset,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 36, "score": 432980.14908579504 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/state.rs", "rank": 37, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/state.rs", "rank": 38, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_staking/src/state.rs", "rank": 39, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_factory/src/state.rs", "rank": 40, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_lock/src/state.rs", "rank": 41, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_collector/src/state.rs", "rank": 42, "score": 428975.0240552968 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/state.rs", "rank": 43, "score": 428975.0240552968 }, { "content": "// queries the collateral oracle to get the asset rate and multiplier\n\npub fn query_collateral<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n collateral_oracle: &HumanAddr,\n\n asset: String,\n\n block_time: Option<u64>,\n\n) -> StdResult<(Decimal, 
Decimal, bool)> {\n\n let res: CollateralPriceResponse =\n\n deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: HumanAddr::from(collateral_oracle),\n\n msg: to_binary(&CollateralOracleQueryMsg::CollateralPrice { asset })?,\n\n }))?;\n\n\n\n if let Some(block_time) = block_time {\n\n if res.last_updated < (block_time - PRICE_EXPIRE_TIME) {\n\n return Err(StdError::generic_err(\"Collateral price is too old\"));\n\n }\n\n }\n\n\n\n Ok((res.rate, res.multiplier, res.is_revoked))\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/querier.rs", "rank": 44, "score": 428937.53717438306 }, { "content": "pub fn query_price<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n oracle: &HumanAddr,\n\n base_asset: String,\n\n quote_asset: String,\n\n block_time: Option<u64>,\n\n) -> StdResult<Decimal> {\n\n let res: PriceResponse = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: HumanAddr::from(oracle),\n\n msg: to_binary(&OracleQueryMsg::Price {\n\n base_asset,\n\n quote_asset,\n\n })?,\n\n }))?;\n\n\n\n if let Some(block_time) = block_time {\n\n if res.last_updated_base < (block_time - PRICE_EXPIRE_TIME)\n\n || res.last_updated_quote < (block_time - PRICE_EXPIRE_TIME)\n\n {\n\n return Err(StdError::generic_err(\"Price is too old\"));\n\n }\n\n }\n\n\n\n Ok(res.rate)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/querier.rs", "rank": 45, "score": 428927.9747863447 }, { "content": "pub fn query_price<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n oracle: &HumanAddr,\n\n base_asset: String,\n\n quote_asset: String,\n\n) -> StdResult<Decimal> {\n\n let res: PriceResponse = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: HumanAddr::from(oracle),\n\n msg: to_binary(&OracleQueryMsg::Price {\n\n base_asset,\n\n quote_asset,\n\n })?,\n\n }))?;\n\n\n\n Ok(res.rate)\n\n}\n", "file_path": "contracts/mirror_staking/src/querier.rs", "rank": 46, "score": 428927.9747863447 }, { "content": "pub fn store_config<S: Storage>(storage: &mut S, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/state.rs", "rank": 47, "score": 425328.008524426 }, { "content": "pub fn query_price<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n config: &Config,\n\n asset: &String,\n\n price_source: &SourceType,\n\n) -> StdResult<(Decimal, u64)> {\n\n match price_source {\n\n SourceType::BandOracle {} => {\n\n let res: BandOracleResponse =\n\n deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: deps.api.human_address(&config.band_oracle)?,\n\n msg: to_binary(&SourceQueryMsg::GetReferenceData {\n\n base_symbol: asset.to_string(),\n\n quote_symbol: config.base_denom.clone(),\n\n })\n\n .unwrap(),\n\n }))?;\n\n let rate: Decimal = parse_band_rate(res.rate)?;\n\n\n\n Ok((rate, res.last_updated_base))\n", "file_path": "contracts/mirror_collateral_oracle/src/querier.rs", "rank": 48, "score": 423797.2818076098 }, { "content": "pub fn load_asset_price<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n oracle: &HumanAddr,\n\n asset: &AssetInfoRaw,\n\n block_time: Option<u64>,\n\n) -> StdResult<Decimal> {\n\n let config: Config = read_config(&deps.storage)?;\n\n\n\n // check if the asset has a stored end_price or pre_ipo_price\n\n let stored_price = read_fixed_price(&deps.storage, &asset);\n\n\n\n let price: Decimal = if let Some(stored_price) = stored_price {\n\n stored_price\n\n } 
else {\n\n let asset_denom: String = (asset.to_normal(&deps)?).to_string();\n\n if asset_denom == config.base_denom {\n\n Decimal::one()\n\n } else {\n\n // fetch price from oracle\n\n query_price(deps, oracle, asset_denom, config.base_denom, block_time)?\n\n }\n\n };\n\n\n\n Ok(price)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/querier.rs", "rank": 49, "score": 423710.7127123541 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::UpdateConfig {\n\n owner,\n\n mint_contract,\n\n base_denom,\n\n lockup_period,\n\n } => update_config(deps, env, owner, mint_contract, base_denom, lockup_period),\n\n HandleMsg::LockPositionFundsHook {\n\n position_idx,\n\n receiver,\n\n } => lock_position_funds_hook(deps, env, position_idx, receiver),\n\n HandleMsg::UnlockPositionFunds { position_idx } => {\n\n unlock_position_funds(deps, env, position_idx)\n\n }\n\n HandleMsg::ReleasePositionFunds { position_idx } => {\n\n release_position_funds(deps, env, position_idx)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_lock/src/contract.rs", "rank": 50, "score": 423093.0100490046 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Convert { asset_token } => convert(deps, env, asset_token),\n\n HandleMsg::Distribute {} => distribute(deps, env),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_collector/src/contract.rs", "rank": 51, "score": 423093.0100490046 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Receive(msg) => receive_cw20(deps, env, msg),\n\n HandleMsg::UpdateConfig {\n\n owner,\n\n oracle,\n\n collector,\n\n collateral_oracle,\n\n terraswap_factory,\n\n lock,\n\n token_code_id,\n\n protocol_fee_rate,\n\n } => update_config(\n\n deps,\n\n env,\n\n owner,\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 52, "score": 423093.0100490046 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::PostInitialize {\n\n owner,\n\n mirror_token,\n\n mint_contract,\n\n oracle_contract,\n\n terraswap_factory,\n\n staking_contract,\n\n commission_collector,\n\n } => post_initialize(\n\n deps,\n\n env,\n\n owner,\n\n mirror_token,\n\n mint_contract,\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 53, "score": 423093.01004900463 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> HandleResult {\n\n match msg {\n\n HandleMsg::UpdateConfig { owner } => try_update_config(deps, env, owner),\n\n HandleMsg::RegisterAsset {\n\n asset_token,\n\n feeder,\n\n } => try_register_asset(deps, env, asset_token, feeder),\n\n HandleMsg::FeedPrice { prices } => try_feed_price(deps, env, prices),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 54, "score": 423093.01004900463 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::UpdateConfig { owner, 
spend_limit } => {\n\n udpate_config(deps, env, owner, spend_limit)\n\n }\n\n HandleMsg::Spend { recipient, amount } => spend(deps, env, recipient, amount),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/contract.rs", "rank": 55, "score": 423093.01004900463 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Receive(msg) => receive_cw20(deps, env, msg),\n\n HandleMsg::UpdateConfig {\n\n owner,\n\n quorum,\n\n threshold,\n\n voting_period,\n\n effective_delay,\n\n expiration_period,\n\n proposal_deposit,\n\n voter_weight,\n\n snapshot_period,\n\n } => update_config(\n\n deps,\n\n env,\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 56, "score": 423093.0100490046 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::Feeder { asset_token } => to_binary(&query_feeder(deps, asset_token)?),\n\n QueryMsg::Price {\n\n base_asset,\n\n quote_asset,\n\n } => to_binary(&query_price(deps, base_asset, quote_asset)?),\n\n QueryMsg::Prices {\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_prices(deps, start_after, limit, order_by)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 59, "score": 422950.9993914486 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(&deps)?),\n\n QueryMsg::State {} => to_binary(&query_state(&deps)?),\n\n QueryMsg::Staker { address } => to_binary(&query_staker(deps, address)?),\n\n QueryMsg::Poll { poll_id } => to_binary(&query_poll(deps, poll_id)?),\n\n QueryMsg::Polls {\n\n filter,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_polls(deps, filter, start_after, limit, order_by)?),\n\n QueryMsg::Voters {\n\n poll_id,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_voters(deps, poll_id, start_after, limit, order_by)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 60, "score": 422950.9993914486 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::DistributionInfo {} => to_binary(&query_distribution_info(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 61, "score": 422950.9993914486 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_collector/src/contract.rs", "rank": 62, "score": 422950.99939144857 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/contract.rs", "rank": 63, "score": 422950.9993914486 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: 
QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::AssetConfig { asset_token } => to_binary(&query_asset_config(deps, asset_token)?),\n\n QueryMsg::Position { position_idx } => to_binary(&query_position(deps, position_idx)?),\n\n QueryMsg::Positions {\n\n owner_addr,\n\n asset_token,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_positions(\n\n deps,\n\n owner_addr,\n\n asset_token,\n\n start_after,\n\n limit,\n\n order_by,\n\n )?),\n\n QueryMsg::NextPositionIdx {} => to_binary(&query_next_position_idx(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 64, "score": 422950.9993914486 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::PositionLockInfo { position_idx } => {\n\n to_binary(&query_position_lock_info(deps, position_idx)?)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_lock/src/contract.rs", "rank": 65, "score": 422950.99939144857 }, { "content": "pub fn register_migration<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n end_price: Decimal,\n\n) -> StdResult<HandleResponse> {\n\n let config = read_config(&deps.storage)?;\n\n if config.owner != deps.api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n let asset_token_raw = deps.api.canonical_address(&asset_token)?;\n\n let asset_config: AssetConfig = read_asset_config(&deps.storage, &asset_token_raw)?;\n\n\n\n // update asset config\n\n store_asset_config(\n\n &mut deps.storage,\n\n &asset_token_raw,\n\n &AssetConfig {\n\n end_price: Some(end_price),\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 66, "score": 417649.32726501045 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Receive(msg) => receive_cw20(deps, env, msg),\n\n HandleMsg::SubmitOrder {\n\n offer_asset,\n\n ask_asset,\n\n } => {\n\n if !offer_asset.is_native_token() {\n\n return Err(StdError::generic_err(\"must provide native token\"));\n\n }\n\n\n\n offer_asset.assert_sent_native_token_balance(&env)?;\n\n submit_order(deps, env.message.sender, offer_asset, ask_asset)\n\n }\n\n HandleMsg::CancelOrder { order_id } => cancel_order(deps, env, order_id),\n\n HandleMsg::ExecuteOrder {\n", "file_path": "contracts/mirror_limit_order/src/contract.rs", "rank": 67, "score": 417634.3991475393 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> HandleResult {\n\n match msg {\n\n HandleMsg::UpdateConfig {\n\n owner,\n\n mint_contract,\n\n factory_contract,\n\n base_denom,\n\n mirror_oracle,\n\n anchor_oracle,\n\n band_oracle,\n\n } => update_config(\n\n deps,\n\n env,\n\n owner,\n\n mint_contract,\n\n factory_contract,\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 68, "score": 417634.3991475393 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::CollateralPrice { asset } => 
to_binary(&query_collateral_price(deps, asset)?),\n\n QueryMsg::CollateralAssetInfo { asset } => to_binary(&query_collateral_info(deps, asset)?),\n\n QueryMsg::CollateralAssetInfos {} => to_binary(&query_collateral_infos(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 69, "score": 417495.2119245275 }, { "content": "pub fn query_positions<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n owner_addr: Option<HumanAddr>,\n\n asset_token: Option<HumanAddr>,\n\n start_after: Option<Uint128>,\n\n limit: Option<u32>,\n\n order_by: Option<OrderBy>,\n\n) -> StdResult<PositionsResponse> {\n\n let positions: Vec<Position> = if let Some(owner_addr) = owner_addr {\n\n read_positions_with_user_indexer(\n\n &deps.storage,\n\n &deps.api.canonical_address(&owner_addr)?,\n\n start_after,\n\n limit,\n\n order_by,\n\n )?\n\n } else if let Some(asset_token) = asset_token {\n\n read_positions_with_asset_indexer(\n\n &deps.storage,\n\n &deps.api.canonical_address(&asset_token)?,\n", "file_path": "contracts/mirror_mint/src/positions.rs", "rank": 70, "score": 417495.2119245275 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Order { order_id } => to_binary(&query_order(deps, order_id)?),\n\n QueryMsg::Orders {\n\n bidder_addr,\n\n start_after,\n\n limit,\n\n order_by,\n\n } => to_binary(&query_orders(\n\n deps,\n\n bidder_addr,\n\n start_after,\n\n limit,\n\n order_by,\n\n )?),\n\n QueryMsg::LastOrderId {} => to_binary(&query_last_order_id(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_limit_order/src/contract.rs", "rank": 71, "score": 417495.2119245275 }, { "content": "pub fn query_position<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n position_idx: Uint128,\n\n) -> StdResult<PositionResponse> {\n\n let position: Position = read_position(&deps.storage, position_idx)?;\n\n let resp = PositionResponse {\n\n idx: position.idx,\n\n owner: deps.api.human_address(&position.owner)?,\n\n collateral: position.collateral.to_normal(&deps)?,\n\n asset: position.asset.to_normal(&deps)?,\n\n is_short: is_short_position(&deps.storage, position.idx)?,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/positions.rs", "rank": 72, "score": 417495.2119245275 }, { "content": "pub fn query_staker<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n address: HumanAddr,\n\n) -> StdResult<StakerResponse> {\n\n let addr_raw = deps.api.canonical_address(&address).unwrap();\n\n let config: Config = config_read(&deps.storage).load()?;\n\n let state: State = state_read(&deps.storage).load()?;\n\n let mut token_manager = bank_read(&deps.storage)\n\n .may_load(addr_raw.as_slice())?\n\n .unwrap_or_default();\n\n\n\n // calculate pending voting rewards\n\n let w_polls: Vec<(Poll, VoterInfo)> =\n\n get_withdrawable_polls(&deps.storage, &token_manager, &addr_raw);\n\n\n\n let mut user_reward_amount = Uint128::zero();\n\n let w_polls_res: Vec<(u64, Uint128)> = w_polls\n\n .iter()\n\n .map(|(poll, voting_info)| {\n\n // calculate reward share\n", "file_path": "contracts/mirror_gov/src/staking.rs", "rank": 73, "score": 417495.21192452754 }, { "content": "pub fn udpate_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n spend_limit: Option<Uint128>,\n\n) -> HandleResult {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n 
if config.owner != deps.api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n\n }\n\n\n\n if let Some(spend_limit) = spend_limit {\n\n config.spend_limit = spend_limit;\n\n }\n\n\n\n store_config(&mut deps.storage, &config)?;\n\n\n\n Ok(HandleResponse {\n\n messages: vec![],\n\n log: vec![log(\"action\", \"update_config\")],\n\n data: None,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_community/src/contract.rs", "rank": 74, "score": 417457.0008688137 }, { "content": "// Check zero balance & same asset with position\n\npub fn assert_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n position: &Position,\n\n asset: &Asset,\n\n) -> StdResult<()> {\n\n if !asset.info.equal(&position.asset.info.to_normal(&deps)?) || asset.amount.is_zero() {\n\n return Err(StdError::generic_err(\"Wrong asset\"));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/mirror_mint/src/asserts.rs", "rank": 75, "score": 417411.7011089108 }, { "content": "pub fn revoke_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n end_price: Decimal,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n let oracle_feeder: HumanAddr = deps.api.human_address(&load_oracle_feeder(\n\n &deps,\n\n &deps.api.human_address(&config.oracle_contract)?,\n\n &asset_token_raw,\n\n )?)?;\n\n let sender_raw = deps.api.canonical_address(&env.message.sender)?;\n\n\n\n // revoke asset can only be executed by the feeder or the owner (gov contract)\n\n if oracle_feeder != env.message.sender && config.owner != sender_raw {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 76, "score": 417406.92167553445 }, { "content": "pub fn update_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n auction_discount: Option<Decimal>,\n\n min_collateral_ratio: Option<Decimal>,\n\n ipo_params: Option<IPOParams>,\n\n) -> StdResult<HandleResponse> {\n\n let config: Config = read_config(&deps.storage)?;\n\n let asset_token_raw = deps.api.canonical_address(&asset_token)?;\n\n let mut asset: AssetConfig = read_asset_config(&deps.storage, &asset_token_raw)?;\n\n\n\n if deps.api.canonical_address(&env.message.sender)? 
!= config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(auction_discount) = auction_discount {\n\n assert_auction_discount(auction_discount)?;\n\n asset.auction_discount = auction_discount;\n\n }\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 77, "score": 417406.92167553445 }, { "content": "pub fn migrate_asset<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n name: String,\n\n symbol: String,\n\n asset_token: HumanAddr,\n\n end_price: Decimal,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n let oracle_feeder: HumanAddr = deps.api.human_address(&load_oracle_feeder(\n\n &deps,\n\n &deps.api.human_address(&config.oracle_contract)?,\n\n &asset_token_raw,\n\n )?)?;\n\n\n\n if oracle_feeder != env.message.sender {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 78, "score": 417406.92167553445 }, { "content": "pub fn adjust_premium<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_tokens: Vec<HumanAddr>,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n let oracle_contract = deps.api.human_address(&config.oracle_contract)?;\n\n let terraswap_factory = deps.api.human_address(&config.terraswap_factory)?;\n\n for asset_token in asset_tokens.iter() {\n\n let asset_token_raw = deps.api.canonical_address(&asset_token)?;\n\n let pool_info: PoolInfo = read_pool_info(&deps.storage, &asset_token_raw)?;\n\n if env.block.time < pool_info.premium_updated_time + config.premium_min_update_interval {\n\n return Err(StdError::generic_err(\n\n \"cannot adjust premium before premium_min_update_interval passed\",\n\n ));\n\n }\n\n\n\n let premium_rate = compute_premium_rate(\n\n deps,\n\n &oracle_contract,\n", "file_path": "contracts/mirror_staking/src/rewards.rs", "rank": 79, "score": 417399.1759796536 }, { "content": "pub fn read_prices<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n start_after: Option<CanonicalAddr>,\n\n limit: Option<u32>,\n\n order_by: Option<OrderBy>,\n\n) -> StdResult<Vec<PricesResponseElem>> {\n\n let price_bucket: ReadonlyBucket<S, PriceInfo> =\n\n ReadonlyBucket::new(PREFIX_PRICE, &deps.storage);\n\n\n\n let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;\n\n let (start, end, order_by) = match order_by {\n\n Some(OrderBy::Asc) => (calc_range_start(start_after), None, OrderBy::Asc),\n\n _ => (None, calc_range_end(start_after), OrderBy::Desc),\n\n };\n\n\n\n price_bucket\n\n .range(start.as_deref(), end.as_deref(), order_by.into())\n\n .take(limit)\n\n .map(|item| {\n\n let (k, v) = item?;\n", "file_path": "contracts/mirror_oracle/src/state.rs", "rank": 80, "score": 417237.89570736873 }, { "content": "pub fn register_collateral<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset: AssetInfo,\n\n price_source: SourceType,\n\n multiplier: Decimal,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n let sender_address_raw: CanonicalAddr = deps.api.canonical_address(&env.message.sender)?;\n\n // only contract onwner and mint contract can register a new collateral\n\n if config.owner != sender_address_raw && config.mint_contract != sender_address_raw {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if read_collateral_info(&deps.storage, 
&asset.to_string()).is_ok() {\n\n return Err(StdError::generic_err(\"Collateral was already registered\"));\n\n }\n\n\n\n if multiplier.is_zero() {\n\n return Err(StdError::generic_err(\"Multiplier must be bigger than 0\"));\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 81, "score": 412403.2485937414 }, { "content": "pub fn query_orders<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n bidder_addr: Option<HumanAddr>,\n\n start_after: Option<u64>,\n\n limit: Option<u32>,\n\n order_by: Option<OrderBy>,\n\n) -> StdResult<OrdersResponse> {\n\n let orders: Vec<Order> = if let Some(bidder_addr) = bidder_addr {\n\n let bidder_addr_raw = deps.api.canonical_address(&bidder_addr)?;\n\n read_orders_with_bidder_indexer(\n\n &deps.storage,\n\n &bidder_addr_raw,\n\n start_after,\n\n limit,\n\n order_by,\n\n )?\n\n } else {\n\n read_orders(&deps.storage, start_after, limit, order_by)?\n\n };\n\n\n", "file_path": "contracts/mirror_limit_order/src/order.rs", "rank": 82, "score": 412252.13761894195 }, { "content": "pub fn query_order<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n order_id: u64,\n\n) -> StdResult<OrderResponse> {\n\n let order: Order = read_order(&deps.storage, order_id)?;\n\n let resp = OrderResponse {\n\n order_id: order.order_id,\n\n bidder_addr: deps.api.human_address(&order.bidder_addr)?,\n\n offer_asset: order.offer_asset.to_normal(&deps)?,\n\n ask_asset: order.ask_asset.to_normal(&deps)?,\n\n filled_offer_amount: order.filled_offer_amount,\n\n filled_ask_amount: order.filled_ask_amount,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_limit_order/src/order.rs", "rank": 83, "score": 412252.13761894195 }, { "content": "pub fn query_distribution_info<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<DistributionInfoResponse> {\n\n let weights: Vec<(CanonicalAddr, u32)> = read_all_weight(&deps.storage)?;\n\n let last_distributed = read_last_distributed(&deps.storage)?;\n\n let resp = DistributionInfoResponse {\n\n last_distributed,\n\n weights: weights\n\n .iter()\n\n .map(|w| Ok((deps.api.human_address(&w.0)?, w.1)))\n\n .collect::<StdResult<Vec<(HumanAddr, u32)>>>()?,\n\n };\n\n\n\n Ok(resp)\n\n}\n\n\n", "file_path": "contracts/mirror_factory/src/contract.rs", "rank": 84, "score": 412252.13761894195 }, { "content": "pub fn try_update_config<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n owner: Option<HumanAddr>,\n\n) -> HandleResult {\n\n let mut config: Config = read_config(&deps.storage)?;\n\n if deps.api.canonical_address(&env.message.sender)? 
!= config.owner {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(owner) = owner {\n\n config.owner = deps.api.canonical_address(&owner)?;\n\n }\n\n\n\n store_config(&mut deps.storage, &config)?;\n\n Ok(HandleResponse::default())\n\n}\n\n\n", "file_path": "contracts/mirror_oracle/src/contract.rs", "rank": 86, "score": 412214.67145977233 }, { "content": "pub fn stake_voting_rewards<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> HandleResult {\n\n let config: Config = config_store(&mut deps.storage).load()?;\n\n let mut state: State = state_store(&mut deps.storage).load()?;\n\n let sender_address_raw = deps.api.canonical_address(&env.message.sender)?;\n\n let key = sender_address_raw.as_slice();\n\n\n\n let mut token_manager = bank_read(&deps.storage)\n\n .load(key)\n\n .or(Err(StdError::generic_err(\"Nothing staked\")))?;\n\n\n\n let user_reward_amount: u128 =\n\n withdraw_user_voting_rewards(&mut deps.storage, &sender_address_raw, &token_manager);\n\n if user_reward_amount.eq(&0u128) {\n\n return Err(StdError::generic_err(\"Nothing to withdraw\"));\n\n }\n\n\n\n // add the withdrawn rewards to stake pool and calculate share\n", "file_path": "contracts/mirror_gov/src/staking.rs", "rank": 87, "score": 412157.97382447345 }, { "content": "pub fn withdraw_voting_rewards<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> HandleResult {\n\n let config: Config = config_store(&mut deps.storage).load()?;\n\n let sender_address_raw = deps.api.canonical_address(&env.message.sender)?;\n\n let key = sender_address_raw.as_slice();\n\n\n\n let token_manager = bank_read(&deps.storage)\n\n .load(key)\n\n .or(Err(StdError::generic_err(\"Nothing staked\")))?;\n\n\n\n let user_reward_amount: u128 =\n\n withdraw_user_voting_rewards(&mut deps.storage, &sender_address_raw, &token_manager);\n\n if user_reward_amount.eq(&0u128) {\n\n return Err(StdError::generic_err(\"Nothing to withdraw\"));\n\n }\n\n\n\n state_store(&mut deps.storage).update(|mut state| {\n\n state.pending_voting_rewards =\n", "file_path": "contracts/mirror_gov/src/staking.rs", "rank": 88, "score": 412157.97382447345 }, { "content": "pub fn receive_cw20<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n cw20_msg: Cw20ReceiveMsg,\n\n) -> HandleResult {\n\n let passed_asset: Asset = Asset {\n\n info: AssetInfo::Token {\n\n contract_addr: env.message.sender.clone(),\n\n },\n\n amount: cw20_msg.amount,\n\n };\n\n\n\n if let Some(msg) = cw20_msg.msg {\n\n match from_binary(&msg)? {\n\n Cw20HookMsg::OpenPosition {\n\n asset_info,\n\n collateral_ratio,\n\n short_params,\n\n } => open_position(\n\n deps,\n", "file_path": "contracts/mirror_mint/src/contract.rs", "rank": 89, "score": 411774.57944949565 }, { "content": "pub fn receive_cw20<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n cw20_msg: Cw20ReceiveMsg,\n\n) -> HandleResult {\n\n // only asset contract can execute this message\n\n let config: Config = config_read(&deps.storage).load()?;\n\n if config.mirror_token != deps.api.canonical_address(&env.message.sender)? {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n if let Some(msg) = cw20_msg.msg {\n\n match from_binary(&msg)? 
{\n\n Cw20HookMsg::StakeVotingTokens {} => {\n\n stake_voting_tokens(deps, env, cw20_msg.sender, cw20_msg.amount)\n\n }\n\n Cw20HookMsg::CreatePoll {\n\n title,\n\n description,\n\n link,\n", "file_path": "contracts/mirror_gov/src/contract.rs", "rank": 90, "score": 411774.57944949565 }, { "content": "// only mint contract can execute the operation\n\npub fn decrease_short_token<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n staker_addr: HumanAddr,\n\n asset_token: HumanAddr,\n\n amount: Uint128,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n if deps.api.canonical_address(&env.message.sender)? != config.mint_contract {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n let staker_addr_raw: CanonicalAddr = deps.api.canonical_address(&staker_addr)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n\n\n // not used\n\n let _ = _decrease_bond_amount(\n\n &mut deps.storage,\n\n &staker_addr_raw,\n\n &asset_token_raw,\n", "file_path": "contracts/mirror_staking/src/staking.rs", "rank": 92, "score": 407328.3072840518 }, { "content": "pub fn auto_stake_hook<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n asset_token: HumanAddr,\n\n staking_token: HumanAddr,\n\n staker_addr: HumanAddr,\n\n prev_staking_token_amount: Uint128,\n\n) -> HandleResult {\n\n // only can be called by itself\n\n if env.message.sender != env.contract.address {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n // stake all lp tokens received, compare with staking token amount before liquidity provision was executed\n\n let current_staking_token_amount =\n\n query_token_balance(&deps, &staking_token, &env.contract.address)?;\n\n let amount_to_stake = (current_staking_token_amount - prev_staking_token_amount)?;\n\n\n\n bond(deps, env, staker_addr, asset_token, amount_to_stake)\n\n}\n\n\n", "file_path": "contracts/mirror_staking/src/staking.rs", "rank": 93, "score": 407328.3072840518 }, { "content": "// only mint contract can execute the operation\n\npub fn increase_short_token<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n staker_addr: HumanAddr,\n\n asset_token: HumanAddr,\n\n amount: Uint128,\n\n) -> HandleResult {\n\n let config: Config = read_config(&deps.storage)?;\n\n if deps.api.canonical_address(&env.message.sender)? != config.mint_contract {\n\n return Err(StdError::unauthorized());\n\n }\n\n\n\n let staker_addr_raw: CanonicalAddr = deps.api.canonical_address(&staker_addr)?;\n\n let asset_token_raw: CanonicalAddr = deps.api.canonical_address(&asset_token)?;\n\n\n\n _increase_bond_amount(\n\n &mut deps.storage,\n\n &staker_addr_raw,\n\n &asset_token_raw,\n\n amount,\n", "file_path": "contracts/mirror_staking/src/staking.rs", "rank": 94, "score": 407328.3072840518 }, { "content": "pub fn receive_cw20<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n cw20_msg: Cw20ReceiveMsg,\n\n) -> HandleResult {\n\n if let Some(msg) = cw20_msg.msg {\n\n let provided_asset = Asset {\n\n info: AssetInfo::Token {\n\n contract_addr: env.message.sender,\n\n },\n\n amount: cw20_msg.amount,\n\n };\n\n\n\n match from_binary(&msg)? 
{\n\n Cw20HookMsg::SubmitOrder { ask_asset } => {\n\n submit_order(deps, cw20_msg.sender, provided_asset, ask_asset)\n\n }\n\n Cw20HookMsg::ExecuteOrder { order_id } => execute_order(\n\n deps,\n\n cw20_msg.sender,\n\n env.contract.address,\n\n provided_asset,\n\n order_id,\n\n ),\n\n }\n\n } else {\n\n Err(StdError::generic_err(\"data should be given\"))\n\n }\n\n}\n\n\n", "file_path": "contracts/mirror_limit_order/src/contract.rs", "rank": 95, "score": 407295.19582964294 }, { "content": "pub fn query_next_position_idx<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<NextPositionIdxResponse> {\n\n let idx = read_position_idx(&deps.storage)?;\n\n let resp = NextPositionIdxResponse {\n\n next_position_idx: idx,\n\n };\n\n\n\n Ok(resp)\n\n}\n", "file_path": "contracts/mirror_mint/src/positions.rs", "rank": 96, "score": 407209.5742855853 }, { "content": "pub fn query_collateral_info<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n quote_asset: String,\n\n) -> StdResult<CollateralInfoResponse> {\n\n let collateral: CollateralAssetInfo =\n\n if let Ok(res) = read_collateral_info(&deps.storage, &quote_asset) {\n\n res\n\n } else {\n\n return Err(StdError::generic_err(\"Collateral asset not found\"));\n\n };\n\n\n\n Ok(CollateralInfoResponse {\n\n asset: collateral.asset,\n\n source_type: collateral.price_source.to_string(),\n\n multiplier: collateral.multiplier,\n\n is_revoked: collateral.is_revoked,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 97, "score": 407209.5742855853 }, { "content": "pub fn query_collateral_infos<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<CollateralInfosResponse> {\n\n let infos: Vec<CollateralInfoResponse> = read_collateral_infos(&deps.storage)?;\n\n\n\n Ok(CollateralInfosResponse { collaterals: infos })\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 98, "score": 407209.5742855853 }, { "content": "pub fn query_collateral_price<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n quote_asset: String,\n\n) -> StdResult<CollateralPriceResponse> {\n\n let config: Config = read_config(&deps.storage)?;\n\n\n\n let collateral: CollateralAssetInfo =\n\n if let Ok(res) = read_collateral_info(&deps.storage, &quote_asset) {\n\n res\n\n } else {\n\n return Err(StdError::generic_err(\"Collateral asset not found\"));\n\n };\n\n\n\n let (price, last_updated): (Decimal, u64) =\n\n query_price(deps, &config, &quote_asset, &collateral.price_source)?;\n\n\n\n Ok(CollateralPriceResponse {\n\n asset: collateral.asset,\n\n rate: price,\n\n last_updated,\n\n multiplier: collateral.multiplier,\n\n is_revoked: collateral.is_revoked,\n\n })\n\n}\n\n\n", "file_path": "contracts/mirror_collateral_oracle/src/contract.rs", "rank": 99, "score": 407209.5742855853 } ]
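A pattern repeated across the contract handlers quoted above (update_config, try_update_config, update_asset, udpate_config, ...) is the owner check: canonicalize env.message.sender, compare it with the stored config.owner, and bail out with StdError::unauthorized() on a mismatch. Below is a minimal sketch of that guard factored into a shared helper, written against the legacy CosmWasm API these snippets use; the name assert_owner and the free-standing form are illustrative only and do not appear in the quoted contracts.

use cosmwasm_std::{Api, CanonicalAddr, Env, Extern, Querier, StdError, StdResult, Storage};

// Hypothetical helper capturing the authorization guard repeated in the snippets above.
fn assert_owner<S: Storage, A: Api, Q: Querier>(
    deps: &Extern<S, A, Q>,
    env: &Env,
    owner: &CanonicalAddr,
) -> StdResult<()> {
    // Canonicalize the sender and compare it with the configured owner address.
    if deps.api.canonical_address(&env.message.sender)? != *owner {
        return Err(StdError::unauthorized());
    }
    Ok(())
}

With such a helper, each handler would start with assert_owner(&deps, &env, &config.owner)?; instead of repeating the inline comparison.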
Rust
src/message/message.rs
ddimaria/stun-server
4557238c5f05f69105c1834da09f62aad74b826e
use crate::error::{Error, Result}; use crate::message::attribute::Attribute; use crate::message::class::Class; use crate::message::method::Method; use crate::message::transaction_id::TransactionId; use bytes::{Buf, BufMut, Bytes, BytesMut}; pub(crate) const MAGIC_COOKIE: u32 = 0x2112A442; pub(crate) const MESSAGE_HEADER_LENGTH: usize = 20; #[derive(Debug, PartialEq)] pub(crate) struct Message { pub(crate) class: Class, pub(crate) method: Method, pub(crate) transaction_id: TransactionId, pub(crate) attributes: Vec<Attribute>, } impl Message { pub(crate) fn binding_request(attributes: Vec<Attribute>) -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn binding_response(attributes: Vec<Attribute>) -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn encode(&self, buf: &mut BytesMut) { let transaction_id = &self.transaction_id.0; let class = self.class.encode(); let method = self.method.encode(); buf.put_u16(class + method); let mut body = BytesMut::with_capacity(256); let mut message_length: u16 = 0; for attribute in &self.attributes { message_length += attribute.encode(&mut body, &self.transaction_id); } buf.put_u16(message_length); buf.put_u32(MAGIC_COOKIE); buf.put_slice(transaction_id); buf.put_slice(body.as_ref()); } pub(crate) fn decode(buffer: &mut Bytes) -> Result<Message> { let mut attributes: Vec<Attribute> = Vec::new(); if buffer.remaining() < MESSAGE_HEADER_LENGTH { return Err(Error::Decode(format!( "Not enough bytes in the header. Expected {}, but got {}", 20, buffer.remaining() ))); } let message_type = buffer.get_u16(); let class = Class::decode(message_type)?; let method = Method::decode(message_type); let message_length = buffer.get_u16() as usize; let magic_cookie = buffer.get_u32(); let transaction_id = TransactionId::decode(buffer)?; if magic_cookie != MAGIC_COOKIE { return Err(Error::Decode(format!( "Invalid magic cookie. 
Expected {}, but got {}.", MAGIC_COOKIE, magic_cookie ))); } let attributes_length = buffer.remaining() - message_length; while buffer.remaining() > attributes_length { let attribute = Attribute::decode(buffer, &transaction_id)?; attributes.push(attribute); } let msg = Message { class, method, transaction_id, attributes, }; Ok(msg) } } #[cfg(test)] pub(crate) mod tests { use super::*; pub(crate) const BINDING_REQUEST: &[u8; 20] = b"\0\x01\0\0!\x12\xa4B\xb0\xb8?\0\xda\x0c\xa2\xc3(\xe1\xf2\x85"; pub(crate) const BINDING_RESPONSE: &[u8; 20] = b"\x01\x01\0\0!\x12\xa4B\xc3>bhW \xc0\x8e\xd8\xf1y\x88"; pub(crate) fn decode_message(buffer: &[u8; 20]) -> Message { let mut buffer = Bytes::copy_from_slice(buffer); Message::decode(&mut buffer).unwrap() } pub(crate) fn binding_request() -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId([176, 184, 63, 0, 218, 12, 162, 195, 40, 225, 242, 133]), attributes: vec![], } } pub(crate) fn binding_response() -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId([195, 62, 98, 104, 87, 32, 192, 142, 216, 241, 121, 136]), attributes: vec![], } } #[test] fn it_encodes_a_binding_request() { let mut buffer = BytesMut::new(); let message = binding_request(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_REQUEST); assert_eq!(buffer, expected_buffer); } #[test] fn it_encodes_a_binding_response() { let mut buffer = BytesMut::new(); let message = binding_response(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_RESPONSE); assert_eq!(buffer, expected_buffer); } #[test] fn it_decodes_a_binding_request() { let message = decode_message(BINDING_REQUEST); let expected = binding_request(); assert_eq!(message, expected); } #[test] fn it_decodes_a_binding_response() { let message = decode_message(BINDING_RESPONSE); let expected = binding_response(); assert_eq!(message, expected); } }
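The tests above check encode and decode separately against canned byte strings; a dynamic round-trip check ties the two together. A minimal sketch, written as if it were added to the same tests module so that use super::* covers the imports (it is not part of the original file):

#[test]
fn it_round_trips_a_binding_request() {
    // The first two wire bytes are the message type: 0x0001 for a Binding
    // request, 0x0101 for a Binding success response (see the BINDING_REQUEST
    // and BINDING_RESPONSE constants above).
    let request = Message::binding_request(vec![]);

    let mut wire = BytesMut::new();
    request.encode(&mut wire);
    // No attributes, so only the 20-byte header is written.
    assert_eq!(wire.len(), MESSAGE_HEADER_LENGTH);

    let mut wire = wire.freeze();
    let decoded = Message::decode(&mut wire).unwrap();
    assert_eq!(decoded.class, Class::Request);
    assert_eq!(decoded.method, Method::Binding);
    assert_eq!(decoded.transaction_id, request.transaction_id);
    assert!(decoded.attributes.is_empty());
}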
use crate::error::{Error, Result}; use crate::message::attribute::Attribute; use crate::message::class::Class; use crate::message::method::Method; use crate::message::transaction_id::TransactionId; use bytes::{Buf, BufMut, Bytes, BytesMut}; pub(crate) const MAGIC_COOKIE: u32 = 0x2112A442; pub(crate) const MESSAGE_HEADER_LENGTH: usize = 20; #[derive(Debug, PartialEq)] pub(crate) struct Message { pub(crate) class: Class, pub(crate) method: Method, pub(crate) transaction_id: TransactionId, pub(crate) attributes: Vec<Attribute>, } impl Message { pub(crate) fn binding_request(attributes: Vec<Attribute>) -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn binding_response(attributes: Vec<Attribute>) -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId::new(), attributes, } } pub(crate) fn encode(&self, buf: &mut BytesMut) { let transaction_id = &self.transaction_id.0; let class = self.class.encode(); let method = self.method.encode(); buf.put_u16(class + method); let mut body = BytesMut::with_capacity(256); let mut message_length: u16 = 0; for attribute in &self.attributes { message_length += attribute.encode(&mut body, &self.transaction_id); } buf.put_u16(message_length); buf.put_u32(MAGIC_COOKIE); buf.put_slice(transaction_id); buf.put_slice(body.as_ref()); } pub(crate) fn decode(buffer: &mut Bytes) -> Result<Message> { let mut attributes: Vec<Attribute> = Vec::new();
let message_type = buffer.get_u16(); let class = Class::decode(message_type)?; let method = Method::decode(message_type); let message_length = buffer.get_u16() as usize; let magic_cookie = buffer.get_u32(); let transaction_id = TransactionId::decode(buffer)?; if magic_cookie != MAGIC_COOKIE { return Err(Error::Decode(format!( "Invalid magic cookie. Expected {}, but got {}.", MAGIC_COOKIE, magic_cookie ))); } let attributes_length = buffer.remaining() - message_length; while buffer.remaining() > attributes_length { let attribute = Attribute::decode(buffer, &transaction_id)?; attributes.push(attribute); } let msg = Message { class, method, transaction_id, attributes, }; Ok(msg) } } #[cfg(test)] pub(crate) mod tests { use super::*; pub(crate) const BINDING_REQUEST: &[u8; 20] = b"\0\x01\0\0!\x12\xa4B\xb0\xb8?\0\xda\x0c\xa2\xc3(\xe1\xf2\x85"; pub(crate) const BINDING_RESPONSE: &[u8; 20] = b"\x01\x01\0\0!\x12\xa4B\xc3>bhW \xc0\x8e\xd8\xf1y\x88"; pub(crate) fn decode_message(buffer: &[u8; 20]) -> Message { let mut buffer = Bytes::copy_from_slice(buffer); Message::decode(&mut buffer).unwrap() } pub(crate) fn binding_request() -> Message { Message { class: Class::Request, method: Method::Binding, transaction_id: TransactionId([176, 184, 63, 0, 218, 12, 162, 195, 40, 225, 242, 133]), attributes: vec![], } } pub(crate) fn binding_response() -> Message { Message { class: Class::SuccessResponse, method: Method::Binding, transaction_id: TransactionId([195, 62, 98, 104, 87, 32, 192, 142, 216, 241, 121, 136]), attributes: vec![], } } #[test] fn it_encodes_a_binding_request() { let mut buffer = BytesMut::new(); let message = binding_request(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_REQUEST); assert_eq!(buffer, expected_buffer); } #[test] fn it_encodes_a_binding_response() { let mut buffer = BytesMut::new(); let message = binding_response(); message.encode(&mut buffer); let mut expected_buffer = BytesMut::with_capacity(0); expected_buffer.extend_from_slice(BINDING_RESPONSE); assert_eq!(buffer, expected_buffer); } #[test] fn it_decodes_a_binding_request() { let message = decode_message(BINDING_REQUEST); let expected = binding_request(); assert_eq!(message, expected); } #[test] fn it_decodes_a_binding_response() { let message = decode_message(BINDING_RESPONSE); let expected = binding_response(); assert_eq!(message, expected); } }
if buffer.remaining() < MESSAGE_HEADER_LENGTH {
    return Err(Error::Decode(format!(
        "Not enough bytes in the header. Expected {}, but got {}",
        20,
        buffer.remaining()
    )));
}
if_condition
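The if_condition middle shown above is the header-length guard at the top of Message::decode: it rejects datagrams shorter than the fixed 20-byte STUN header before any field is read. A small companion check for that error path, again sketched as a crate-internal test rather than code taken from the repository:

#[test]
fn it_rejects_a_truncated_header() {
    // Fewer than MESSAGE_HEADER_LENGTH (20) bytes: decode must fail before it
    // tries to read the type, length, magic cookie or transaction id.
    let mut buffer = Bytes::copy_from_slice(&[0u8; 4]);
    assert!(Message::decode(&mut buffer).is_err());
}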
[ { "content": " method.into()\n\n }\n\n}\n\n\n\nimpl From<u16> for Method {\n\n fn from(value: u16) -> Method {\n\n match value {\n\n 0x001 => Method::Binding,\n\n _ => unimplemented!(\"Only binding methods are allowed\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Into<u16> for &Method {\n\n fn into(self) -> u16 {\n\n match self {\n\n Method::Binding => 0x001,\n\n }\n\n }\n\n}\n", "file_path": "src/message/method.rs", "rank": 0, "score": 35231.29536469979 }, { "content": " Binding,\n\n}\n\n\n\nimpl Method {\n\n pub(crate) fn encode(&self) -> u16 {\n\n let method: u16 = self.into();\n\n let method = method & 0xFFF;\n\n let method_part_0_3 = method & 0x000F; // M0-M3\n\n let method_part_4_6 = method & 0x0070; // M4-M6\n\n let method_part_7_11 = method & 0x0F80; // M7-M11\n\n\n\n method_part_0_3 + method_part_4_6 + method_part_7_11\n\n }\n\n\n\n pub(crate) fn decode(value: u16) -> Self {\n\n let method_part_0_3 = value & 0xf; // M0-M3\n\n let method_part_4_6 = (value >> 1) & 0x70; // M4-M6\n\n let method_part_7_11 = (value >> 2) & 0xf80; // M7-M11\n\n let method = method_part_0_3 + method_part_4_6 + method_part_7_11;\n\n\n", "file_path": "src/message/method.rs", "rank": 1, "score": 35229.2580662626 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_encodes_all_methods() {\n\n let encoded = Method::Binding.encode();\n\n assert_eq!(encoded, (&Method::Binding).into());\n\n }\n\n\n\n #[test]\n\n fn it_decodes_all_methods() {\n\n let decoded = Method::decode((&Method::Binding).into());\n\n assert_eq!(decoded, Method::Binding);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn it_panics_when_decoding_a_non_binding_method() {\n\n let method = 0x0002;\n\n let _: Method = method.into();\n\n }\n\n}\n", "file_path": "src/message/method.rs", "rank": 2, "score": 35227.3935077807 }, { "content": "/// The message type field is decomposed further into the following structure:\n\n///\n\n/// 0 1\n\n/// 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n\n/// +--+--+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// |M |M |M|M|M|C|M|M|M|C|M|M|M|M|\n\n/// |11|10|9|8|7|1|6|5|4|0|3|2|1|0|\n\n/// +--+--+-+-+-+-+-+-+-+-+-+-+-+-+\n\n///\n\n/// Here the bits in the message type field are shown as most significant (M11)\n\n/// through least significant (M0). M11 through M0 represent a 12-bit encoding\n\n/// of the method. C1 and C0 represent a 2-bit encoding of the class. A class\n\n/// of 0b00 is a request, a class of 0b01 is an indication, a class of 0b10\n\n/// is a success response, and a class of 0b11 is an error response. This\n\n/// specification defines a single method, Binding. The method and class are\n\n/// orthogonal, so that for each method, a request, success response, error response,\n\n/// and indication are possible for that method. 
Extensions defining new methods\n\n/// MUST indicate which classes are permitted for that method.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum Method {\n", "file_path": "src/message/method.rs", "rank": 3, "score": 35226.914130176905 }, { "content": "/// this specification is found in Section 17.3.\n\n///\n\n/// The rest of this section describes the format of the various attributes defined\n\n/// in this specification.\n\n#[derive(Eq, PartialEq, Debug)]\n\npub(crate) enum Attribute {\n\n Username(String),\n\n Password(String),\n\n ErrorCode { code: u32, reason: String },\n\n FingerPrint(String),\n\n XorMappedAddress(Address),\n\n UnknownAttributes(Vec<u16>),\n\n}\n\n\n\nimpl From<&mut BytesMut> for Attribute {\n\n fn from(buffer: &mut BytesMut) -> Attribute {\n\n let code = buffer.get_u16();\n\n let _message_length = buffer.get_u16();\n\n let value_32: Vec<u8> = [\n\n buffer.get_u8().to_be_bytes(),\n", "file_path": "src/message/attribute.rs", "rank": 4, "score": 35112.697726249055 }, { "content": "use std::convert::{TryFrom, TryInto};\n\n\n\nuse crate::error::{Error, Result};\n\nuse crate::message::transaction_id::TransactionId;\n\nuse crate::utils::Address;\n\nuse bytes::{Buf, Bytes, BytesMut};\n\n\n\n/// After the STUN header are zero or more attributes. Each attribute MUST be\n\n/// TLV encoded, with a 16-bit type, 16-bit length, and value. Each STUN\n\n/// attribute MUST end on a 32-bit boundary. As mentioned above, all fields\n\n/// in an attribute are transmitted most significant bit first.\n\n///\n\n///\n\n/// 0 1 2 3\n\n/// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n\n/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// | Type | Length |\n\n/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// | Value (variable) ....\n\n/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n", "file_path": "src/message/attribute.rs", "rank": 5, "score": 35112.44547883326 }, { "content": "}\n\n\n\nimpl Attribute {\n\n pub(crate) fn encode(&self, buffer: &mut BytesMut, transaction_id: &TransactionId) -> u16 {\n\n 0\n\n }\n\n\n\n pub(crate) fn decode(buffer: &Bytes, transaction_id: &TransactionId) -> Result<Self> {\n\n Ok(Attribute::UnknownAttributes(vec![]))\n\n }\n\n}\n", "file_path": "src/message/attribute.rs", "rank": 6, "score": 35112.39943623023 }, { "content": "///\n\n/// The value in the length field MUST contain the length of the Value part of\n\n/// the attribute, prior to padding, measured in bytes. Since STUN aligns attributes\n\n/// on 32-bit boundaries, attributes whose content is not a multiple of 4 bytes\n\n/// are padded with 1, 2, or 3 bytes of padding so that its value contains a multiple\n\n/// of 4 bytes. The padding bits are ignored, and may be any value.\n\n///\n\n/// Any attribute type MAY appear more than once in a STUN message. Unless specified\n\n/// otherwise, the order of appearance is significant: only the first occurrence needs\n\n/// to be processed by a receiver, and any duplicates MAY be ignored by a receiver.\n\n///\n\n/// To allow future revisions of this specification to add new attributes if needed,\n\n/// the attribute space is divided into two ranges. 
Attributes with type values between\n\n/// 0x0000 and 0x7FFF are comprehension-required attributes, which means that the STUN\n\n/// agent cannot successfully process the message unless it understands the attribute.\n\n/// Attributes with type values between 0x8000 and 0xFFFF are comprehension-optional\n\n/// attributes, which means that those attributes can be ignored by the STUN agent if\n\n/// it does not understand them.\n\n///\n\n/// The set of STUN attribute types is maintained by IANA. The initial set defined by\n", "file_path": "src/message/attribute.rs", "rank": 7, "score": 35106.2006231115 }, { "content": " buffer.get_u8().to_be_bytes(),\n\n buffer.get_u8().to_be_bytes(),\n\n buffer.get_u8().to_be_bytes(),\n\n ]\n\n .concat();\n\n let value = String::from_utf8(value_32).unwrap();\n\n\n\n match code {\n\n 0x0006 => Attribute::Username(value),\n\n 0x0007 => Attribute::Password(value),\n\n 0x0009 => Attribute::ErrorCode {\n\n code: 0,\n\n reason: value,\n\n },\n\n 0x000A => Attribute::UnknownAttributes(vec![code]),\n\n 0x0020 => Attribute::XorMappedAddress(Address::try_from(value).unwrap()),\n\n 0x8028 => Attribute::FingerPrint(value),\n\n _ => Attribute::UnknownAttributes(vec![code]),\n\n }\n\n }\n", "file_path": "src/message/attribute.rs", "rank": 8, "score": 35105.294305270705 }, { "content": " let class_part_1 = (value >> 7) & 0x2; // C1\n\n let class = class_part_0 + class_part_1;\n\n\n\n class.try_into()\n\n }\n\n}\n\n\n\nimpl TryFrom<u16> for Class {\n\n type Error = Error;\n\n\n\n fn try_from(value: u16) -> Result<Class> {\n\n match value {\n\n 0b00 => Ok(Class::Request),\n\n 0b01 => Ok(Class::Indication),\n\n 0b10 => Ok(Class::SuccessResponse),\n\n 0b11 => Ok(Class::FailureResponse),\n\n _ => Err(Error::Parse(format!(\n\n \"Could not convert {} to a message class\",\n\n value\n\n ))),\n", "file_path": "src/message/class.rs", "rank": 9, "score": 34984.95849878303 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Into<u16> for &Class {\n\n fn into(self) -> u16 {\n\n match self {\n\n Class::Request => 0b00,\n\n Class::Indication => 0b01,\n\n Class::SuccessResponse => 0b10,\n\n Class::FailureResponse => 0b11,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n const BINGING_REQUEST: u16 = 0b000000000;\n\n const BINGING_INDICATION_RESPONSE: u16 = 0b000010000;\n", "file_path": "src/message/class.rs", "rank": 10, "score": 34984.70339498564 }, { "content": "/// MUST indicate which classes are permitted for that method.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum Class {\n\n Request,\n\n Indication,\n\n SuccessResponse,\n\n FailureResponse,\n\n}\n\n\n\nimpl Class {\n\n pub(crate) fn encode(&self) -> u16 {\n\n let class: u16 = self.into();\n\n let class_part_0 = (class & 0x1) << 4; // C0\n\n let class_part_1 = (class & 0x2) << 7; // C1\n\n\n\n class_part_0 + class_part_1\n\n }\n\n\n\n pub(crate) fn decode(value: u16) -> Result<Self> {\n\n let class_part_0 = (value >> 4) & 0x1; // C0\n", "file_path": "src/message/class.rs", "rank": 11, "score": 34984.57959392152 }, { "content": "use crate::error::{Error, Result};\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\n/// The message type field is decomposed further into the following structure:\n\n///\n\n/// 0 1\n\n/// 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n\n/// +--+--+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// |M |M |M|M|M|C|M|M|M|C|M|M|M|M|\n\n/// |11|10|9|8|7|1|6|5|4|0|3|2|1|0|\n\n/// +--+--+-+-+-+-+-+-+-+-+-+-+-+-+\n\n///\n\n/// Here the bits in the message type field are shown as most significant (M11)\n\n/// through least significant 
(M0). M11 through M0 represent a 12-bit encoding\n\n/// of the method. C1 and C0 represent a 2-bit encoding of the class. A class\n\n/// of 0b00 is a request, a class of 0b01 is an indication, a class of 0b10\n\n/// is a success response, and a class of 0b11 is an error response. This\n\n/// specification defines a single method, Binding. The method and class are\n\n/// orthogonal, so that for each method, a request, success response, error response,\n\n/// and indication are possible for that method. Extensions defining new methods\n", "file_path": "src/message/class.rs", "rank": 12, "score": 34982.33845081844 }, { "content": " const BINGING_SUCCESS: u16 = 0b100000000;\n\n const BINGING_FAILURE_RESPONSE: u16 = 0b100010000;\n\n\n\n #[test]\n\n fn it_encodes_all_classes() {\n\n let encoded = Class::Request.encode();\n\n assert_eq!(encoded, BINGING_REQUEST);\n\n\n\n let encoded = Class::Indication.encode();\n\n assert_eq!(encoded, BINGING_INDICATION_RESPONSE);\n\n\n\n let encoded = Class::SuccessResponse.encode();\n\n assert_eq!(encoded, BINGING_SUCCESS);\n\n\n\n let encoded = Class::FailureResponse.encode();\n\n assert_eq!(encoded, BINGING_FAILURE_RESPONSE);\n\n }\n\n\n\n #[test]\n\n fn it_decodes_all_classes() {\n", "file_path": "src/message/class.rs", "rank": 13, "score": 34980.50543199455 }, { "content": " let decoded = Class::decode(BINGING_REQUEST).unwrap();\n\n assert_eq!(decoded, Class::Request);\n\n\n\n let decoded = Class::decode(BINGING_INDICATION_RESPONSE).unwrap();\n\n assert_eq!(decoded, Class::Indication);\n\n\n\n let decoded = Class::decode(BINGING_SUCCESS).unwrap();\n\n assert_eq!(decoded, Class::SuccessResponse);\n\n\n\n let decoded = Class::decode(BINGING_FAILURE_RESPONSE).unwrap();\n\n assert_eq!(decoded, Class::FailureResponse);\n\n }\n\n}\n", "file_path": "src/message/class.rs", "rank": 14, "score": 34978.81990292583 }, { "content": "#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n\nstruct Args {\n\n /// Run the binary as a client or a server\n\n #[clap(short, long)]\n\n r#type: String,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<()> {\n\n dotenv::dotenv().ok();\n\n pretty_env_logger::init();\n\n\n\n let args = Args::parse();\n\n\n\n match args.r#type.as_ref() {\n\n \"server\" => server().await,\n\n \"client\" => client().await,\n\n _ => Err(Error::Arguments(args.r#type)),\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 15, "score": 25654.008616723644 }, { "content": "// Log out errors\n\nfn log_error(error: Error) -> Error {\n\n error!(\"{:?}\", error);\n\n error\n\n}\n\n\n\nimpl From<EnvyError> for Error {\n\n fn from(error: EnvyError) -> Self {\n\n let error = match error {\n\n EnvyError::MissingValue(error) => format!(\"Missing config value in .env: {}\", error),\n\n EnvyError::Custom(error) => error,\n\n };\n\n log_error(Error::Config(error))\n\n }\n\n}\n\n\n\nimpl From<AddrParseError> for Error {\n\n fn from(error: AddrParseError) -> Self {\n\n log_error(Error::Parse(error.to_string()))\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 27, "score": 19461.26570510987 }, { "content": "pub mod attribute;\n\npub mod class;\n\npub mod message;\n\npub mod method;\n\npub mod transaction_id;\n", "file_path": "src/message/mod.rs", "rank": 28, "score": 15416.225963950888 }, { "content": "/// port, the transaction IDs in requests sent by the agent have no relationship to\n\n/// the transaction IDs in requests received by the agent.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct TransactionId(pub [u8; 12]);\n\n\n\nimpl 
TransactionId {\n\n pub(crate) fn new() -> Self {\n\n Self(Self::random())\n\n }\n\n\n\n pub(crate) fn decode(buffer: &mut Bytes) -> Result<Self> {\n\n let mut transaction_id = [0u8; 12];\n\n transaction_id.copy_from_slice(&buffer[0..12]);\n\n\n\n Ok(Self(transaction_id))\n\n }\n\n\n\n pub(crate) fn random() -> [u8; 12] {\n\n let mut transaction_id = [0u8; 12];\n\n rand::thread_rng().fill(&mut transaction_id[..]);\n\n transaction_id\n\n }\n\n}\n", "file_path": "src/message/transaction_id.rs", "rank": 29, "score": 14489.41452900724 }, { "content": "use crate::error::{Error, Result};\n\nuse bytes::{Buf, Bytes};\n\nuse rand::Rng;\n\nuse std::convert::TryInto;\n\n\n\n/// The transaction ID is a 96-bit identifier, used to uniquely identify STUN\n\n/// transactions. For request/response transactions, the transaction ID is\n\n/// chosen by the STUN client for the request and echoed by the server in the\n\n/// response. For indications, it is chosen by the agent sending the indication.\n\n/// It primarily serves to correlate requests with responses, though it also\n\n/// plays a small role in helping to prevent certain types of attacks. The\n\n/// server also uses the transaction ID as a key to identify each transaction\n\n/// uniquely across all clients. As such, the transaction ID MUST be uniformly\n\n/// and randomly chosen from the interval 0 .. 2**96-1, and SHOULD be\n\n/// cryptographically random. Resends of the same request reuse the same\n\n/// transaction ID, but the client MUST choose a new transaction ID for new\n\n/// transactions unless the new request is bit-wise identical to the previous\n\n/// request and sent from the same transport address to the same IP address. Success\n\n/// and error responses MUST carry the same transaction ID as their corresponding\n\n/// request. 
When an agent is acting as a STUN server and STUN client on the same\n", "file_path": "src/message/transaction_id.rs", "rank": 30, "score": 14488.9354391554 }, { "content": "use crate::{\n\n config::CONFIG,\n\n error::{Error, Result},\n\n message::attribute::Attribute,\n\n message::class::Class,\n\n message::message::Message,\n\n message::method::Method,\n\n utils::Address,\n\n};\n\nuse bytes::{Bytes, BytesMut};\n\nuse std::net::SocketAddr;\n\nuse tokio::net::UdpSocket;\n\n\n\npub(crate) async fn server() -> Result<()> {\n\n let server_addr: SocketAddr = (*CONFIG).server.parse()?;\n\n let socket = UdpSocket::bind(server_addr)\n\n .await\n\n .map_err(|e| Error::Startup(e.to_string()))?;\n\n\n\n log::info!(\"Started stun server on {}\", server_addr);\n", "file_path": "src/server.rs", "rank": 31, "score": 14.026072317819635 }, { "content": "\n\n let mut buf = [0u8; 1024];\n\n\n\n loop {\n\n let (bytes_received, client_address) = socket\n\n .recv_from(&mut buf)\n\n .await\n\n .map_err(|e| Error::Receive(e.to_string()))?;\n\n let mut bytes = Bytes::copy_from_slice(&buf);\n\n let message = Message::decode(&mut bytes)?;\n\n\n\n log::info!(\n\n \"received {} bytes from {}: {:?}\",\n\n bytes_received,\n\n client_address,\n\n message\n\n );\n\n\n\n match (message.class, message.method) {\n\n (Class::Request, Method::Binding) => {\n", "file_path": "src/server.rs", "rank": 32, "score": 13.40156207147564 }, { "content": " let mut buf = [0u8; 1024];\n\n let (bytes_received, address) = socket\n\n .recv_from(&mut buf)\n\n .await\n\n .map_err(|e| Error::Receive(e.to_string()))?;\n\n let mut bytes = Bytes::copy_from_slice(&buf);\n\n let message = Message::decode(&mut bytes).map_err(|e| Error::Decode(e.to_string()))?;\n\n\n\n log::info!(\n\n \"received {} bytes from {}: {:?}\",\n\n bytes_received,\n\n address,\n\n message\n\n );\n\n }\n\n}\n", "file_path": "src/client.rs", "rank": 33, "score": 9.966535640400044 }, { "content": " let message = Message::binding_response(vec![Attribute::XorMappedAddress(\n\n Address::parse_address(client_address),\n\n )]);\n\n\n\n log::info!(\"sending message to client: {:?}\", message);\n\n\n\n // encode the binding response\n\n let mut buf = BytesMut::new();\n\n message.encode(&mut buf);\n\n\n\n // send the encoded binding response to the client\n\n socket\n\n .send_to(&mut buf.as_ref(), client_address)\n\n .await\n\n .map_err(|e| Error::BindingResponse(e.to_string()))?;\n\n }\n\n _ => unimplemented!(\"This service is only setup to receive a binding request message\"),\n\n }\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 34, "score": 9.428990782305238 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse bytes::{Bytes, BytesMut};\n\nuse tokio::net::UdpSocket;\n\n\n\nuse crate::{\n\n config::CONFIG,\n\n error::{Error, Result},\n\n message::message::Message,\n\n};\n\n\n\npub(crate) async fn client() -> Result<()> {\n\n let client_addr: SocketAddr = (*CONFIG).client.parse()?;\n\n let server_addr: SocketAddr = (*CONFIG).server.parse()?;\n\n let socket = UdpSocket::bind(client_addr)\n\n .await\n\n .map_err(|e| Error::Startup(e.to_string()))?;\n\n\n\n log::info!(\n\n \"Started stun client on {}, connected to a stun server on {}\",\n", "file_path": "src/client.rs", "rank": 35, "score": 9.059720252115227 }, { "content": " client_addr,\n\n server_addr\n\n );\n\n\n\n let message = Message::binding_request(vec![]);\n\n\n\n log::info!(\"sending binding request to the server: {:?}\", message);\n\n\n\n // encode the binding request\n\n let mut bytes_mut = BytesMut::new();\n\n 
message.encode(&mut bytes_mut);\n\n\n\n // send the encoded binding request to the server\n\n socket\n\n .send_to(bytes_mut.as_ref(), server_addr)\n\n .await\n\n .map_err(|e| Error::BindingRequest(e.to_string()))?;\n\n\n\n // listen for a response\n\n loop {\n", "file_path": "src/client.rs", "rank": 36, "score": 7.235475790820133 }, { "content": "use crate::error::{Error, Result};\n\nuse std::{convert::TryFrom, net::SocketAddr};\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) struct Address {\n\n pub address: Vec<u8>,\n\n pub port: u16,\n\n pub ip_kind: IPKind,\n\n}\n\n\n\nimpl Address {\n\n pub(crate) fn ipv4(address: [u8; 4], port: u16) -> Address {\n\n Address {\n\n address: address.to_vec(),\n\n port,\n\n ip_kind: IPKind::IPv4,\n\n }\n\n }\n\n\n\n pub(crate) fn ipv6(address: [u8; 16], port: u16) -> Address {\n", "file_path": "src/utils.rs", "rank": 37, "score": 6.438860228806489 }, { "content": "mod client;\n\nmod config;\n\nmod error;\n\nmod message;\n\nmod server;\n\nmod utils;\n\n\n\nuse crate::client::client;\n\nuse crate::error::{Error, Result};\n\nuse crate::server::server;\n\nuse clap::Parser;\n\n\n\n#[derive(Parser, Debug)]\n\n#[clap(author, version, about, long_about = None)]\n", "file_path": "src/main.rs", "rank": 38, "score": 5.560274826663293 }, { "content": "//! Custom errors for this applicatoin.\n\n//!\n\n//! Map errors from libraries to Error.\n\n//!\n\n//! Define a reusable Result type.\n\n\n\nuse envy::Error as EnvyError;\n\nuse log::error;\n\nuse std::net::AddrParseError;\n\n\n\npub(crate) type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub(crate) enum Error {\n\n #[error(\"Invalid argument: {0}\")]\n\n Arguments(String),\n\n\n\n #[error(\"Error sending the binding request: {0}\")]\n\n BindingRequest(String),\n\n\n", "file_path": "src/error.rs", "rank": 39, "score": 4.640259749175535 }, { "content": "//! Inject dotenv and env variables into the Config struct\n\n//!\n\n//! The envy crate injects environment variables into a struct.\n\n//!\n\n//! dotenv allows environment variables to be augmented/overwriten by a\n\n//! .env file.\n\n//!\n\n//! This file throws the Config struct into a CONFIG lazy_static to avoid\n\n//! multiple processing.\n\n\n\nuse crate::error::Result;\n\nuse dotenv::dotenv;\n\nuse lazy_static::lazy_static;\n\nuse serde_derive::Deserialize;\n\n\n\n#[derive(Deserialize, Debug)]\n\npub(crate) struct Config {\n\n pub(crate) client: String,\n\n pub(crate) server: String,\n\n}\n", "file_path": "src/config.rs", "rank": 40, "score": 3.653953351466349 }, { "content": "\n\n// Throw the Config struct into a CONFIG lazy_static to avoid multiple processing\n\nlazy_static! 
{\n\n pub(crate) static ref CONFIG: Config = config().unwrap_or_else(|error| panic!(\"{}\", error));\n\n}\n\n\n\n/// Use envy to inject dotenv and env vars into the Config struct\n\npub(crate) fn config() -> Result<Config> {\n\n dotenv().ok();\n\n Ok(envy::from_env::<Config>()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_gets_the_config() {\n\n assert_ne!(config().unwrap().server, \"\".to_string());\n\n assert_ne!(*CONFIG.server, \"\".to_string());\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 41, "score": 3.0116164130077543 }, { "content": " Address {\n\n address: address.to_vec(),\n\n port,\n\n ip_kind: IPKind::IPv6,\n\n }\n\n }\n\n\n\n pub(crate) fn parse_address(socket_addr: SocketAddr) -> Address {\n\n match socket_addr {\n\n SocketAddr::V4(address) => Address::ipv4(address.ip().octets(), address.port()),\n\n SocketAddr::V6(address) => Address::ipv6(address.ip().octets(), address.port()),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for Address {\n\n type Error = Error;\n\n\n\n fn try_from(value: String) -> Result<Address> {\n\n let address: SocketAddr = value.parse()?;\n", "file_path": "src/utils.rs", "rank": 42, "score": 2.4624668601927797 }, { "content": "# STUN Client and Server\n\nSession Traversal Utilities for NAT (STUN) is a protocol to discover a client's public ip address and determine any restrictions in the client's router/firewall that would prevent a direct peer connection. This implementation only focuses on the Binding Request and Binding Response portion.\n\n\n\nThis STUN server receives _Binding Request_ messages, validates them, and replies with a _Binding Response_ message. This STUN client sends _Binding Request_ messages and cosumes/decodes a _Binding Response_ from the server.\n\n\n\nThis is primarily a teaching tool for Rust systems programming (UDP, header encoding/decoding, ...etc.) in the WebRTC domain.\n\n\n\n## Configuration\n\nCopy the .env.example file to .env\n\n\n\n```shell\n\ncp .env.example .env\n\n```\n\n\n\nNow update the values in .env as needed.\n\n\n\n## Running the Server\n\nFirst, run the server, which will listen for incoming UDP packets and accept Binding Request messages:\n\n\n\n```shell\n\nRUST_LOG=info cargo run -- --type server\n\n```\n\n\n\n## Running the Client\n\nNext, run the client, which will instantly send a Binding Request message (UDP packet):\n\n\n\n```shell\n\nRUST_LOG=info cargo run -- --type client\n", "file_path": "README.md", "rank": 43, "score": 1.7957736665010382 }, { "content": " #[error(\"Error sending the binding response: {0}\")]\n\n BindingResponse(String),\n\n\n\n #[error(\"{0}. Make sure you copied .env.example to .env\")]\n\n Config(String),\n\n\n\n #[error(\"Error decoding: {0}.\")]\n\n Decode(String),\n\n\n\n #[error(\"Parse error: {0}.\")]\n\n Parse(String),\n\n\n\n #[error(\"Error receiving bytes: {0}.\")]\n\n Receive(String),\n\n\n\n #[error(\"Error starting the server: {0}.\")]\n\n Startup(String),\n\n}\n\n\n\n// Log out errors\n", "file_path": "src/error.rs", "rank": 44, "score": 1.382092798254279 } ]
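The method.rs and class.rs snippets in the context items above both document the RFC 5389 message-type layout, in which the 2-bit class is interleaved with the 12-bit method (|M11..M7|C1|M6..M4|C0|M3..M0|). The following standalone sketch of that packing uses assumed helper names rather than the repository's API, and round-trips the Binding request/response values from the row's tests.

```rust
// Hedged sketch of the RFC 5389 message-type bit layout; helper names are assumed,
// this is not the repository's Class/Method implementation.

/// Pack a 2-bit class (C1..C0) and a 12-bit method (M11..M0) into the
/// 16-bit message type |M11..M7|C1|M6..M4|C0|M3..M0|.
fn pack_message_type(class: u16, method: u16) -> u16 {
    let m = method & 0x0FFF;
    let c = class & 0x3;
    (m & 0x000F)              // M0..M3  -> bits 0..3
        | ((c & 0x1) << 4)    // C0      -> bit 4
        | ((m & 0x0070) << 1) // M4..M6  -> bits 5..7
        | ((c & 0x2) << 7)    // C1      -> bit 8
        | ((m & 0x0F80) << 2) // M7..M11 -> bits 9..13
}

/// Recover (class, method) from a packed message type.
fn unpack_message_type(value: u16) -> (u16, u16) {
    let class = ((value >> 4) & 0x1) | ((value >> 7) & 0x2);
    let method = (value & 0x000F) | ((value >> 1) & 0x0070) | ((value >> 2) & 0x0F80);
    (class, method)
}

fn main() {
    // Binding request: class 0b00, method 0x001 -> 0x0001 (first two bytes of BINDING_REQUEST).
    assert_eq!(pack_message_type(0b00, 0x001), 0x0001);
    // Binding success response: class 0b10, method 0x001 -> 0x0101 (first two bytes of BINDING_RESPONSE).
    assert_eq!(pack_message_type(0b10, 0x001), 0x0101);
    assert_eq!(unpack_message_type(0x0101), (0b10, 0x001));
    println!("message-type packing round-trips");
}
```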
Rust
src/graphics/camera.rs
yggie/mithril-examples
5dd264bfbe38a80bc52ba5923d2091bcfc3b7d4b
extern crate mithril; use std::f64; use std::num::Float; use self::mithril::math::{ Vector, Quaternion }; pub struct Camera { position: Vector, focus_point: Vector, up: Vector, field_of_view: f64, aspect_ratio: f64, far: f64, near: f64, anchor_point: Option<[f64; 2]>, control_point: [f64; 2], } impl Camera { pub fn new(position: Vector, focus_point: Vector, up: Vector) -> Camera { Camera{ position: position, focus_point: focus_point, up: up.normalize(), field_of_view: (90.0 * f64::consts::PI / 180.0), aspect_ratio: 640.0/480.0, far: 100.0, near: 1.0, anchor_point: None, control_point: [0.0; 2], } } pub fn position(&self) -> Vector { self.position } pub fn focus_point(&self) -> Vector { self.focus_point } pub fn go_to(&mut self, position: Vector) { self.position = position; } pub fn update(&mut self) { } pub fn start_control(&mut self, x: f64, y: f64) { self.anchor_point = Some([x, y]); self.control_point[0] = x; self.control_point[1] = y; } pub fn set_control_point(&mut self, x: f64, y: f64) { self.control_point[0] = x; self.control_point[1] = y; } pub fn release_controls(&mut self) { self.anchor_point = None; } pub fn is_controlled(&self) -> bool { self.anchor_point != None } pub fn view_matrix(&self) -> [f32; 16] { let mut z_view = (self.position - self.focus_point).normalize(); let mut x_view = self.up.cross(z_view).normalize(); let mut y_view = z_view.cross(x_view).normalize(); let x_trans = -self.position.dot(x_view); let y_trans = -self.position.dot(y_view); let z_trans = -self.position.dot(z_view); match self.anchor_point { Some(anchor_point) => { let diff = [ (self.control_point[1] - anchor_point[1]) as f32, (anchor_point[0] - self.control_point[0]) as f32, ]; let diff_sq = (diff[0] * diff[0] + diff[1] * diff[1]).sqrt(); if diff_sq > 0.0001 { let diff_length = diff_sq.sqrt(); let rot_axis = (x_view * diff[0] + y_view * diff[1]) / diff_length; let rot_in_radians = diff_length * 2.0; let u_quat = Quaternion::new(0.0, x_view[0], x_view[1], x_view[2]); let v_quat = Quaternion::new(0.0, y_view[0], y_view[1], y_view[2]); let w_quat = Quaternion::new(0.0, z_view[0], z_view[1], z_view[2]); let rot_quat = Quaternion::new_from_rotation(rot_in_radians, rot_axis[0], rot_axis[1], rot_axis[2]); let new_u_quat = rot_quat * u_quat * rot_quat.inverse(); let new_v_quat = rot_quat * v_quat * rot_quat.inverse(); let new_w_quat = rot_quat * w_quat * rot_quat.inverse(); x_view[0] = new_u_quat[1]; x_view[1] = new_u_quat[2]; x_view[2] = new_u_quat[3]; y_view[0] = new_v_quat[1]; y_view[1] = new_v_quat[2]; y_view[2] = new_v_quat[3]; z_view[0] = new_w_quat[1]; z_view[1] = new_w_quat[2]; z_view[2] = new_w_quat[3]; } } None => { } } [ x_view[0], x_view[1], x_view[2], x_trans, y_view[0], y_view[1], y_view[2], y_trans, z_view[0], z_view[1], z_view[2], z_trans, 0.0, 0.0, 0.0, 1.0, ] } pub fn projection_matrix(&self) -> [f32; 16] { let m_11 = (1.0 / (self.field_of_view / 2.0).tan()) as f32; let m_22 = m_11 * (self.aspect_ratio as f32); let m_33 = -((self.far + self.near) / (self.far - self.near)) as f32; let m_43 = -((2.0 * self.far * self.near) / (self.far - self.near)) as f32; [ m_11, 0.0, 0.0, 0.0, 0.0, m_22, 0.0, 0.0, 0.0, 0.0, m_33, m_43, 0.0, 0.0, -1.0, 0.0, ] } }
extern crate mithril; use std::f64; use std::num::Float; use self::mithril::math::{ Vector, Quaternion }; pub struct Camera { position: Vector, focus_point: Vector, up: Vector, field_of_view: f64, aspect_ratio: f64, far: f64, near: f64, anchor_point: Option<[f64; 2]>, control_point: [f64; 2], } impl Camera { pub fn new(position: Vector, focus_point: Vector, up: Vector) -> Camera { Camera{ position: position, focus_point: focus_point, up: up.normalize(), field_of_view: (90.0 * f64::consts::PI / 180.0), aspect_ratio: 640.0/480.0, far: 100.0, near: 1.0, anchor_point: None, control_point: [0.0; 2], } } pub fn position(&self) -> Vector { self.position } pub fn focus_point(&self) -> Vector { self.focus_point } pub fn go_to(&mut self, position: Vector) { self.position = position; } pub fn update(&mut self) { } pub fn start_control(&mut self, x: f64, y: f64) { self.anchor_point = Some([x, y]); self.control_point[0] = x; self.control_point[1] = y; } pub fn set_control_point(&mut self, x: f64, y: f64) { self.control_point[0] = x; self.control_point[1] = y; } pub fn release_controls(&mut self) { self.anchor_point = None; } pub fn is_controlled(&self) -> bool { self.anchor_point != None } pub fn view_matrix(&self) -> [f32; 16] { let mut z_view = (self.position - self.focus_point).normalize(); let mut x_view = self.up.cross(z_view).normalize(); let mut y_view = z_view.cross(x_view).normalize(); let x_trans = -self.position.dot(x_view); let y_trans = -self.position.dot(y_view); let z_trans = -self.position.dot(z_view); match self.anchor_point { Some(anchor_point) => { let diff = [ (self.control_point[1] - anchor_point[1]) as f32, (anchor_point[0] - self.control_point[0]) as f32, ]; let diff_sq = (diff[0] * diff[0] + diff[1] * diff[1]).sqrt(); if diff_sq > 0.0001 { let diff_length = diff_sq.sqrt(); let rot_axis = (x_view * diff[0] + y_view * diff[1]) / diff_length; let rot_in_radians = diff_length * 2.0; let u_quat = Quaternion::new(0.0, x_view[0], x_view[1], x_view[2]); let v_quat = Quaternion::new(0.0, y_view[0], y_view[1], y_view[2]); let w_quat = Quaternion::new(0.0, z_view[0], z_view[1], z_view[2]); let rot_quat = Quaternion::new_from_rotation(rot_in_radians, rot_a
pub fn projection_matrix(&self) -> [f32; 16] { let m_11 = (1.0 / (self.field_of_view / 2.0).tan()) as f32; let m_22 = m_11 * (self.aspect_ratio as f32); let m_33 = -((self.far + self.near) / (self.far - self.near)) as f32; let m_43 = -((2.0 * self.far * self.near) / (self.far - self.near)) as f32; [ m_11, 0.0, 0.0, 0.0, 0.0, m_22, 0.0, 0.0, 0.0, 0.0, m_33, m_43, 0.0, 0.0, -1.0, 0.0, ] } }
xis[0], rot_axis[1], rot_axis[2]); let new_u_quat = rot_quat * u_quat * rot_quat.inverse(); let new_v_quat = rot_quat * v_quat * rot_quat.inverse(); let new_w_quat = rot_quat * w_quat * rot_quat.inverse(); x_view[0] = new_u_quat[1]; x_view[1] = new_u_quat[2]; x_view[2] = new_u_quat[3]; y_view[0] = new_v_quat[1]; y_view[1] = new_v_quat[2]; y_view[2] = new_v_quat[3]; z_view[0] = new_w_quat[1]; z_view[1] = new_w_quat[2]; z_view[2] = new_w_quat[3]; } } None => { } } [ x_view[0], x_view[1], x_view[2], x_trans, y_view[0], y_view[1], y_view[2], y_trans, z_view[0], z_view[1], z_view[2], z_trans, 0.0, 0.0, 0.0, 1.0, ] }
function_block-function_prefixed
[ { "content": "pub fn import_from_obj(filepath: &str) -> (Vec<GLfloat>, Vec<GLfloat>, Vec<GLuint>) {\n\n let comments_regex = Regex::new(r\"\\A\\s*#(?s:.*)\\z\").ok().unwrap();\n\n let vertex_regex = Regex::new(r\"\\A\\s*v\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s*\\z\").ok().unwrap();\n\n let vertex_normal_regex = Regex::new(r\"\\A\\s*vn\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s+(\\+?-?\\d+\\.\\d+)\\s*\\z\").ok().unwrap();\n\n let face_regex = Regex::new(r\"\\A\\s*f\\s+(\\d+)/(\\d+)?/(\\d+)\\s+(\\d+)/(\\d+)?/(\\d+)\\s+(\\d+)/(\\d+)?/(\\d+)\\s*\\z\").ok().unwrap();\n\n\n\n let mut file = BufferedReader::new(File::open(&Path::new(filepath)));\n\n let mut vertices: Vec<[GLfloat; 3]> = Vec::new();\n\n let mut normals: Vec<[GLfloat; 3]> = Vec::new();\n\n let mut indices: Vec<GLuint> = Vec::new();\n\n let mut normal_indices: Vec<GLuint> = Vec::new();\n\n\n\n for (line_num, line) in file.lines().enumerate() {\n\n let contents = line.unwrap();\n\n\n\n if comments_regex.is_match(contents.as_slice()) {\n\n continue;\n\n }\n\n\n\n match vertex_regex.captures(contents.as_slice()) {\n", "file_path": "src/graphics/utils.rs", "rank": 0, "score": 31236.566486270982 }, { "content": "fn main() {\n\n let mut app = Application::new();\n\n\n\n app.run(time::Duration::milliseconds(17));\n\n}\n\n\n\npub struct Application<'a> {\n\n context: glfw::Glfw,\n\n window: glfw::Window,\n\n graphics: GraphicsEngine<'a>,\n\n events_receiver: sync::mpsc::Receiver<(f64, glfw::WindowEvent)>,\n\n timer: io::Timer,\n\n left_mouse_button_down: bool,\n\n}\n\n\n\nimpl<'a> Application<'a> {\n\n fn new() -> Application<'a> {\n\n let context = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n\n\n context.window_hint(glfw::WindowHint::ContextVersion(3, 2));\n", "file_path": "src/main.rs", "rank": 1, "score": 26560.059450652392 }, { "content": "#[test]\n\nfn unify_indexes_test() {\n\n let v1: Vec<i32> = vec!(1, 2, 3, 4, 5, 6);\n\n let v2: Vec<i32> = vec!(-1, -2, -3, -4, -5);\n\n let i1 = vec!(\n\n 0, 1, 2,\n\n 3, 4, 5,\n\n 0, 1, 2,\n\n 5, 4, 3,\n\n 3, 4, 5,\n\n );\n\n let i2 = vec!(\n\n 3, 4, 1,\n\n 0, 1, 2,\n\n 4, 1, 3,\n\n 2, 1, 0,\n\n 2, 0, 1,\n\n );\n\n let (v3, v4, i3) = unify_indexes(&i1, &v1, &i2, &v2);\n\n\n\n // length must be preserved\n", "file_path": "src/graphics/utils.rs", "rank": 2, "score": 23499.74660744118 }, { "content": "fn compile_shader(shader_type: GLenum, shader_source: &str) -> GLuint {\n\n let shader_source_c_str = CString::from_slice(shader_source.as_bytes());\n\n\n\n unsafe {\n\n let shader = gl::CreateShader(shader_type);\n\n\n\n gl::ShaderSource(shader, 1, &shader_source_c_str.as_ptr(), ptr::null());\n\n gl::CompileShader(shader);\n\n\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n\n\n if status != (gl::TRUE as GLint) {\n\n let mut len = 0;\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buffer: Vec<u8> = iter::repeat(0u8).take(len as usize - 1).collect();\n\n gl::GetShaderInfoLog(shader, len, ptr::null_mut(), buffer.as_mut_ptr() as *mut GLchar);\n\n panic!(\"{}\", str::from_utf8(buffer.as_slice()).unwrap());\n\n }\n\n\n\n return shader;\n\n }\n\n}\n\n\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 10, "score": 15728.296221982662 }, { "content": "fn link_program(vertex_shader_id: GLuint, fragment_shader_id: GLuint) -> GLuint {\n\n unsafe {\n\n let program_id = gl::CreateProgram();\n\n gl::AttachShader(program_id, vertex_shader_id);\n\n gl::AttachShader(program_id, 
fragment_shader_id);\n\n gl::LinkProgram(program_id);\n\n\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetProgramiv(program_id, gl::LINK_STATUS, &mut status);\n\n\n\n if status != (gl::TRUE as GLint) {\n\n let mut len = 0;\n\n gl::GetProgramiv(program_id, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buffer: Vec<u8> = iter::repeat(0u8).take(len as usize - 1).collect();\n\n gl::GetProgramInfoLog(program_id, len, ptr::null_mut(), buffer.as_mut_ptr() as *mut GLchar);\n\n panic!(\"{}\", str::from_utf8(buffer.as_slice()).unwrap());\n\n }\n\n\n\n return program_id;\n\n }\n\n}\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 11, "score": 14628.750488432337 }, { "content": "mithril-examples\n\n===============\n\n\n\nA testbed for the Mithril Physics Engine\n", "file_path": "README.md", "rank": 12, "score": 12842.616864240108 }, { "content": "fn unify_indexes<T: Clone>(indices_0: &Vec<u32>, values_0: &Vec<T>, indices_1: &Vec<u32>, values_1: &Vec<T>) -> (Vec<T>, Vec<T>, Vec<u32>) {\n\n let indices: Vec<(u32, u32)> = indices_0.iter().zip(indices_1.iter()).map(|(a, b)| (*a, *b)).collect();\n\n\n\n let mut available_index = 0u32;\n\n let mut consumed: Vec<(u32, u32)> = Vec::new();\n\n let mut new_indices: Vec<u32> = Vec::new();\n\n let mut new_values_0: Vec<T> = Vec::new();\n\n let mut new_values_1: Vec<T> = Vec::new();\n\n\n\n for (index, &index_pair) in indices.iter().enumerate() {\n\n let pos = indices.iter().position(|a| index_pair == *a).unwrap();\n\n\n\n if pos == index {\n\n // first occurrence\n\n consumed.push(index_pair);\n\n new_indices.push(available_index);\n\n new_values_0.push(values_0[index_pair.0 as usize].clone());\n\n new_values_1.push(values_1[index_pair.1 as usize].clone());\n\n available_index = available_index + 1;\n\n } else {\n\n // repeated occurrence\n\n new_indices.push(consumed.iter().position(|a| index_pair == *a).unwrap() as u32);\n\n }\n\n }\n\n\n\n return (new_values_0, new_values_1, new_indices);\n\n}\n\n\n", "file_path": "src/graphics/utils.rs", "rank": 13, "score": 10537.678037826841 }, { "content": "extern crate gl;\n\nextern crate glfw;\n\nextern crate mithril;\n\n\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::str;\n\nuse std::iter;\n\nuse std::rc::Rc;\n\nuse gl::types::*;\n\nuse self::mithril::math::Vector;\n\nuse std::ffi::CString;\n\nuse graphics;\n\n\n\nmacro_rules! 
verify(\n\n ($e: expr) => {\n\n {\n\n let result = $e;\n\n assert_eq!(gl::GetError(), 0);\n\n result\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 14, "score": 10.375361541248942 }, { "content": "use graphics;\n\nuse std::rc::Rc;\n\n\n\npub struct Object<'a> {\n\n asset: Rc<graphics::Asset<'a>>,\n\n translation: [f32; 3],\n\n scale: f32,\n\n}\n\n\n\n\n\nimpl<'a> Object<'a> {\n\n pub fn new(asset: Rc<graphics::Asset<'a>>) -> Object<'a> {\n\n Object{\n\n asset: asset,\n\n translation: [0.0; 3],\n\n scale: 1.0,\n\n }\n\n }\n\n\n\n\n", "file_path": "src/graphics/object.rs", "rank": 15, "score": 7.879350701088426 }, { "content": "extern crate gl;\n\n\n\nuse graphics;\n\nuse std::mem;\n\nuse gl::types::{ GLfloat, GLuint };\n\n\n\npub struct Asset<'a> {\n\n vertex_buffer_id: GLuint,\n\n element_buffer_id: GLuint,\n\n normal_buffer_id: GLuint,\n\n num_vertices: usize,\n\n num_indices: usize,\n\n num_normals: usize,\n\n}\n\n\n\nimpl<'a> Asset<'a> {\n\n pub fn new_from_file(filepath: &str) -> Asset<'a> {\n\n let (vertices, normals, indices) = graphics::utils::import_from_obj(filepath);\n\n let mut vertex_buffer_id: GLuint = 0;\n\n let mut element_buffer_id: GLuint = 0;\n", "file_path": "src/graphics/asset.rs", "rank": 16, "score": 7.7457156907622196 }, { "content": "pub use self::camera::Camera;\n\npub use self::object::Object;\n\npub use self::graphics_engine::{ Asset, Buffer, GraphicsEngine };\n\n\n\nmod camera;\n\nmod object;\n\nmod graphics_engine;\n\npub mod utils;\n", "file_path": "src/graphics/mod.rs", "rank": 17, "score": 7.738516626067727 }, { "content": " self.graphics.draw();\n\n\n\n self.window.swap_buffers();\n\n period.recv().unwrap();\n\n }\n\n }\n\n\n\n fn flush_events_queue(&mut self) {\n\n for (time, event) in glfw::flush_messages(&self.events_receiver) {\n\n match event {\n\n glfw::WindowEvent::Key(Key::Escape, _, Action::Press, _) => {\n\n self.window.set_should_close(true);\n\n }\n\n\n\n glfw::WindowEvent::Scroll(_, y) => {\n\n let mut camera = self.graphics.camera_mut();\n\n let new_pos = (camera.position() - camera.focus_point()) * (1.0 + y as f32) + camera.focus_point();\n\n camera.go_to(new_pos);\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 7.642754800990021 }, { "content": "extern crate gl;\n\nextern crate regex;\n\n\n\nuse self::regex::Regex;\n\nuse gl::types::{ GLfloat, GLuint };\n\nuse std::io::{ BufferedReader, File };\n\n\n", "file_path": "src/graphics/utils.rs", "rank": 19, "score": 7.465328842694571 }, { "content": " length: usize,\n\n}\n\n\n\npub struct Asset<'a> {\n\n vertex_buffer: Buffer,\n\n normal_buffer: Buffer,\n\n element_buffer: Buffer,\n\n}\n\n\n\nimpl<'a> GraphicsEngine<'a> {\n\n pub fn new(window: &glfw::Window) -> GraphicsEngine<'a> {\n\n let mut graphics = GraphicsEngine{\n\n camera: graphics::Camera::new(Vector::new(4.0, 4.0, 4.0), Vector::new(0.0, 0.0, 0.0), Vector::new(0.0, 1.0, 0.0)),\n\n program_id: 0,\n\n vertex_shader_id: 0,\n\n fragment_shader_id: 0,\n\n color_id: -1,\n\n model_matrix_id: -1,\n\n view_matrix_id: -1,\n\n projection_matrix_id: -1,\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 20, "score": 7.445097625702408 }, { "content": " #[inline]\n\n pub fn asset(&self) -> &graphics::Asset {\n\n &*self.asset\n\n }\n\n\n\n\n\n #[inline]\n\n pub fn set_scale(&mut self, scale: f32) {\n\n self.scale = scale;\n\n }\n\n\n\n\n\n #[inline]\n\n pub fn set_translation(&mut self, x: f32, y: f32, z: f32) {\n\n self.translation = [x, y, z];\n\n }\n\n\n\n\n\n #[inline]\n\n pub fn model_matrix(&self) -> [f32; 
16] {\n\n [\n\n self.scale, 0.0, 0.0, self.translation[0],\n\n 0.0, self.scale, 0.0, self.translation[1],\n\n 0.0, 0.0, self.scale, self.translation[2],\n\n 0.0, 0.0, 0.0, 1.0,\n\n ]\n\n }\n\n}\n", "file_path": "src/graphics/object.rs", "rank": 21, "score": 6.951259604139216 }, { "content": "#![feature(unsafe_destructor)]\n\n#![allow(unstable)]\n\n\n\nextern crate gl;\n\nextern crate glfw;\n\n\n\nuse glfw::{Action, Context, Glfw, Key, Window, WindowEvent};\n\nuse graphics::GraphicsEngine;\n\nuse std::io;\n\nuse std::time;\n\nuse std::sync;\n\n\n\nmod graphics;\n\n\n", "file_path": "src/main.rs", "rank": 22, "score": 6.898890229073296 }, { "content": " }\n\n\n\n\n\n pub fn create_object_from_asset(&mut self, asset: Rc<Asset<'a>>) -> &mut graphics::Object<'a> {\n\n self.objects.push(graphics::Object::new(asset.clone()));\n\n\n\n // compiler HAX\n\n let index = self.objects.len() - 1;\n\n return &mut self.objects[index];\n\n }\n\n\n\n\n\n pub fn camera_mut(&mut self) -> &mut graphics::Camera {\n\n &mut self.camera\n\n }\n\n\n\n\n\n pub fn draw(&self) {\n\n unsafe {\n\n gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 23, "score": 6.67670765370227 }, { "content": " let x_norm = 2.0 * (x - 0.5 * width as f64)/(width as f64);\n\n let y_norm = -2.0 * (y - 0.5 * height as f64)/(height as f64);\n\n\n\n if camera.is_controlled() {\n\n camera.set_control_point(x_norm, y_norm);\n\n } else {\n\n camera.start_control(x_norm, y_norm);\n\n }\n\n }\n\n }\n\n\n\n _ => {\n\n // do nothing\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 24, "score": 5.36984484178555 }, { "content": " Some(capture) => {\n\n let vertex: Vec<GLfloat> = capture.iter().skip(1).map(|s| s.parse::<f32>().unwrap()).take(3).collect();\n\n vertices.push([vertex[0], vertex[1], vertex[2]]);\n\n continue;\n\n }\n\n\n\n None => { /* do nothing */ }\n\n }\n\n\n\n match vertex_normal_regex.captures(contents.as_slice()) {\n\n Some(capture) => {\n\n let normal: Vec<GLfloat> = capture.iter().skip(1).map(|s| s.parse::<f32>().unwrap()).take(3).collect();\n\n normals.push([normal[0], normal[1], normal[2]]);\n\n continue;\n\n }\n\n\n\n None => { /* do nothing */ }\n\n }\n\n\n\n match face_regex.captures(contents.as_slice()) {\n", "file_path": "src/graphics/utils.rs", "rank": 25, "score": 5.149092299004881 }, { "content": " glfw::WindowEvent::MouseButton(glfw::MouseButtonLeft, glfw::Action::Press, _) => {\n\n self.left_mouse_button_down = true;\n\n }\n\n\n\n glfw::WindowEvent::MouseButton(glfw::MouseButtonLeft, glfw::Action::Release, _) => {\n\n self.left_mouse_button_down = false;\n\n if self.graphics.camera_mut().is_controlled() {\n\n self.graphics.camera_mut().release_controls();\n\n }\n\n }\n\n\n\n glfw::WindowEvent::MouseButton(button, action, modifiers) => {\n\n println!(\"Time: {:?}, Button: {:?}, Action: {:?}, Modifiers: [{:?}]\", time, glfw::ShowAliases(button), action, modifiers)\n\n }\n\n\n\n glfw::WindowEvent::CursorPos(x, y) => {\n\n if self.left_mouse_button_down {\n\n let camera = self.graphics.camera_mut();\n\n let (width, height) = self.window.get_size();\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 3.5414484329805265 }, { "content": " }\n\n }\n\n);\n\n\n\npub struct GraphicsEngine<'a> {\n\n camera: graphics::Camera,\n\n program_id: GLuint,\n\n vertex_shader_id: GLuint,\n\n fragment_shader_id: GLuint,\n\n color_id: GLint,\n\n model_matrix_id: GLint,\n\n view_matrix_id: GLint,\n\n projection_matrix_id: GLint,\n\n objects: 
Vec<graphics::Object<'a>>,\n\n assets: Vec<Rc<Asset<'a>>>,\n\n assets_vertex_array_id: GLuint,\n\n}\n\n\n\npub struct Buffer {\n\n id: GLuint,\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 27, "score": 3.241304760856554 }, { "content": "\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Asset<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n gl::DeleteBuffers(1, &self.vertex_buffer_id);\n\n gl::DeleteBuffers(1, &self.element_buffer_id);\n\n gl::DeleteBuffers(1, &self.normal_buffer_id);\n\n }\n\n }\n\n}\n", "file_path": "src/graphics/asset.rs", "rank": 28, "score": 3.0616973008155517 }, { "content": " gl::ClearDepth(1.0);\n\n\n\n let model_variable_name = CString::from_slice(\"model_matrix\".as_bytes());\n\n self.model_matrix_id = gl::GetUniformLocation(self.program_id, model_variable_name.as_ptr());\n\n\n\n let color_variable_name = CString::from_slice(\"color\".as_bytes());\n\n self.color_id = gl::GetUniformLocation(self.program_id, color_variable_name.as_ptr());\n\n gl::Uniform4fv(self.color_id, 1, mem::transmute(&[1.0f32, 0.0f32, 0.0f32, 1.0f32][0]));\n\n\n\n let view_matrix_variable_name = CString::from_slice(\"view_matrix\".as_bytes());\n\n self.view_matrix_id = gl::GetUniformLocation(self.program_id, view_matrix_variable_name.as_ptr());\n\n\n\n let projection_matrix_variable_name = CString::from_slice(\"projection_matrix\".as_bytes());\n\n self.projection_matrix_id = gl::GetUniformLocation(self.program_id, projection_matrix_variable_name.as_ptr());\n\n }\n\n }\n\n\n\n\n\n pub fn new_asset_from_file(&mut self, filepath: &str) -> Rc<Asset> {\n\n let (vertices, normals, indices) = graphics::utils::import_from_obj(filepath);\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 29, "score": 2.9120753747334573 }, { "content": " const vec3 vertex_to_light = normalize(vec3(1.0, 1.0, 0.0));\n\n\n\n float diffuse = clamp(pow(dot(normal, vertex_to_light), 3), 0.0, 0.7) + 0.3;\n\n\n\n out_color = vec4(color.xyz * diffuse, 1.0);\n\n }\n\n \");\n\n\n\n self.program_id = link_program(self.vertex_shader_id, self.fragment_shader_id);\n\n\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut self.assets_vertex_array_id as *mut u32);\n\n\n\n verify!(gl::UseProgram(self.program_id));\n\n\n\n gl::ClearColor(0.1, 0.4, 0.2, 0.9);\n\n gl::LineWidth(1.0);\n\n verify!(gl::Enable(gl::DEPTH_TEST));\n\n gl::DepthFunc(gl::LESS);\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 30, "score": 2.4440689775875635 }, { "content": " assert_eq!(i3.len(), 15);\n\n\n\n // repeated elements\n\n assert_eq!(i3[3], i3[11]);\n\n assert_eq!(i3[4], i3[10]);\n\n assert_eq!(i3[5], i3[9]);\n\n\n\n // unique pairs\n\n assert!(i3[0] != i3[6]);\n\n assert!(i3[1] != i3[7]);\n\n assert!(i3[2] != i3[8]);\n\n\n\n // resulting vectors have duplicates\n\n assert_eq!(v3.len(), 12);\n\n assert_eq!(v3.len(), v4.len());\n\n\n\n // unique values\n\n let mut v3_copy = v3.clone();\n\n v3_copy.sort();\n\n v3_copy.dedup();\n", "file_path": "src/graphics/utils.rs", "rank": 31, "score": 2.117484994523519 }, { "content": " let mut vertex_buffer_id: GLuint = 0;\n\n let mut element_buffer_id: GLuint = 0;\n\n let mut normal_buffer_id: GLuint = 0;\n\n\n\n unsafe {\n\n // send vertex data\n\n gl::GenBuffers(1, &mut vertex_buffer_id as *mut u32);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer_id);\n\n gl::BufferData(gl::ARRAY_BUFFER,\n\n (vertices.len() * mem::size_of::<GLfloat>()) as i64,\n\n mem::transmute(&vertices.as_slice()[0]),\n\n gl::STATIC_DRAW);\n\n\n\n // send vertex index data\n\n gl::GenBuffers(1, &mut element_buffer_id as 
*mut u32);\n\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, element_buffer_id);\n\n gl::BufferData(gl::ELEMENT_ARRAY_BUFFER,\n\n (indices.len() * mem::size_of::<GLuint>()) as i64,\n\n mem::transmute(&indices.as_slice()[0]),\n\n gl::STATIC_DRAW);\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 32, "score": 1.980706055834517 }, { "content": " let period = self.timer.periodic(duration);\n\n\n\n {\n\n let asset_ref = self.graphics.new_asset_from_file(\"assets/cube.obj\");\n\n self.graphics.create_object_from_asset(asset_ref.clone());\n\n let obj = self.graphics.create_object_from_asset(asset_ref.clone());\n\n obj.set_translation(-3.0, -1.0, -1.0);\n\n }\n\n\n\n {\n\n let asset_ref = self.graphics.new_asset_from_file(\"assets/isosphere.obj\");\n\n let obj = self.graphics.create_object_from_asset(asset_ref.clone());\n\n obj.set_translation(3.0, 2.0, -1.0);\n\n }\n\n\n\n while !self.window.should_close() {\n\n self.context.poll_events();\n\n self.flush_events_queue();\n\n\n\n self.graphics.camera_mut().update();\n", "file_path": "src/main.rs", "rank": 33, "score": 1.9072449428033322 }, { "content": " let mut normal_buffer_id: GLuint = 0;\n\n\n\n unsafe {\n\n // send vertex data\n\n gl::GenBuffers(1, &mut vertex_buffer_id as *mut u32);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, vertex_buffer_id);\n\n gl::BufferData(gl::ARRAY_BUFFER,\n\n (vertices.len() * mem::size_of::<GLfloat>()) as i64,\n\n mem::transmute(&vertices.as_slice()[0]),\n\n gl::STATIC_DRAW);\n\n\n\n // send vertex index data\n\n gl::GenBuffers(1, &mut element_buffer_id as *mut u32);\n\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, element_buffer_id);\n\n gl::BufferData(gl::ELEMENT_ARRAY_BUFFER,\n\n (indices.len() * mem::size_of::<GLuint>()) as i64,\n\n mem::transmute(&indices.as_slice()[0]),\n\n gl::STATIC_DRAW);\n\n\n\n // send vertex normal data\n", "file_path": "src/graphics/asset.rs", "rank": 34, "score": 1.7921555439977823 }, { "content": " context.window_hint(glfw::WindowHint::OpenglForwardCompat(true));\n\n context.window_hint(glfw::WindowHint::OpenglProfile(glfw::OpenGlProfileHint::Core));\n\n\n\n let (window, events) = context.create_window(640, 480, \"mithril - testbed\", glfw::WindowMode::Windowed)\n\n .expect(\"Failed to create GLFW window\");\n\n\n\n window.set_all_polling(true);\n\n window.make_current();\n\n\n\n return Application{\n\n context: context,\n\n graphics: GraphicsEngine::new(&window),\n\n window: window,\n\n timer: io::Timer::new().unwrap(),\n\n events_receiver: events,\n\n left_mouse_button_down: false,\n\n };\n\n }\n\n\n\n fn run(&mut self, duration: time::Duration) {\n", "file_path": "src/main.rs", "rank": 35, "score": 1.7919831902705297 }, { "content": "\n\n let view_matrix = self.camera.view_matrix();\n\n gl::UniformMatrix4fv(self.view_matrix_id, 1, gl::TRUE, mem::transmute(&view_matrix[0]));\n\n\n\n let projection_matrix = self.camera.projection_matrix();\n\n gl::UniformMatrix4fv(self.projection_matrix_id, 1, gl::TRUE, mem::transmute(&projection_matrix[0]));\n\n\n\n // draw simple objects\n\n gl::BindVertexArray(self.assets_vertex_array_id);\n\n for object in self.objects.iter() {\n\n self.render_object(object);\n\n }\n\n gl::BindVertexArray(0);\n\n }\n\n }\n\n\n\n\n\n fn render_object(&self, object: &graphics::Object) {\n\n let asset = object.asset();\n\n\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 36, "score": 1.6365675907830521 }, { "content": " gl::DrawElements(gl::TRIANGLES, asset.element_buffer.length as i32, gl::UNSIGNED_INT, ptr::null());\n\n\n\n 
gl::DisableVertexAttribArray(1);\n\n gl::DisableVertexAttribArray(0);\n\n }\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for GraphicsEngine<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n gl::DeleteProgram(self.program_id);\n\n gl::DeleteShader(self.fragment_shader_id);\n\n gl::DeleteShader(self.vertex_shader_id);\n\n gl::DeleteVertexArrays(1, &self.assets_vertex_array_id);\n\n\n\n for asset in self.assets.iter() {\n\n gl::DeleteBuffers(1, &asset.vertex_buffer.id);\n\n gl::DeleteBuffers(1, &asset.element_buffer.id);\n\n gl::DeleteBuffers(1, &asset.normal_buffer.id);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 37, "score": 1.6316451294657366 }, { "content": " assert_eq!(v3_copy.len(), 6);\n\n\n\n // unique values\n\n let mut v4_copy = v4.clone();\n\n v4_copy.sort();\n\n v4_copy.dedup();\n\n assert_eq!(v4_copy.len(), 5);\n\n\n\n // unique indexes\n\n let mut i4 = i3.clone();\n\n i4.sort();\n\n i4.dedup();\n\n assert_eq!(i4.len(), 12);\n\n\n\n // indexes generated should be a sequence\n\n for i in range(0us, 11us) {\n\n assert_eq!(i4[i], i as u32);\n\n }\n\n}\n", "file_path": "src/graphics/utils.rs", "rank": 38, "score": 1.5759017456866296 }, { "content": " gl::GenBuffers(1, &mut normal_buffer_id as *mut u32);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer_id);\n\n gl::BufferData(gl::ARRAY_BUFFER,\n\n (normals.len() * mem::size_of::<GLfloat>()) as i64,\n\n mem::transmute(&normals.as_slice()[0]),\n\n gl::STATIC_DRAW);\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, 0);\n\n }\n\n\n\n return Asset{\n\n vertex_buffer_id: vertex_buffer_id,\n\n element_buffer_id: element_buffer_id,\n\n normal_buffer_id: normal_buffer_id,\n\n num_vertices: vertices.len(),\n\n num_indices: indices.len(),\n\n num_normals: normals.len(),\n\n };\n\n }\n\n}\n", "file_path": "src/graphics/asset.rs", "rank": 39, "score": 1.2696089855437704 }, { "content": " in vec3 vertex_norm;\n\n\n\n out vec3 normal;\n\n\n\n void main(void) {\n\n gl_Position = projection_matrix * view_matrix * model_matrix * vec4(vertex_pos, 1.0);\n\n normal = vec3(view_matrix * model_matrix * vec4(vertex_norm, 0.0));\n\n }\n\n \");\n\n\n\n self.fragment_shader_id = compile_shader(gl::FRAGMENT_SHADER, \"\n\n #version 150\n\n\n\n uniform vec4 color;\n\n\n\n in vec3 normal;\n\n\n\n out vec4 out_color;\n\n\n\n void main(void) {\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 40, "score": 1.2514265849910524 }, { "content": "\n\n // send vertex normal data\n\n gl::GenBuffers(1, &mut normal_buffer_id as *mut u32);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, normal_buffer_id);\n\n gl::BufferData(gl::ARRAY_BUFFER,\n\n (normals.len() * mem::size_of::<GLfloat>()) as i64,\n\n mem::transmute(&normals.as_slice()[0]),\n\n gl::STATIC_DRAW);\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, 0);\n\n }\n\n\n\n let asset_ref = Rc::new(Asset{\n\n vertex_buffer: Buffer{ id: vertex_buffer_id, length: vertices.len() },\n\n normal_buffer: Buffer{ id: normal_buffer_id, length: normals.len() },\n\n element_buffer: Buffer{ id: element_buffer_id, length: indices.len() },\n\n });\n\n self.assets.push(asset_ref.clone());\n\n\n\n return asset_ref;\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 41, "score": 1.1498814880996575 }, { "content": " assets: Vec::new(),\n\n assets_vertex_array_id: 0,\n\n objects: Vec::new(),\n\n };\n\n\n\n gl::load_with(|s| window.get_proc_address(s));\n\n graphics.initialize();\n\n\n\n return graphics;\n\n }\n\n\n\n fn initialize(&mut self) {\n\n self.vertex_shader_id = 
compile_shader(gl::VERTEX_SHADER, \"\n\n #version 150\n\n\n\n uniform mat4 model_matrix;\n\n uniform mat4 view_matrix;\n\n uniform mat4 projection_matrix;\n\n\n\n in vec3 vertex_pos;\n", "file_path": "src/graphics/graphics_engine.rs", "rank": 42, "score": 1.1013567895320056 }, { "content": " Some(capture) => {\n\n indices.push(capture.at(1).unwrap().parse::<u32>().unwrap() - 1);\n\n indices.push(capture.at(4).unwrap().parse::<u32>().unwrap() - 1);\n\n indices.push(capture.at(7).unwrap().parse::<u32>().unwrap() - 1);\n\n\n\n normal_indices.push(capture.at(3).unwrap().parse::<u32>().unwrap() - 1);\n\n normal_indices.push(capture.at(6).unwrap().parse::<u32>().unwrap() - 1);\n\n normal_indices.push(capture.at(9).unwrap().parse::<u32>().unwrap() - 1);\n\n continue;\n\n }\n\n\n\n None => { /* do nothing */ }\n\n }\n\n\n\n println!(\"[IGNORED] {:>20}:{:<4} {:?}\", filepath, line_num, contents);\n\n }\n\n\n\n let (new_vertices, new_normals, new_indices) = unify_indexes(&indices, &vertices, &normal_indices, &normals);\n\n\n\n let flattened_vertices: Vec<GLfloat> = new_vertices.iter().flat_map(|a| a.iter().map(|a| *a)).collect();\n\n let flattened_normals: Vec<GLfloat> = new_normals.iter().flat_map(|a| a.iter().map(|a| *a)).collect();\n\n\n\n return (flattened_vertices, flattened_normals, new_indices);\n\n}\n\n\n\n\n", "file_path": "src/graphics/utils.rs", "rank": 43, "score": 0.7803711342155033 } ]
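The `Camera::view_matrix` code in the row above builds an orthonormal basis from the position, focus point, and up vector, then writes the negated dot products into the translation column of a row-major 4x4 matrix. Here is a dependency-free sketch of that look-at construction, using illustrative names and plain `f32` arrays instead of mithril's `Vector` type.

```rust
// Hedged sketch of the look-at view matrix construction; names are illustrative
// and plain arrays stand in for mithril's Vector type.

fn sub(a: [f32; 3], b: [f32; 3]) -> [f32; 3] {
    [a[0] - b[0], a[1] - b[1], a[2] - b[2]]
}

fn dot(a: [f32; 3], b: [f32; 3]) -> f32 {
    a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
}

fn cross(a: [f32; 3], b: [f32; 3]) -> [f32; 3] {
    [
        a[1] * b[2] - a[2] * b[1],
        a[2] * b[0] - a[0] * b[2],
        a[0] * b[1] - a[1] * b[0],
    ]
}

fn normalize(v: [f32; 3]) -> [f32; 3] {
    let len = dot(v, v).sqrt();
    [v[0] / len, v[1] / len, v[2] / len]
}

/// Row-major view matrix: camera basis vectors in the rows, negated dot products
/// with the camera position in the last column, as in the Camera code above.
fn look_at(position: [f32; 3], focus: [f32; 3], up: [f32; 3]) -> [f32; 16] {
    let z = normalize(sub(position, focus)); // camera looks down -z
    let x = normalize(cross(up, z));
    let y = cross(z, x);
    [
        x[0], x[1], x[2], -dot(position, x),
        y[0], y[1], y[2], -dot(position, y),
        z[0], z[1], z[2], -dot(position, z),
        0.0, 0.0, 0.0, 1.0,
    ]
}

fn main() {
    // Same default pose as the Camera in the row above: eye on the diagonal, looking at the origin.
    let view = look_at([4.0, 4.0, 4.0], [0.0, 0.0, 0.0], [0.0, 1.0, 0.0]);
    // The focus point maps onto the view-space -z axis, so its x and y components vanish.
    assert!(view[3].abs() < 1e-5);
    assert!(view[7].abs() < 1e-5);
    println!("translation column: [{}, {}, {}]", view[3], view[7], view[11]);
}
```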
Rust
07-rust/stm32l0x1/stm32l0x1_pac/src/adc/isr.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register ISR"] pub type R = crate::R<u32, super::ISR>; #[doc = "Writer for register ISR"] pub type W = crate::W<u32, super::ISR>; #[doc = "Register ISR `reset()`'s with value 0"] impl crate::ResetValue for super::ISR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ADRDY`"] pub type ADRDY_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ADRDY`"] pub struct ADRDY_W<'a> { w: &'a mut W, } impl<'a> ADRDY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `EOSMP`"] pub type EOSMP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOSMP`"] pub struct EOSMP_W<'a> { w: &'a mut W, } impl<'a> EOSMP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `EOC`"] pub type EOC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOC`"] pub struct EOC_W<'a> { w: &'a mut W, } impl<'a> EOC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `EOS`"] pub type EOS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOS`"] pub struct EOS_W<'a> { w: &'a mut W, } impl<'a> EOS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OVR`"] pub type OVR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OVR`"] pub struct OVR_W<'a> { w: &'a mut W, } impl<'a> OVR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `AWD`"] pub type AWD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `AWD`"] pub struct AWD_W<'a> { w: &'a mut W, } impl<'a> AWD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `EOCAL`"] pub type EOCAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOCAL`"] pub struct EOCAL_W<'a> { w: &'a mut W, } impl<'a> EOCAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&self) -> ADRDY_R { ADRDY_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&self) -> EOSMP_R { EOSMP_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&self) -> EOC_R { EOC_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&self) -> EOS_R { EOS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&self) -> OVR_R { OVR_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&self) -> AWD_R { AWD_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&self) -> EOCAL_R { EOCAL_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&mut self) -> ADRDY_W { ADRDY_W { w: self } } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&mut self) -> EOSMP_W { EOSMP_W { w: self } } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&mut self) -> EOC_W { EOC_W { w: self } } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&mut self) -> EOS_W { EOS_W { w: self } } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&mut self) -> OVR_W { OVR_W { w: self } } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&mut self) -> AWD_W { AWD_W { w: self } } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&mut self) -> EOCAL_W { EOCAL_W { w: self } } }
#[doc = "Reader of register ISR"] pub type R = crate::R<u32, super::ISR>; #[doc = "Writer for register ISR"] pub type W = crate::W<u32, super::ISR>; #[doc = "Register ISR `reset()`'s with value 0"] impl crate::ResetValue for super::ISR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ADRDY`"] pub type ADRDY_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ADRDY`"] pub struct ADRDY_W<'a> { w: &'a mut W, } impl<'a> ADRDY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `EOSMP`"] pub type EOSMP_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOSMP`"] pub struct EOSMP_W<'a> { w: &'a mut W, } impl<'a> EOSMP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `EOC`"] pub type EOC_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOC`"] pub struct EOC_W<'a> { w: &'a mut W, } impl<'a> EOC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Reader of field `EOS`"] pub type EOS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `EOS`"] pub struct EOS_W<'a> { w: &'a mut W, } impl<'a> EOS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `OVR`"] pub type OVR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `OVR`"] pub struct OVR_W<'a> { w: &'a mut W, } impl<'a> OVR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `AWD`"] pub type AWD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `AWD`"] pub struct AWD_W<'a> { w: &'a mut W, } impl<'a> AWD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Reader of field `EOCAL`"] pub type EOCAL_R = crate::R<bool, bool>; #[doc = "Write proxy fo
a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&self) -> ADRDY_R { ADRDY_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&self) -> EOSMP_R { EOSMP_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&self) -> EOC_R { EOC_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&self) -> EOS_R { EOS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&self) -> OVR_R { OVR_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&self) -> AWD_R { AWD_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&self) -> EOCAL_R { EOCAL_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - ADC ready"] #[inline(always)] pub fn adrdy(&mut self) -> ADRDY_W { ADRDY_W { w: self } } #[doc = "Bit 1 - End of sampling flag"] #[inline(always)] pub fn eosmp(&mut self) -> EOSMP_W { EOSMP_W { w: self } } #[doc = "Bit 2 - End of conversion flag"] #[inline(always)] pub fn eoc(&mut self) -> EOC_W { EOC_W { w: self } } #[doc = "Bit 3 - End of sequence flag"] #[inline(always)] pub fn eos(&mut self) -> EOS_W { EOS_W { w: self } } #[doc = "Bit 4 - ADC overrun"] #[inline(always)] pub fn ovr(&mut self) -> OVR_W { OVR_W { w: self } } #[doc = "Bit 7 - Analog watchdog flag"] #[inline(always)] pub fn awd(&mut self) -> AWD_W { AWD_W { w: self } } #[doc = "Bit 11 - End Of Calibration flag"] #[inline(always)] pub fn eocal(&mut self) -> EOCAL_W { EOCAL_W { w: self } } }
r field `EOCAL`"] pub struct EOCAL_W<'a> { w: &'a mut W, } impl<'a> EOCAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 0, "score": 192988.70578231278 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 1, "score": 192988.70578231278 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 2, "score": 192988.70578231278 }, { "content": "#[entry]\n\nfn main() -> ! { // ! 
means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32F0 by default uses the 8MHz HSI on boot\n\n // (See section 6.2 of the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32f0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahbenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/src/main.rs", "rank": 3, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32L0 boots to a ~2.1MHz internal oscillator\n\n // (See Section 7.2 of the STM32L0x1 reference manual)\n\n syst.set_reload(4_200_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32l0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.iopenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/src/main.rs", "rank": 4, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~1s period; STM32F4 by default uses the 16MHz HSI on boot\n\n // (See section 6.2.2 in the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin A5 as push-pull output\n\n let p = stm32f446::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahb1enr.write(|w| w.gpioaen().set_bit());\n\n\n\n // Set moder on fifth pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/src/main.rs", "rank": 5, "score": 134178.0685062561 }, { "content": "TickType_t uxTaskResetEventItemValue( void )\n\n{\n\nTickType_t uxReturn;\n\n\n\n\tuxReturn = listGET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ) );\n\n\n\n\t/* Reset the event list item to its normal value - so it can be used with\n\n\tqueues and semaphores. */\n\n\tlistSET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ), ( ( TickType_t ) configMAX_PRIORITIES - ( TickType_t ) pxCurrentTCB->uxPriority ) ); /*lint !e961 MISRA exception as the casts are only redundant for some ports. 
*/\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 6, "score": 104903.10307163426 }, { "content": "EventBits_t xEventGroupGetBitsFromISR( EventGroupHandle_t xEventGroup )\n\n{\n\nUBaseType_t uxSavedInterruptStatus;\n\nEventGroup_t const * const pxEventBits = xEventGroup;\n\nEventBits_t uxReturn;\n\n\n\n\tuxSavedInterruptStatus = portSET_INTERRUPT_MASK_FROM_ISR();\n\n\t{\n\n\t\tuxReturn = pxEventBits->uxEventBits;\n\n\t}\n\n\tportCLEAR_INTERRUPT_MASK_FROM_ISR( uxSavedInterruptStatus );\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 7, "score": 104854.52086577113 }, { "content": "#define portMAX_8_BIT_VALUE\t\t\t\t\t( ( uint8_t ) 0xff )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 8, "score": 100509.13876308527 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "03-gpio/include_f446re/core_cm4.h", "rank": 9, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "05-timer/include_f446re/core_cm4.h", "rank": 10, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "06-freertos/include_f031k6/core_cm0.h", "rank": 11, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "05-timer/include_f031k6/core_cm0.h", "rank": 12, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "04-interrupt/include_f031k6/core_cm0.h", "rank": 13, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "04-interrupt/include_f446re/core_cm4.h", "rank": 14, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 8 Exception number */\n", "file_path": "06-freertos/include_f446re/core_cm4.h", "rank": 15, "score": 94865.05434783109 }, { "content": " uint32_t ISR:9; /*!< bit: 0.. 
8 Exception number */\n", "file_path": "03-gpio/include_f031k6/core_cm0.h", "rank": 16, "score": 94865.05434783109 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/build.rs", "rank": 17, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/build.rs", "rank": 18, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/build.rs", "rank": 19, "score": 88441.66588380146 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/build.rs", "rank": 20, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. 
By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/build.rs", "rank": 21, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/build.rs", "rank": 22, "score": 86885.72247686045 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 23, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 24, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 25, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 26, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 27, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 28, "score": 79431.945204443 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `AWD`\"]\n\npub type AWD_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `AWD`\"]\n\npub struct AWD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> AWD_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": 
"07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 30, "score": 60414.04152751815 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MCEF`\"]\n\npub type MCEF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MCEF`\"]\n\npub struct MCEF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MCEF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/isr.rs", "rank": 31, "score": 60406.06119099651 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TAMP2F`\"]\n\npub type TAMP2F_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TAMP2F`\"]\n\npub struct TAMP2F_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TAMP2F_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 32, "score": 60406.06119099651 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXACKE`\"]\n\npub type TXACKE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXACKE`\"]\n\npub struct TXACKE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXACKE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 33, "score": 60406.06119099651 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0x07\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x07\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRAWF`\"]\n\npub type ALRAWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SHPF`\"]\n\npub type SHPF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SHPF`\"]\n\npub struct SHPF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 34, "score": 60401.531419499704 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `EOSMP`\"]\n\npub type EOSMP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOSMP`\"]\n\npub struct EOSMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EOSMP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut 
W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 35, "score": 60390.07626525163 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ADRDY`\"]\n\npub type ADRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ADRDY`\"]\n\npub struct ADRDY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADRDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 37, "score": 60389.0777180351 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0x07\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x07\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRAWF`\"]\n\npub type ALRAWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALRBWF`\"]\n\npub type ALRBWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `WUTWF`\"]\n\npub type WUTWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SHPF`\"]\n\npub type SHPF_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 39, "score": 60388.80058167391 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0x01\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x01\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDCODE`\"]\n\npub type ADDCODE_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `DIR`\"]\n\npub type DIR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `BUSY`\"]\n\npub type BUSY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALERT`\"]\n\npub type ALERT_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/isr.rs", "rank": 40, "score": 60388.385162453225 }, { "content": "#[doc = \"Reader of register ISR\"]\n\npub type R = crate::R<u32, super::ISR>;\n\n#[doc = \"Writer for register ISR\"]\n\npub type W = crate::W<u32, super::ISR>;\n\n#[doc = \"Register ISR `reset()`'s with value 0x01\"]\n\nimpl crate::ResetValue for super::ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x01\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDCODE`\"]\n\npub type ADDCODE_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `DIR`\"]\n\npub type DIR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `BUSY`\"]\n\npub type BUSY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALERT`\"]\n\npub type ALERT_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/isr.rs", "rank": 41, "score": 60388.385162453225 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = 
r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `OVR`\"]\n\npub type OVR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OVR`\"]\n\npub struct OVR_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 44, "score": 60384.27102814163 }, { "content": "#[doc = \"Reader of field `EOS`\"]\n\npub type EOS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOS`\"]\n\npub struct EOS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EOS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 45, "score": 60383.32141436648 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `INITF`\"]\n\npub type INITF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `INIT`\"]\n\npub type INIT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `INIT`\"]\n\npub struct INIT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INIT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 47, "score": 60383.08255208792 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXBR`\"]\n\npub type TXBR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXBR`\"]\n\npub struct TXBR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXBR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 48, "score": 60381.86213155713 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `BRE`\"]\n\npub type BRE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BRE`\"]\n\npub struct BRE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BRE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 49, "score": 60381.86213155713 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `WUTF`\"]\n\npub type WUTF_R = crate::R<bool, 
bool>;\n\n#[doc = \"Write proxy for field `WUTF`\"]\n\npub struct WUTF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WUTF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 50, "score": 60381.86213155713 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `EOC`\"]\n\npub type EOC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOC`\"]\n\npub struct EOC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EOC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 52, "score": 60381.844841870065 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSOVF`\"]\n\npub type TSOVF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSOVF`\"]\n\npub struct TSOVF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSOVF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 53, "score": 60381.274777561 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRAF`\"]\n\npub type ALRAF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ALRAF`\"]\n\npub struct ALRAF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ALRAF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 54, "score": 60381.274777561 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXOVR`\"]\n\npub type RXOVR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXOVR`\"]\n\npub struct RXOVR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXOVR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 55, "score": 60380.92861014216 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader 
of field `ALRBF`\"]\n\npub type ALRBF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ALRBF`\"]\n\npub struct ALRBF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ALRBF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 56, "score": 60380.92861014216 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ARBLST`\"]\n\npub type ARBLST_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ARBLST`\"]\n\npub struct ARBLST_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ARBLST_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 57, "score": 60380.92861014216 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRAF`\"]\n\npub type ALRAF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ALRAF`\"]\n\npub struct ALRAF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 58, "score": 60377.738495847436 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TAMP1F`\"]\n\npub type TAMP1F_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TAMP1F`\"]\n\npub struct TAMP1F_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 59, "score": 60377.738495847436 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSF`\"]\n\npub type TSF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSF`\"]\n\npub struct TSF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 60, "score": 60377.738495847436 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of 
field `RXACKE`\"]\n\npub type RXACKE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXACKE`\"]\n\npub struct RXACKE_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 61, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TAMP1F`\"]\n\npub type TAMP1F_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TAMP1F`\"]\n\npub struct TAMP1F_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 62, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXERR`\"]\n\npub type TXERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXERR`\"]\n\npub struct TXERR_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 63, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRAF`\"]\n\npub type ALRAF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ALRAF`\"]\n\npub struct ALRAF_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 64, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `EOAF`\"]\n\npub type EOAF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOAF`\"]\n\npub struct EOAF_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/isr.rs", "rank": 65, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n 
}\n\n}\n\n#[doc = \"Reader of field `RXEND`\"]\n\npub type RXEND_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXEND`\"]\n\npub struct RXEND_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 66, "score": 60376.52358619431 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TAMP2F`\"]\n\npub type TAMP2F_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TAMP2F`\"]\n\npub struct TAMP2F_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TAMP2F_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 67, "score": 60375.570302011954 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXE`\"]\n\npub type TXE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXE`\"]\n\npub struct TXE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/isr.rs", "rank": 68, "score": 60375.570302011954 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXE`\"]\n\npub type TXE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXE`\"]\n\npub struct TXE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/isr.rs", "rank": 69, "score": 60375.570302011954 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSF`\"]\n\npub type TSF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSF`\"]\n\npub struct TSF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 70, "score": 60375.570302011954 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `INITF`\"]\n\npub type INITF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `INIT`\"]\n\npub type INIT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `INIT`\"]\n\npub struct INIT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INIT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the 
field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 71, "score": 60375.50101881915 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `INITF`\"]\n\npub type INITF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RSF`\"]\n\npub type RSF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RSF`\"]\n\npub struct RSF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RSF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 72, "score": 60375.50101881915 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 7 - Analog watchdog flag\"]\n\n #[inline(always)]\n\n pub fn awd(&self) -> AWD_R {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 73, "score": 60375.36484450159 }, { "content": "#[doc = \"Reader of field `RSF`\"]\n\npub type RSF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RSF`\"]\n\npub struct RSF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RSF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 74, "score": 60375.236441609624 }, { "content": "#[doc = \"Reader of field `INIT`\"]\n\npub type INIT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `INIT`\"]\n\npub struct INIT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INIT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 75, "score": 60375.236441609624 }, { "content": "#[doc = \"Reader of field `LBPE`\"]\n\npub type LBPE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `LBPE`\"]\n\npub struct LBPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LBPE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 76, "score": 
60375.236441609624 }, { "content": "#[doc = \"Reader of field `TSOVF`\"]\n\npub type TSOVF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSOVF`\"]\n\npub struct TSOVF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSOVF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 77, "score": 60375.236441609624 }, { "content": "#[doc = \"Reader of field `RXBR`\"]\n\npub type RXBR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXBR`\"]\n\npub struct RXBR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXBR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 78, "score": 60375.236441609624 }, { "content": "#[doc = \"Reader of field `TXUDR`\"]\n\npub type TXUDR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXUDR`\"]\n\npub struct TXUDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXUDR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 79, "score": 60375.236441609624 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RECALPF`\"]\n\npub type RECALPF_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Alarm A write flag\"]\n\n #[inline(always)]\n\n pub fn alrawf(&self) -> ALRAWF_R {\n\n ALRAWF_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - Shift operation pending\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 80, "score": 60375.063045697694 }, { "content": "impl<'a> SHPF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `INITS`\"]\n\npub type INITS_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 81, "score": 60374.926858525854 }, 
{ "content": "impl<'a> TAMP1F_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TAMP2F`\"]\n\npub type TAMP2F_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 82, "score": 60374.926858525854 }, { "content": "impl<'a> ALRAF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ALRBF`\"]\n\npub type ALRBF_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/rtc/isr.rs", "rank": 83, "score": 60374.926858525854 }, { "content": "impl<'a> TSF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSOVF`\"]\n\npub type TSOVF_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/isr.rs", "rank": 84, "score": 60374.926858525854 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSF`\"]\n\npub type TSF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TSF`\"]\n\npub struct TSF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 85, "score": 60373.630941360054 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXEND`\"]\n\npub type TXEND_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXEND`\"]\n\npub struct TXEND_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXEND_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 86, "score": 60373.630941360054 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) 
<< 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SBPE`\"]\n\npub type SBPE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SBPE`\"]\n\npub struct SBPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SBPE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/cec/isr.rs", "rank": 87, "score": 60373.630941360054 }, { "content": "#[doc = \"Reader of field `RXNE`\"]\n\npub type RXNE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXIS`\"]\n\npub type TXIS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXIS`\"]\n\npub struct TXIS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXIS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/isr.rs", "rank": 88, "score": 60373.20644615335 }, { "content": "#[doc = \"Reader of field `RXNE`\"]\n\npub type RXNE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXIS`\"]\n\npub type TXIS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXIS`\"]\n\npub struct TXIS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXIS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/isr.rs", "rank": 89, "score": 60373.20644615335 }, { "content": " EOSMP_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 0 - ADC ready\"]\n\n #[inline(always)]\n\n pub fn adrdy(&self) -> ADRDY_R {\n\n ADRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 7 - Analog watchdog flag\"]\n\n #[inline(always)]\n\n pub fn awd(&mut self) -> AWD_W {\n\n AWD_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - ADC overrun\"]\n\n #[inline(always)]\n\n pub fn ovr(&mut self) -> OVR_W {\n\n OVR_W { w: self }\n\n }\n\n #[doc = \"Bit 3 - End of sequence flag\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/adc/isr.rs", "rank": 90, "score": 60372.41166760668 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `INITS`\"]\n\npub type INITS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SHPF`\"]\n\npub type SHPF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `WUTWF`\"]\n\npub type WUTWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALRBWF`\"]\n\npub type ALRBWF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALRAWF`\"]\n\npub type ALRAWF_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/rtc/isr.rs", "rank": 91, "score": 60371.81023418033 }, { "content": "#[doc = \"Reader of register SR\"]\n\npub type R = crate::R<u32, 
super::SR>;\n\n#[doc = \"Writer for register SR\"]\n\npub type W = crate::W<u32, super::SR>;\n\n#[doc = \"Register SR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OVR`\"]\n\npub type OVR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OVR`\"]\n\npub struct OVR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OVR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/adc1/sr.rs", "rank": 92, "score": 96.74033817512611 }, { "content": "#[doc = \"Reader of register CEC_ISR\"]\n\npub type R = crate::R<u32, super::CEC_ISR>;\n\n#[doc = \"Writer for register CEC_ISR\"]\n\npub type W = crate::W<u32, super::CEC_ISR>;\n\n#[doc = \"Register CEC_ISR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CEC_ISR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXACKE`\"]\n\npub type TXACKE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXACKE`\"]\n\npub struct TXACKE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXACKE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/hdmi_cec/cec_isr.rs", "rank": 93, "score": 94.54362903920487 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RMP`\"]\n\npub type RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RMP`\"]\n\npub struct RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim11/or.rs", "rank": 94, "score": 90.83063566651923 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RMP`\"]\n\npub type RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RMP`\"]\n\npub struct RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tim14/or.rs", "rank": 95, "score": 90.83063566651926 }, { "content": "#[doc = \"Reader of register CR\"]\n\npub type R = crate::R<u32, super::CR>;\n\n#[doc = \"Writer for register CR\"]\n\npub type W = crate::W<u32, super::CR>;\n\n#[doc = \"Register CR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESET`\"]\n\npub type RESET_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RESET`\"]\n\npub struct RESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESET_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crc/cr.rs", "rank": 96, "score": 90.40786609064351 }, { "content": "#[doc = 
\"Reader of register SDCMR\"]\n\npub type R = crate::R<u32, super::SDCMR>;\n\n#[doc = \"Writer for register SDCMR\"]\n\npub type W = crate::W<u32, super::SDCMR>;\n\n#[doc = \"Register SDCMR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SDCMR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `MODE`\"]\n\npub struct MODE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MODE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/fmc/sdcmr.rs", "rank": 97, "score": 89.48791375812647 }, { "content": "#[doc = \"Reader of register CPAR6\"]\n\npub type R = crate::R<u32, super::CPAR6>;\n\n#[doc = \"Writer for register CPAR6\"]\n\npub type W = crate::W<u32, super::CPAR6>;\n\n#[doc = \"Register CPAR6 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CPAR6 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `PA`\"]\n\npub type PA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `PA`\"]\n\npub struct PA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/dma1/cpar6.rs", "rank": 98, "score": 89.23820382900091 }, { "content": "#[doc = \"Reader of register BKP4R\"]\n\npub type R = crate::R<u32, super::BKP4R>;\n\n#[doc = \"Writer for register BKP4R\"]\n\npub type W = crate::W<u32, super::BKP4R>;\n\n#[doc = \"Register BKP4R `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BKP4R {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BKP`\"]\n\npub type BKP_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `BKP`\"]\n\npub struct BKP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BKP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rtc/bkp4r.rs", "rank": 99, "score": 89.23820382900091 } ]
Rust
day_05_puzzle_02/src/grid.rs
simonhaisz/advent_of_code_2021
cc75f0d281a5a67b8a2e552fd240896a48a6a795
use std::collections::HashSet;

use crate::line::{self, Line, Point};

pub struct Grid {
    lines: Vec<Line>,
}

impl Grid {
    pub fn new() -> Grid {
        Grid {
            lines: vec![],
        }
    }

    pub fn add_line(&mut self, line: Line) {
        self.lines.push(line);
    }

    pub fn overlaps(&self) -> HashSet<Point> {
        let mut overlaps = HashSet::new();

        let lines = self.lines.iter().collect::<Vec<&Line>>();

        let mut compare_counter = 0;
        for outer in 0..(lines.len()-1) {
            for inner in (outer+1)..lines.len() {
                let a = lines[outer];
                let b = lines[inner];
                compare_counter += 1;
                let points = line::intersections_specialized(a, b);
                if points.len() > 0 {
                    for p in points.into_iter() {
                        overlaps.insert(p);
                    }
                }
            }
        }

        let expected_comparisons = lines.len() * (lines.len() - 1) / 2;
        println!("Compared {} pairs of lines together (expected {}) from a total of {}", compare_counter, expected_comparisons, lines.len());

        overlaps
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_overlaps_square() {
        let mut grid = Grid::new();
        grid.add_line(Line::new(Point::new(1, 1), Point::new(1, 10)));
        grid.add_line(Line::new(Point::new(1, 1), Point::new(10, 1)));
        grid.add_line(Line::new(Point::new(1, 10), Point::new(10, 10)));
        grid.add_line(Line::new(Point::new(10, 10), Point::new(10, 1)));

        let overlaps = grid.overlaps();
        assert_eq!(4, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(1, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(1, 10)));
        assert_eq!(true, overlaps.contains(&Point::new(10, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(10, 10)));
    }

    #[test]
    fn test_overlaps_thicc_line() {
        let mut grid = Grid::new();
        grid.add_line(Line::new(Point::new(1,1), Point::new(10, 1)));
        grid.add_line(Line::new(Point::new(9,1), Point::new(2, 1)));
        grid.add_line(Line::new(Point::new(5,1), Point::new(7, 1)));
        grid.add_line(Line::new(Point::new(7,1), Point::new(6, 1)));
        grid.add_line(Line::new(Point::new(6,1), Point::new(6, 1)));

        let overlaps = grid.overlaps();
        assert_eq!(8, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(2, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(3, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(4, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(6, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(8, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(9, 1)));
    }

    #[test]
    fn test_demo() {
        let mut grid = Grid::new();

        let input = r"
0,9 -> 5,9
8,0 -> 0,8
9,4 -> 3,4
2,2 -> 2,1
7,0 -> 7,4
6,4 -> 2,0
0,9 -> 2,9
3,4 -> 1,4
0,0 -> 8,8
5,5 -> 8,2
";
        let lines = input.split("\n").filter(|l| !l.trim().is_empty()).collect::<Vec<&str>>();
        for line in lines.iter() {
            let l = Line::from(line);
            grid.add_line(l);
        }

        let overlaps = grid.overlaps();
        assert_eq!(12, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(7, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 2)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 3)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 3)));
        assert_eq!(true, overlaps.contains(&Point::new(3, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(4, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(6, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 5)));
        assert_eq!(true, overlaps.contains(&Point::new(0, 9)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 9)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 9)));
    }
}
use std::collections::HashSet;

use crate::line::{self, Line, Point};

pub struct Grid {
    lines: Vec<Line>,
}

impl Grid {
    pub fn new() -> Grid {
        Grid {
            lines: vec![],
        }
    }

    pub fn add_line(&mut self, line: Line) {
        self.lines.push(line);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_overlaps_square() {
        let mut grid = Grid::new();
        grid.add_line(Line::new(Point::new(1, 1), Point::new(1, 10)));
        grid.add_line(Line::new(Point::new(1, 1), Point::new(10, 1)));
        grid.add_line(Line::new(Point::new(1, 10), Point::new(10, 10)));
        grid.add_line(Line::new(Point::new(10, 10), Point::new(10, 1)));

        let overlaps = grid.overlaps();
        assert_eq!(4, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(1, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(1, 10)));
        assert_eq!(true, overlaps.contains(&Point::new(10, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(10, 10)));
    }

    #[test]
    fn test_overlaps_thicc_line() {
        let mut grid = Grid::new();
        grid.add_line(Line::new(Point::new(1,1), Point::new(10, 1)));
        grid.add_line(Line::new(Point::new(9,1), Point::new(2, 1)));
        grid.add_line(Line::new(Point::new(5,1), Point::new(7, 1)));
        grid.add_line(Line::new(Point::new(7,1), Point::new(6, 1)));
        grid.add_line(Line::new(Point::new(6,1), Point::new(6, 1)));

        let overlaps = grid.overlaps();
        assert_eq!(8, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(2, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(3, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(4, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(6, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(8, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(9, 1)));
    }

    #[test]
    fn test_demo() {
        let mut grid = Grid::new();

        let input = r"
0,9 -> 5,9
8,0 -> 0,8
9,4 -> 3,4
2,2 -> 2,1
7,0 -> 7,4
6,4 -> 2,0
0,9 -> 2,9
3,4 -> 1,4
0,0 -> 8,8
5,5 -> 8,2
";
        let lines = input.split("\n").filter(|l| !l.trim().is_empty()).collect::<Vec<&str>>();
        for line in lines.iter() {
            let l = Line::from(line);
            grid.add_line(l);
        }

        let overlaps = grid.overlaps();
        assert_eq!(12, overlaps.len());
        assert_eq!(true, overlaps.contains(&Point::new(7, 1)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 2)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 3)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 3)));
        assert_eq!(true, overlaps.contains(&Point::new(3, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(4, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(6, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(7, 4)));
        assert_eq!(true, overlaps.contains(&Point::new(5, 5)));
        assert_eq!(true, overlaps.contains(&Point::new(0, 9)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 9)));
        assert_eq!(true, overlaps.contains(&Point::new(2, 9)));
    }
}
    pub fn overlaps(&self) -> HashSet<Point> {
        let mut overlaps = HashSet::new();

        let lines = self.lines.iter().collect::<Vec<&Line>>();

        let mut compare_counter = 0;
        for outer in 0..(lines.len()-1) {
            for inner in (outer+1)..lines.len() {
                let a = lines[outer];
                let b = lines[inner];
                compare_counter += 1;
                let points = line::intersections_specialized(a, b);
                if points.len() > 0 {
                    for p in points.into_iter() {
                        overlaps.insert(p);
                    }
                }
            }
        }

        let expected_comparisons = lines.len() * (lines.len() - 1) / 2;
        println!("Compared {} pairs of lines together (expected {}) from a total of {}", compare_counter, expected_comparisons, lines.len());

        overlaps
    }
function_block-full_function
[ { "content": "pub fn intersections_optimized(a: &Line, b: &Line) -> Vec<Point> {\n\n let mut points = vec!();\n\n\n\n if (!a.horizontal() && !a.vertical()) || (!b.horizontal() && !b.vertical()) {\n\n panic!(\"Expected lines to be either horizontal or vertical in order to determine intersections\\nline a:{:?}\\nline b:{:?}\", a, b);\n\n }\n\n\n\n if a.horizontal() && b.horizontal() {\n\n // if both horizontal then they only intersect if matching y axis\n\n if a.start().y() == b.start().y() {\n\n let a_min_x = a.min_x();\n\n let a_max_x = a.max_x();\n\n\n\n let b_min_x = b.min_x();\n\n let b_max_x = b.max_x();\n\n\n\n let intersection_min = cmp::max(a_min_x, b_min_x);\n\n let intersection_max = cmp::min(a_max_x, b_max_x);\n\n\n\n if intersection_min <= intersection_max {\n", "file_path": "day_05_puzzle_01/src/line.rs", "rank": 0, "score": 312968.74457197613 }, { "content": "pub fn intersections_specialized(a: &Line, b: &Line) -> Vec<Point> {\n\n if !a.valid() || !b.valid() {\n\n panic!(\"Expected lines to be either horizontal, vertical, or diagonal in order to determine intersections\\nline a:{:?}\\nline b:{:?}\", a, b);\n\n }\n\n\n\n let mut points = vec![];\n\n\n\n if parallel(a, b) {\n\n if a.horizontal() && b.horizontal() {\n\n if a.start().y() == b.start().y() {\n\n let a_flat = a.x_dimension();\n\n let b_flat = b.x_dimension();\n\n if let Some(overlap_range) = geometry::overlap_range(a_flat.0, a_flat.1, b_flat.0, b_flat.1) {\n\n for x in overlap_range.0..=overlap_range.1 {\n\n points.push(Point::new(x, a.start().y()));\n\n }\n\n }\n\n }\n\n } else if a.vertical() && b.vertical() {\n\n if a.start().x() == b.start().x() {\n", "file_path": "day_05_puzzle_02/src/line.rs", "rank": 1, "score": 312968.74457197613 }, { "content": "#[allow(dead_code)]\n\nfn intersections_unoptimized(a: &Line, b: &Line) -> Vec<Point> {\n\n let mut points = vec![];\n\n\n\n let a_points = a.points();\n\n let b_points = b.points();\n\n\n\n for a_point in a_points.iter() {\n\n for b_point in b_points.iter() {\n\n if a_point == b_point {\n\n points.push(Point::new(a_point.x(), a_point.y()));\n\n }\n\n }\n\n }\n\n\n\n points\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "day_05_puzzle_01/src/line.rs", "rank": 2, "score": 259286.91750148323 }, { "content": "#[allow(dead_code)]\n\nfn intersections_unoptimized(a: &Line, b: &Line) -> Vec<Point> {\n\n if !a.valid() || !b.valid() {\n\n panic!(\"Expected lines to be either horizontal, vertical, or diagonal in order to determine intersections\\nline a:{:?}\\nline b:{:?}\", a, b);\n\n }\n\n\n\n let mut points = vec![];\n\n\n\n if geometry::overlap(a.min_x(), a.max_x(), b.min_x(), b.max_x()) && geometry::overlap(a.min_y(), a.max_y(), b.min_y(), b.max_y()) {\n\n let a_points = a.points();\n\n let b_points = b.points();\n\n \n\n for a_point in a_points.iter() {\n\n for b_point in b_points.iter() {\n\n if a_point == b_point {\n\n points.push(Point::new(a_point.x(), a_point.y()));\n\n }\n\n }\n\n }\n\n }\n\n\n\n points\n\n}\n\n\n", "file_path": "day_05_puzzle_02/src/line.rs", "rank": 3, "score": 259286.91750148323 }, { "content": "#[allow(dead_code)]\n\nfn intersections_optimized(a: &Line, b: &Line) -> Vec<Point> {\n\n if !a.valid() || !b.valid() {\n\n panic!(\"Expected lines to be either horizontal, vertical, or diagonal in order to determine intersections\\nline a:{:?}\\nline b:{:?}\", a, b);\n\n }\n\n\n\n let mut points = vec!();\n\n\n\n if a.horizontal() && b.horizontal() {\n\n // if both horizontal then they only intersect if matching y axis\n\n 
if a.start().y() == b.start().y() {\n\n let a_min_x = a.min_x();\n\n let a_max_x = a.max_x();\n\n\n\n let b_min_x = b.min_x();\n\n let b_max_x = b.max_x();\n\n\n\n let intersection_min = cmp::max(a_min_x, b_min_x);\n\n let intersection_max = cmp::min(a_max_x, b_max_x);\n\n\n\n if intersection_min <= intersection_max {\n", "file_path": "day_05_puzzle_02/src/line.rs", "rank": 4, "score": 259286.91750148323 }, { "content": "pub fn first_illegal_character(line: &str) -> Option<char> {\n\n\tlet mut opens = vec![];\n\n\tfor c in line.chars() {\n\n\t\tfor (open, close) in OPEN_CLOSE_PAIRS.iter() {\n\n\t\t\tif c == *open {\n\n\t\t\t\topens.push(c);\n\n\t\t\t\tcontinue;\n\n\t\t\t} else if c == *close {\n\n\t\t\t\tlet last_index = opens.len() - 1;\n\n\t\t\t\tlet last_open = opens[last_index];\n\n\t\t\t\tlet expected_close = OPEN_CLOSE_PAIRS.get(&last_open).unwrap();\n\n\t\t\t\tif expected_close == close {\n\n\t\t\t\t\topens.remove(last_index);\n\n\t\t\t\t} else {\n\n\t\t\t\t\treturn Some(*close);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n", "file_path": "day_10_puzzle_01/src/chunk.rs", "rank": 5, "score": 172166.57908930077 }, { "content": "pub fn find_incomplete_opens(line: &str) -> Option<String> {\n\n\tlet mut opens = vec![];\n\n\t'chars: for c in line.chars() {\n\n\t\tfor (open, close) in OPEN_CLOSE_PAIRS.iter() {\n\n\t\t\tif c == *open {\n\n\t\t\t\topens.push(c);\n\n\t\t\t\tcontinue 'chars;\n\n\t\t\t} else if c == *close {\n\n\t\t\t\tlet last_index = opens.len() - 1;\n\n\t\t\t\tlet last_open = opens[last_index];\n\n\t\t\t\tlet expected_close = OPEN_CLOSE_PAIRS.get(&last_open).unwrap();\n\n\t\t\t\tif expected_close == close {\n\n\t\t\t\t\topens.remove(last_index);\n\n\t\t\t\t} else {\n\n\t\t\t\t\t// illegal character - skip this line\n\n\t\t\t\t\treturn None;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n", "file_path": "day_10_puzzle_02/src/chunk.rs", "rank": 6, "score": 172166.57908930077 }, { "content": "pub fn parse_numbers(input: &str) -> Vec<i32> {\n\n let mut numbers = input\n\n .split(\",\")\n\n .filter(|v| !v.trim().is_empty())\n\n .map(|v| i32::from_str_radix(v.trim(), 10).unwrap())\n\n .collect::<Vec<i32>>();\n\n \n\n numbers.sort();\n\n\n\n numbers\n\n}\n\n\n", "file_path": "day_07_puzzle_01/src/crab_alignment.rs", "rank": 7, "score": 170965.7433280269 }, { "content": "pub fn parse_numbers(input: &str) -> Vec<i32> {\n\n let mut numbers = input\n\n .split(\",\")\n\n .filter(|v| !v.trim().is_empty())\n\n .map(|v| i32::from_str_radix(v.trim(), 10).unwrap())\n\n .collect::<Vec<i32>>();\n\n \n\n numbers.sort();\n\n\n\n numbers\n\n}\n\n\n", "file_path": "day_07_puzzle_02/src/crab_alignment.rs", "rank": 8, "score": 170965.7433280269 }, { "content": "pub fn find_fanciest_hit_arc(target: &Rectangle) -> Option<Vec<Vector>> {\n\n\n\n let (steps, initial_velocity_x, resulting_velocity_x) = find_max_steps_initial_velocity_x(target.horizontal_range());\n\n\n\n let max_steps = if resulting_velocity_x > 0 {\n\n Some(steps)\n\n } else {\n\n None\n\n };\n\n\n\n let initial_velocity_y = find_max_initial_velocity_y(target.vertical_range(), max_steps);\n\n\n\n let launch = Vector::new(Position::origin(), initial_velocity_x, initial_velocity_y);\n\n\n\n hit_arc(&launch, target)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n", "file_path": "day_17_puzzle_01/src/ballistics.rs", "rank": 9, "score": 164452.07043541264 }, { "content": "pub fn hit_arc(launch: &Vector, target: &Rectangle) -> Option<Vec<Vector>> {\n\n let mut ballistic_arc = vec![];\n\n\n\n if target.within(&launch.pos) {\n\n panic!(\"Danger close! 
Launching within the target zone\")\n\n }\n\n\n\n let mut current = launch.clone();\n\n\n\n loop {\n\n ballistic_arc.push(current.clone());\n\n\n\n if target.within(&current.pos) {\n\n return Some(ballistic_arc);\n\n } else if target.far(&current.pos) {\n\n return None;\n\n } else {\n\n current = current.next_step();\n\n }\n\n }\n\n}\n\n\n", "file_path": "day_17_puzzle_01/src/ballistics.rs", "rank": 10, "score": 160098.55591028172 }, { "content": "pub fn hit_arc(launch: &Vector, target: &Rectangle) -> Option<Vec<Vector>> {\n\n let mut ballistic_arc = vec![];\n\n\n\n if target.within(&launch.pos) {\n\n panic!(\"Danger close! Launching within the target zone\")\n\n }\n\n\n\n let mut current = launch.clone();\n\n\n\n loop {\n\n ballistic_arc.push(current.clone());\n\n\n\n if target.within(&current.pos) {\n\n return Some(ballistic_arc);\n\n } else if target.far(&current.pos) {\n\n return None;\n\n } else {\n\n current = current.next_step();\n\n }\n\n }\n\n}\n\n\n", "file_path": "day_17_puzzle_02/src/ballistics.rs", "rank": 11, "score": 160098.55591028172 }, { "content": "pub fn min_offset_total_target_brute_force(numbers: &Vec<i32>) -> (i32, i64) {\n\n let mut min_offset = i32::MAX;\n\n let mut min_offset_total = i64::MAX;\n\n\n\n for n in 0..numbers[numbers.len()-1] {\n\n let offset_total = offset_total(&numbers, n);\n\n if offset_total < min_offset_total {\n\n min_offset_total = offset_total;\n\n min_offset = n;\n\n } else {\n\n break;\n\n }\n\n }\n\n (min_offset, min_offset_total)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "day_07_puzzle_02/src/crab_alignment.rs", "rank": 12, "score": 158884.72431264026 }, { "content": "pub fn min_offset_total_target_brute_force(numbers: &Vec<i32>) -> (i32, i64) {\n\n let mut min_offset = i32::MAX;\n\n let mut min_offset_total = i64::MAX;\n\n\n\n for n in 0..numbers[numbers.len()-1] {\n\n let offset_total = offset_total(&numbers, n);\n\n if offset_total < min_offset_total {\n\n min_offset_total = offset_total;\n\n min_offset = n;\n\n } else {\n\n break;\n\n }\n\n }\n\n (min_offset, min_offset_total)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "day_07_puzzle_01/src/crab_alignment.rs", "rank": 13, "score": 158884.72431264026 }, { "content": "pub fn polymerization(input_polymer: &str, insertion_rules: &Vec<PairInsertionRule>) -> String {\n\n let mut todo_insertions: Vec<Insertion> = vec![];\n\n let mut done_insertions: Vec<Insertion> = vec![];\n\n\n\n for rule in insertion_rules.iter() {\n\n let mut start = 0;\n\n while let Some(m) = input_polymer[start..].find(rule.pair) {\n\n let index = start + m + 1;\n\n todo_insertions.push(Insertion { index, element: rule.element });\n\n start = index;\n\n }\n\n }\n\n\n\n let mut ouput_polymer = String::from(input_polymer);\n\n\n\n for todo in todo_insertions.into_iter() {\n\n let offset = done_insertions.iter().filter(|&d| d.index <= todo.index).count();\n\n ouput_polymer.insert(todo.index + offset, todo.element);\n\n done_insertions.push(todo);\n\n }\n\n\n\n ouput_polymer\n\n}\n\n\n", "file_path": "day_14_puzzle_01/src/polymer.rs", "rank": 14, "score": 158418.3438832782 }, { "content": "pub fn find_paths<'input>(start: CaveRef<'input>, end: CaveRef<'input>) -> Vec<Vec<CaveRef<'input>>> {\n\n let mut start_to_end_paths: Vec<Vec<CaveRef<'input>>> = vec![];\n\n\n\n let mut cave_paths: Vec<Vec<CaveRef<'input>>> = vec![];\n\n\n\n cave_paths.push(vec![start.clone()]);\n\n\n\n while cave_paths.len() > 0 {\n\n let current_path = 
cave_paths.pop().unwrap();\n\n for next_cave in current_path.last().unwrap().borrow().connections.iter() {\n\n if next_cave.borrow().size == CaveSize::Small && current_path.iter().any(|c| c.borrow().name == next_cave.borrow().name) {\n\n continue;\n\n }\n\n let mut next_path = current_path.clone();\n\n next_path.push(next_cave.clone());\n\n if next_cave.borrow().name == end.borrow().name {\n\n start_to_end_paths.push(next_path);\n\n } else {\n\n cave_paths.push(next_path);\n\n }\n\n }\n\n }\n\n\n\n start_to_end_paths\n\n}\n\n\n", "file_path": "day_12_puzzle_01/src/cave_network.rs", "rank": 15, "score": 157928.8902212202 }, { "content": "pub fn create_cave_network<'input>(connection_inputs: Vec<&'input str>) -> HashMap<&str, CaveRef<'input>> {\n\n let mut all_caves = HashMap::new();\n\n\n\n for connection_input in connection_inputs.iter() {\n\n let mut split = connection_input.split(\"-\");\n\n let start = split.next().unwrap();\n\n let end = split.next().unwrap();\n\n if let Some(_) = split.next() {\n\n panic!(\"connection inputs should only have two components (start to end) - found extra component(s) in '{}'\", connection_input);\n\n }\n\n\n\n all_caves.entry(start).or_insert(Cave::new(start));\n\n all_caves.entry(end).or_insert(Cave::new(end));\n\n\n\n let start_cave = all_caves[start].clone();\n\n let end_cave = all_caves[end].clone();\n\n\n\n start_cave.borrow_mut().add_connection(end_cave.clone());\n\n end_cave.borrow_mut().add_connection(start_cave.clone());\n\n }\n", "file_path": "day_12_puzzle_02/src/cave_network.rs", "rank": 16, "score": 144204.49308435863 }, { "content": "pub fn create_cave_network<'input>(connection_inputs: Vec<&'input str>) -> HashMap<&str, CaveRef<'input>> {\n\n let mut all_caves = HashMap::new();\n\n\n\n for connection_input in connection_inputs.iter() {\n\n let mut split = connection_input.split(\"-\");\n\n let start = split.next().unwrap();\n\n let end = split.next().unwrap();\n\n if let Some(_) = split.next() {\n\n panic!(\"connection inputs should only have two components (start to end) - found extra component(s) in '{}'\", connection_input);\n\n }\n\n\n\n all_caves.entry(start).or_insert(Cave::new(start));\n\n all_caves.entry(end).or_insert(Cave::new(end));\n\n\n\n let start_cave = all_caves[start].clone();\n\n let end_cave = all_caves[end].clone();\n\n\n\n start_cave.borrow_mut().add_connection(end_cave.clone());\n\n end_cave.borrow_mut().add_connection(start_cave.clone());\n\n }\n", "file_path": "day_12_puzzle_01/src/cave_network.rs", "rank": 17, "score": 144204.49308435863 }, { "content": "pub fn find_paths<'input>(network: &HashMap<&str, CaveRef<'input>>, start: CaveRef<'input>, end: CaveRef<'input>) -> Vec<Vec<CaveRef<'input>>> {\n\n let mut start_to_end_paths: HashMap<String,Vec<CaveRef<'input>>> = HashMap::new();\n\n\n\n for duplicate_allowed in network.values().filter(|c| c.borrow().name != \"start\" && c.borrow().name != \"end\" && c.borrow().size == CaveSize::Small) {\n\n let mut cave_paths: Vec<Vec<CaveRef<'input>>> = vec![];\n\n\n\n cave_paths.push(vec![start.clone()]);\n\n\n\n while cave_paths.len() > 0 {\n\n let current_path = cave_paths.pop().unwrap();\n\n for next_cave in current_path.last().unwrap().borrow().connections.iter() {\n\n let allowed_duplicate_count = if next_cave.borrow().name == duplicate_allowed.borrow().name {\n\n 1\n\n } else {\n\n 0\n\n };\n\n if next_cave.borrow().size == CaveSize::Small && current_path.iter().filter(|c| c.borrow().name == next_cave.borrow().name).count() > allowed_duplicate_count {\n\n continue;\n\n 
}\n\n let mut next_path = current_path.clone();\n", "file_path": "day_12_puzzle_02/src/cave_network.rs", "rank": 18, "score": 141963.9445859145 }, { "content": "fn parallel(a: &Line, b: &Line) -> bool {\n\n if a.horizontal() && b.horizontal() {\n\n true\n\n } else if a.vertical() && b.vertical() {\n\n true\n\n } else if a.diagonal() && b.diagonal() {\n\n a.slope_x() == b.slope_x() && a.slope_y() == b.slope_y()\n\n } else {\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_line() {\n\n let line = Line::from(\"1,5->3,7\");\n\n assert_eq!(Line::new(Point::new(1, 5), Point::new(3, 7)), line);\n", "file_path": "day_05_puzzle_02/src/line.rs", "rank": 19, "score": 139118.41910860472 }, { "content": "pub fn find_safest_path<'input>(cave: &'input Cave, start: &'input Position, end: &'input Position) -> Vec<&'input Position> {\n\n let mut frontier = BinaryHeap::new();\n\n frontier.push(PriorityPos::new(start, 0));\n\n\n\n let mut came_from: HashMap<&Position, &Position> = HashMap::new();\n\n let mut risk_so_far: HashMap<&Position, u32> = HashMap::new();\n\n risk_so_far.insert(start, 0);\n\n\n\n while !frontier.is_empty() {\n\n let current = frontier.pop().unwrap();\n\n\n\n if current.pos == end {\n\n break;\n\n }\n\n\n\n for next in cave.neighbors(current.pos) {\n\n let new_risk = risk_so_far[current.pos] + next.risk();\n\n if !risk_so_far.contains_key(next) || new_risk < risk_so_far[next] {\n\n risk_so_far.insert(next, new_risk);\n\n let priority = new_risk as usize + current.pos.distance(next);\n", "file_path": "day_15_puzzle_01/src/path_finder.rs", "rank": 20, "score": 136338.9818866714 }, { "content": "pub fn find_safest_path<'input>(cave: &'input Cave, start: &'input Position, end: &'input Position) -> Vec<&'input Position> {\n\n let mut frontier = BinaryHeap::new();\n\n frontier.push(PriorityPos::new(start, 0));\n\n\n\n let mut came_from: HashMap<&Position, &Position> = HashMap::new();\n\n let mut risk_so_far: HashMap<&Position, u32> = HashMap::new();\n\n risk_so_far.insert(start, 0);\n\n\n\n while !frontier.is_empty() {\n\n let current = frontier.pop().unwrap();\n\n\n\n if current.pos == end {\n\n break;\n\n }\n\n\n\n for next in cave.neighbors(current.pos) {\n\n let new_risk = risk_so_far[current.pos] + next.risk();\n\n if !risk_so_far.contains_key(next) || new_risk < risk_so_far[next] {\n\n risk_so_far.insert(next, new_risk);\n\n let priority = new_risk as usize + current.pos.distance(next);\n", "file_path": "day_15_puzzle_02/src/path_finder.rs", "rank": 21, "score": 136338.98188667142 }, { "content": "pub fn decode(input: &str) -> u32 {\n\n\tlazy_static! {\n\n\t\tstatic ref PIXEL_FORMAT_REGEX: Regex = Regex::new(r\"^[.#]+$\").unwrap();\n\n\t}\n\n\tif !PIXEL_FORMAT_REGEX.is_match(input) {\n\n\t\tpanic!(\"Invalid input '{}' - expected a series of . and # characters\", input)\n\n\t}\n\n\n\n\tlet binary = input\n\n\t\t.replace(DARK, \"0\")\n\n\t\t.replace(LIGHT, \"1\");\n\n\t\n\n\tu32::from_str_radix(&binary, 2).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\tpub fn from(input: &str) -> Image {\n", "file_path": "day_20_puzzle_02/src/pixel.rs", "rank": 22, "score": 116459.83136412405 }, { "content": "pub fn decode(input: &str) -> u32 {\n\n\tlazy_static! {\n\n\t\tstatic ref PIXEL_FORMAT_REGEX: Regex = Regex::new(r\"^[.#]+$\").unwrap();\n\n\t}\n\n\tif !PIXEL_FORMAT_REGEX.is_match(input) {\n\n\t\tpanic!(\"Invalid input '{}' - expected a series of . 
and # characters\", input)\n\n\t}\n\n\n\n\tlet binary = input\n\n\t\t.replace(DARK, \"0\")\n\n\t\t.replace(LIGHT, \"1\");\n\n\t\n\n\tu32::from_str_radix(&binary, 2).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\tpub fn from(input: &str) -> Image {\n", "file_path": "day_20_puzzle_01/src/pixel.rs", "rank": 23, "score": 116459.83136412405 }, { "content": "pub fn score_polymer(polymer: &str) -> u32 {\n\n let mut element_counts = HashMap::new();\n\n\n\n for element in polymer.chars() {\n\n let count = element_counts.entry(element).or_insert(0);\n\n *count += 1;\n\n }\n\n\n\n let mut max = u32::MIN;\n\n let mut min = u32::MAX;\n\n\n\n for (_, count) in element_counts.into_iter() {\n\n max = cmp::max(max, count);\n\n min = cmp::min(min, count);\n\n }\n\n\n\n max - min\n\n}\n\n\n", "file_path": "day_14_puzzle_01/src/polymer.rs", "rank": 24, "score": 114881.70604080148 }, { "content": "fn parse_input(input: &str) -> Vec<u32> {\n\n input\n\n .split(\",\")\n\n .filter(|v| !v.is_empty())\n\n .map(|v| u32::from_str_radix(v, 10).unwrap())\n\n .collect::<Vec<u32>>()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_input() {\n\n let numbers = parse_input(\"3,4,3,1,2\");\n\n assert_eq!(vec![3, 4, 3, 1, 2], numbers);\n\n }\n\n\n\n #[test]\n\n fn test_school_from() {\n", "file_path": "day_06_puzzle_01/src/school_of_fish.rs", "rank": 25, "score": 113442.03812844201 }, { "content": "pub fn extract_output_digits(input: &str) -> &str {\n\n\tlet mut split = input.split(\"|\");\n\n\t// signals, ignore for now\n\n\tsplit.next();\n\n\tif let Some(output) = split.next() {\n\n\t\tif let Some(v) = split.next() {\n\n\t\t\tpanic!(\"Unexpected third entry after spliting '{}' on | - '{}'\", input, v);\n\n\t\t}\n\n\t\toutput.trim()\n\n\t} else {\n\n\t\tpanic!(\"Failed to parse '{}'\", input);\n\n\t}\n\n}\n\n\n", "file_path": "day_08_puzzle_01/src/digital_display.rs", "rank": 26, "score": 111923.26034261016 }, { "content": "pub fn extract_packet(packet: &str) -> (Packet, u32) {\n\n\tlet id = get_id(&packet);\n\n\tmatch id {\n\n\t\tTYPE_LITERAL_VALUE => {\n\n\t\t\tlet (literal, bits_read) = extract_literal_packet(&packet);\n\n\t\t\t(Packet::Literal(Box::new(literal)), bits_read)\n\n\t\t},\n\n\t\t_ => {\n\n\t\t\tlet (operator, bits_read) = extract_operator_packet(&packet);\n\n\t\t\t(Packet::Operator(Box::new(operator)), bits_read)\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "day_16_puzzle_01/src/bits.rs", "rank": 27, "score": 111766.87298638953 }, { "content": "pub fn extract_packet(packet: &str) -> (Packet, u32) {\n\n\tlet id = get_id(&packet);\n\n\tmatch id {\n\n\t\tTYPE_SUM_VALUE => {\n\n\t\t\tlet (operator, bits_read) = extract_operator_packet(&packet);\n\n\t\t\t(Packet::Sum(Box::new(operator)), bits_read)\n\n\t\t},\n\n\t\tTYPE_PRODUCT_VALUE => {\n\n\t\t\tlet (operator, bits_read) = extract_operator_packet(&packet);\n\n\t\t\t(Packet::Product(Box::new(operator)), bits_read)\n\n\t\t},\n\n\t\tTYPE_MIN_VALUE => {\n\n\t\t\tlet (operator, bits_read) = extract_operator_packet(&packet);\n\n\t\t\t(Packet::Min(Box::new(operator)), bits_read)\n\n\t\t},\n\n\t\tTYPE_MAX_VALUE => {\n\n\t\t\tlet (operator, bits_read) = extract_operator_packet(&packet);\n\n\t\t\t(Packet::Max(Box::new(operator)), bits_read)\n\n\t\t},\n\n\t\tTYPE_LITERAL_VALUE => {\n", "file_path": "day_16_puzzle_02/src/bits.rs", "rank": 28, "score": 111766.87298638953 }, { "content": "pub fn convert_hex_value_to_binary(hex_value: &str) -> String {\n\n\tlet mut binary_value = String::new();\n\n\n\n\tfor 
hex_code in hex_value.chars() {\n\n\t\tbinary_value.push_str(convert_hex_char_to_binary(hex_code));\n\n\t}\n\n\n\n\tbinary_value\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_convert() {\n\n\t\tassert_eq!(\"110100101111111000101000\", convert_hex_value_to_binary(&\"D2FE28\").as_str());\n\n\n\n\t\tassert_eq!(\"00111000000000000110111101000101001010010001001000000000\", convert_hex_value_to_binary(&\"38006F45291200\").as_str());\n\n\n\n\t\tassert_eq!(\"11101110000000001101010000001100100000100011000001100000\", convert_hex_value_to_binary(&\"EE00D40C823060\").as_str());\n\n\t}\n\n}", "file_path": "day_16_puzzle_02/src/hex.rs", "rank": 29, "score": 110534.844676298 }, { "content": "pub fn count_known_digits(encoded_digits: &str) -> u32 {\n\n\tlet mut count = 0;\n\n\tfor d in encoded_digits.split(\" \") {\n\n\t\tmatch d.trim().len() {\n\n\t\t\t2 | 3 | 4 | 7 => count += 1,\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t}\n\n\tcount\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_count_known_digits_empty() {\n\n\t\tassert_eq!(0, count_known_digits(\"\"));\n\n\t}\n\n\n", "file_path": "day_08_puzzle_01/src/digital_display.rs", "rank": 30, "score": 110534.844676298 }, { "content": "pub fn convert_hex_value_to_binary(hex_value: &str) -> String {\n\n\tlet mut binary_value = String::new();\n\n\n\n\tfor hex_code in hex_value.chars() {\n\n\t\tbinary_value.push_str(convert_hex_char_to_binary(hex_code));\n\n\t}\n\n\n\n\tbinary_value\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_convert() {\n\n\t\tassert_eq!(\"110100101111111000101000\", convert_hex_value_to_binary(&\"D2FE28\").as_str());\n\n\n\n\t\tassert_eq!(\"00111000000000000110111101000101001010010001001000000000\", convert_hex_value_to_binary(&\"38006F45291200\").as_str());\n\n\n\n\t\tassert_eq!(\"11101110000000001101010000001100100000100011000001100000\", convert_hex_value_to_binary(&\"EE00D40C823060\").as_str());\n\n\t}\n\n}", "file_path": "day_16_puzzle_01/src/hex.rs", "rank": 31, "score": 110534.844676298 }, { "content": "fn get_sub_packets(packet: &str) -> (Vec<Packet>, u32) {\n\n\tlet id = get_id(&packet);\n\n\tif id == TYPE_LITERAL_VALUE {\n\n\t\tpanic!(\"Expected type to be an operator value - found {}\", id)\n\n\t}\n\n\n\n\tlet length_type_id = convert_to_integer(&packet[6..7]);\n\n\tif length_type_id == LENGTH_TYPE_PACKETS_SIZE {\n\n\t\tlet sub_packets_bit_length = convert_to_integer(&packet[7..22]) as u32;\n\n\t\tlet mut sub_packets = vec![];\n\n\t\tlet mut total_bits_read = 0;\n\n\t\tlet mut index = 22;\n\n\t\tloop {\n\n\t\t\tlet (packet, bits_read) = extract_packet(&packet[index..]);\n\n\t\t\tsub_packets.push(packet);\n\n\t\t\tindex += bits_read as usize;\n\n\t\t\ttotal_bits_read += bits_read;\n\n\t\t\tif total_bits_read == sub_packets_bit_length {\n\n\t\t\t\tbreak;\n\n\t\t\t} else if total_bits_read > sub_packets_bit_length {\n", "file_path": "day_16_puzzle_01/src/bits.rs", "rank": 32, "score": 110387.89452255797 }, { "content": "fn get_sub_packets(packet: &str) -> (Vec<Packet>, u32) {\n\n\tlet id = get_id(&packet);\n\n\tif id == TYPE_LITERAL_VALUE {\n\n\t\tpanic!(\"Expected type to be an operator value - found {}\", id)\n\n\t}\n\n\n\n\tlet length_type_id = convert_to_integer(&packet[6..7]);\n\n\tif length_type_id == LENGTH_TYPE_PACKETS_SIZE {\n\n\t\tlet sub_packets_bit_length = convert_to_integer(&packet[7..22]) as u32;\n\n\t\tlet mut sub_packets = vec![];\n\n\t\tlet mut total_bits_read = 0;\n\n\t\tlet mut index = 
22;\n\n\t\tloop {\n\n\t\t\tlet (packet, bits_read) = extract_packet(&packet[index..]);\n\n\t\t\tsub_packets.push(packet);\n\n\t\t\tindex += bits_read as usize;\n\n\t\t\ttotal_bits_read += bits_read;\n\n\t\t\tif total_bits_read == sub_packets_bit_length {\n\n\t\t\t\tbreak;\n\n\t\t\t} else if total_bits_read > sub_packets_bit_length {\n", "file_path": "day_16_puzzle_02/src/bits.rs", "rank": 33, "score": 110387.89452255797 }, { "content": "fn parse_seed_numbers(seed_numbers: &str) -> Vec<u32> {\n\n seed_numbers.split(\",\")\n\n .map(|n| u32::from_str_radix(n, 10).unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_single_value() {\n\n let numbers = parse_seed_numbers(\"13\");\n\n assert_eq!(vec![13], numbers);\n\n }\n\n\n\n #[test]\n\n fn test_parse_multiple_values() {\n\n let numbers = parse_seed_numbers(\"1,17,44,98,27\");\n\n assert_eq!(vec![1,17,44,98,27], numbers);\n", "file_path": "day_04_puzzle_01/src/bingo/bingo_ball.rs", "rank": 34, "score": 109272.6204383058 }, { "content": "fn parse_row_numbers(row_numbers: &str) -> Vec<u32> {\n\n row_numbers.split(\" \")\n\n .filter(|&s| !s.is_empty())\n\n .map(|n| u32::from_str_radix(n, 10).unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_row() {\n\n let row = parse_row_numbers(\" 2 13 14 8 74\");\n\n assert_eq!(vec![2, 13, 14, 8, 74], row);\n\n }\n\n\n\n #[test]\n\n fn test_build_card() {\n\n let mut card = BingoCard::new();\n", "file_path": "day_04_puzzle_02/src/bingo/bingo_card.rs", "rank": 35, "score": 109272.6204383058 }, { "content": "fn parse_seed_numbers(seed_numbers: &str) -> Vec<u32> {\n\n seed_numbers.split(\",\")\n\n .map(|n| u32::from_str_radix(n, 10).unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_single_value() {\n\n let numbers = parse_seed_numbers(\"13\");\n\n assert_eq!(vec![13], numbers);\n\n }\n\n\n\n #[test]\n\n fn test_parse_multiple_values() {\n\n let numbers = parse_seed_numbers(\"1,17,44,98,27\");\n\n assert_eq!(vec![1,17,44,98,27], numbers);\n", "file_path": "day_04_puzzle_02/src/bingo/bingo_ball.rs", "rank": 36, "score": 109272.6204383058 }, { "content": "fn parse_row_numbers(row_numbers: &str) -> Vec<u32> {\n\n row_numbers.split(\" \")\n\n .filter(|&s| !s.is_empty())\n\n .map(|n| u32::from_str_radix(n, 10).unwrap())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_row() {\n\n let row = parse_row_numbers(\" 2 13 14 8 74\");\n\n assert_eq!(vec![2, 13, 14, 8, 74], row);\n\n }\n\n\n\n #[test]\n\n fn test_build_card() {\n\n let mut card = BingoCard::new();\n", "file_path": "day_04_puzzle_01/src/bingo/bingo_card.rs", "rank": 37, "score": 109272.6204383058 }, { "content": "pub fn find_all_hit_launches(target: &Rectangle) -> HashSet<Vector> {\n\n\n\n let horizontal_velocities = find_valid_horizontal_velocities(target.horizontal_range());\n\n\n\n let mut exact_steps = HashSet::new();\n\n let mut lowest_min_step = i32::MAX;\n\n for hori_vel in horizontal_velocities.iter() {\n\n if hori_vel.2 > 0 {\n\n exact_steps.insert(hori_vel.0);\n\n } else {\n\n lowest_min_step = cmp::min(lowest_min_step, hori_vel.0);\n\n }\n\n }\n\n\n\n let mut step_vertical_velocities = HashMap::new();\n\n\n\n for exact in exact_steps.iter() {\n\n let vertical_velocities = find_hit_vertical_velocities(target.vertical_range(), *exact);\n\n if vertical_velocities.len() > 0 {\n\n 
step_vertical_velocities.insert(*exact, vertical_velocities);\n", "file_path": "day_17_puzzle_02/src/ballistics.rs", "rank": 38, "score": 108930.79517343052 }, { "content": "fn find_digit(positions: Vec<u32>) -> Option<&'static Digit> {\n\n\tfor (_, digits) in ALL_DIGITS.iter() {\n\n\t\t'digits: for d in digits.iter() {\n\n\t\t\tif d.len() == positions.len() {\n\n\t\t\t\tfor i in 0..positions.len() {\n\n\t\t\t\t\tif d.positions[i] != positions[i] {\n\n\t\t\t\t\t\tcontinue 'digits;\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\treturn Some(d);\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tNone\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n", "file_path": "day_08_puzzle_02/src/frequency_analysis.rs", "rank": 39, "score": 107505.82246510634 }, { "content": "fn offset_total(numbers: &Vec<i32>, target: i32) -> i64 {\n\n let mut offset_total = 0;\n\n\n\n for n in numbers.iter() {\n\n let offset = (target - n).abs();\n\n offset_total += fuel_cost(offset) as i64;\n\n }\n\n\n\n offset_total\n\n}\n\n\n", "file_path": "day_07_puzzle_02/src/crab_alignment.rs", "rank": 40, "score": 107505.82246510634 }, { "content": "fn offset_total(numbers: &Vec<i32>, target: i32) -> i64 {\n\n let mut offset_total = 0;\n\n\n\n for n in numbers.iter() {\n\n let offset = (target - n).abs();\n\n offset_total += offset as i64;\n\n }\n\n\n\n offset_total\n\n}\n\n\n", "file_path": "day_07_puzzle_01/src/crab_alignment.rs", "rank": 41, "score": 107505.82246510634 }, { "content": "fn write_path<'input>(path: &Vec<CaveRef<'input>>) -> String {\n\n path.iter().map(|c| c.borrow().name).collect::<Vec<&str>>().join(\",\")\n\n}\n\n\n", "file_path": "day_12_puzzle_02/src/cave_network.rs", "rank": 42, "score": 106172.89524545251 }, { "content": "pub fn intersection(a: &CubeRange, b: &CubeRange) -> Option<CubeRange> {\n\n let (first, second) = if a.start() <= b.start() {\n\n (a, b)\n\n } else {\n\n (b, a)\n\n };\n\n\n\n let start = cmp::max(first.start(), second.start());\n\n let end = cmp::min(first.end(), second.end());\n\n\n\n if start <= end {\n\n Some(*start..=*end)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "day_22_puzzle_02/src/range.rs", "rank": 43, "score": 106049.21107858658 }, { "content": "pub fn intersection(a: &CubeRange, b: &CubeRange) -> Option<CubeRange> {\n\n let (first, second) = if a.start() <= b.start() {\n\n (a, b)\n\n } else {\n\n (b, a)\n\n };\n\n\n\n let start = cmp::max(first.start(), second.start());\n\n let end = cmp::min(first.end(), second.end());\n\n\n\n if start <= end {\n\n Some(*start..=*end)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "day_22_puzzle_01/src/range.rs", "rank": 44, "score": 106049.21107858658 }, { "content": "fn compute_character_counts(signals: Vec<&str>) -> HashMap<char, u32> {\n\n\tlet mut character_counts = HashMap::new();\n\n\n\n\tfor signal in signals.iter() {\n\n\t\tfor c in signal.chars() {\n\n\t\t\tlet count = character_counts.entry(c).or_insert(0);\n\n\t\t\t*count += 1;\n\n\t\t}\n\n\t}\n\n\n\n\tcharacter_counts\n\n}\n\n\n\nlazy_static! 
{\n\n\tstatic ref ZERO: Digit = Digit { value: 0, positions: vec![0, 1, 2, 4, 5, 6] };\n\n\tstatic ref ONE: Digit = Digit { value: 1, positions: vec![2, 5] };\n\n\tstatic ref TWO: Digit = Digit { value: 2, positions: vec![0, 2, 3, 4, 6] };\n\n\tstatic ref THREE: Digit = Digit { value: 3, positions: vec![0, 2, 3, 5, 6] };\n\n\tstatic ref FOUR: Digit = Digit { value: 4, positions: vec![1, 2, 3, 5] };\n\n\tstatic ref FIVE: Digit = Digit { value: 5, positions: vec![0, 1, 3, 5, 6] };\n", "file_path": "day_08_puzzle_02/src/frequency_analysis.rs", "rank": 45, "score": 104892.37415374354 }, { "content": "fn compute_position_counts(digits: &Vec<&Digit>) -> HashMap<u32, u32> {\n\n\tlet mut position_counts = HashMap::new();\n\n\n\n\tfor d in digits.iter() {\n\n\t\tfor p in d.positions.iter() {\n\n\t\t\tlet c = position_counts.entry(*p).or_insert(0);\n\n\t\t\t*c += 1;\n\n\t\t}\n\n\t}\n\n\n\n\tposition_counts\n\n}\n\n\n", "file_path": "day_08_puzzle_02/src/frequency_analysis.rs", "rank": 46, "score": 104892.37415374354 }, { "content": "fn find_hit_vertical_velocities(distance_range: (i32, i32), steps: i32) -> Vec<i32> {\n\n if distance_range.0 <= distance_range.1 {\n\n panic!(\"Distance range should go from closest to furthest and they should not equal - found ({}, {})\", distance_range.0, distance_range.1)\n\n }\n\n\n\n if steps == 0 {\n\n panic!(\"Zero steps means we are already in the target area - danger close!\")\n\n }\n\n\n\n let mut velocities = vec![];\n\n\n\n for velocity in 0.. {\n\n let gravity = 1 - steps;\n\n let fall = velocity * steps + gravity.triangle_number();\n\n if fall >= distance_range.1 && fall <= distance_range.0 {\n\n velocities.push(velocity);\n\n } else if fall > distance_range.0 {\n\n break;\n\n }\n\n }\n", "file_path": "day_17_puzzle_02/src/ballistics.rs", "rank": 47, "score": 102269.10793849188 }, { "content": "fn find_valid_horizontal_velocities(distance_range: (i32, i32)) -> Vec<(i32, i32, i32)> {\n\n if distance_range.0 >= distance_range.1 {\n\n panic!(\"Distance range should go from closest to furthest and they should not equal - found ({}, {})\", distance_range.0, distance_range.1)\n\n }\n\n let max_velocity = distance_range.1;\n\n\n\n let mut hits = vec![];\n\n\n\n 'velocities: for initial_velocity in 1..=max_velocity {\n\n let mut d = 0;\n\n let mut v = initial_velocity;\n\n for step in 1.. 
{\n\n d += v;\n\n v -= 1;\n\n if d >= distance_range.0 && d <= distance_range.1 {\n\n hits.push((step, initial_velocity, v));\n\n }\n\n if d > distance_range.1 || v == 0 {\n\n continue 'velocities;\n\n }\n\n }\n\n }\n\n\n\n hits\n\n}\n\n\n", "file_path": "day_17_puzzle_02/src/ballistics.rs", "rank": 48, "score": 102269.10793849188 }, { "content": "pub fn overlap(a_min: i32, a_max: i32, b_min: i32, b_max: i32) -> bool {\n\n b_min <= a_max && b_max >= a_min\n\n}\n\n\n", "file_path": "day_05_puzzle_02/src/geometry.rs", "rank": 49, "score": 99779.242750209 }, { "content": "pub fn play_game(die: &mut Die, player_1: &mut Player, player_2: &mut Player) {\n\n\tloop {\n\n\t\tif player_1.take_turn(die) {\n\n\t\t\tbreak;\n\n\t\t}\n\n\t\tif player_2.take_turn(die) {\n\n\t\t\tbreak;\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn example() {\n\n\t\tlet mut die = Die::new();\n\n\n\n\t\tlet mut player_1 = Player::new(4);\n\n\t\tlet mut player_2 = Player::new(8);\n\n\n\n\t\tplay_game(&mut die, &mut player_1, &mut player_2);\n\n\n\n\t\tassert_eq!(1000, player_1.score());\n\n\t\tassert_eq!(745, player_2.score());\n\n\t\tassert_eq!(993, die.roll_count());\n\n\t}\n\n}", "file_path": "day_21_puzzle_01/src/game.rs", "rank": 50, "score": 98548.27583721225 }, { "content": "pub fn overlap_range(a_min: i32, a_max: i32, b_min: i32, b_max: i32) -> Option<(i32, i32)> {\n\n if !overlap(a_min, a_max, b_min, b_max) {\n\n return None\n\n }\n\n\n\n let start = cmp::max(a_min, b_min);\n\n let end = cmp::min(a_max, b_max);\n\n\n\n Some((start, end))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_a_before_b() {\n\n assert_eq!(false, overlap(1, 10, 11, 20));\n\n }\n\n\n", "file_path": "day_05_puzzle_02/src/geometry.rs", "rank": 51, "score": 94139.0863230948 }, { "content": "struct Insertion {\n\n index: usize,\n\n element: char,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_single_step() {\n\n let template = \"NNCB\";\n\n\n\n let rules = vec![\n\n PairInsertionRule { pair: \"NN\", element: 'C' },\n\n PairInsertionRule { pair: \"NC\", element: 'B' },\n\n PairInsertionRule { pair: \"CB\", element: 'H' },\n\n ];\n\n\n\n let polymer = polymerization(&template, &rules);\n", "file_path": "day_14_puzzle_01/src/polymer.rs", "rank": 52, "score": 74527.05829989414 }, { "content": "struct DiagnosticSummary {\n\n\tsummary: Option<Vec<i32>>,\n\n\tcount: i32,\n\n}\n\n\n\nimpl DiagnosticSummary {\n\n\tfn new() -> DiagnosticSummary {\n\n\t\tDiagnosticSummary {\n\n\t\t\tsummary: None,\n\n\t\t\tcount: 0\n\n\t\t}\n\n\t}\n\n\n\n\tfn analyze_diagnostic(&mut self, diagnostic: &str) {\n\n\t\tif let None = self.summary {\n\n\t\t\t// first entry determines the size\n\n\t\t\tself.summary = Some(vec![0; diagnostic.len().try_into().unwrap()]);\n\n\t\t}\n\n\t\tlet summary = self.summary.as_mut().unwrap();\n\n\t\tif diagnostic.len() != summary.len() {\n", "file_path": "day_03_puzzle_02/src/life_support.rs", "rank": 53, "score": 72634.09300103245 }, { "content": "struct SlidingWindowState {\n\n pub depth_sum: i32,\n\n pub depth_count: i32,\n\n}\n\n\n\nimpl SlidingWindowState {\n\n fn new() -> SlidingWindowState {\n\n SlidingWindowState {\n\n depth_sum: 0,\n\n depth_count: 0,\n\n }\n\n }\n\n\n\n fn add(&mut self, depth: i32) -> bool {\n\n if self.depth_count < SLIDING_WINDOW_SIZE {\n\n self.depth_sum += depth;\n\n self.depth_count += 1;\n\n }\n\n self.depth_count == SLIDING_WINDOW_SIZE\n\n }\n", "file_path": 
"day_01_puzzle_02/src/sonar_scan.rs", "rank": 54, "score": 71752.3357649555 }, { "content": "struct PriorityPos<'input> {\n\n pos: &'input Position,\n\n priority: usize,\n\n}\n\n\n\nimpl<'input> PriorityPos<'input> {\n\n fn new(pos: &'input Position, priority: usize) -> PriorityPos<'input> {\n\n PriorityPos {\n\n pos,\n\n priority,\n\n }\n\n }\n\n}\n\n\n\nimpl<'input> PartialEq for PriorityPos<'input> {\n\n\n\n fn eq(&self, other: &Self) -> bool {\n\n self.priority == other.priority\n\n }\n\n}\n", "file_path": "day_15_puzzle_01/src/path_finder.rs", "rank": 55, "score": 70306.07510256252 }, { "content": "struct PriorityPos<'input> {\n\n pos: &'input Position,\n\n priority: usize,\n\n}\n\n\n\nimpl<'input> PriorityPos<'input> {\n\n fn new(pos: &'input Position, priority: usize) -> PriorityPos<'input> {\n\n PriorityPos {\n\n pos,\n\n priority,\n\n }\n\n }\n\n}\n\n\n\nimpl<'input> PartialEq for PriorityPos<'input> {\n\n\n\n fn eq(&self, other: &Self) -> bool {\n\n self.priority == other.priority\n\n }\n\n}\n", "file_path": "day_15_puzzle_02/src/path_finder.rs", "rank": 56, "score": 70306.07510256252 }, { "content": "pub trait TriangleNumber {\n\n fn triangle_number(&self) -> i32;\n\n}\n\n\n\nimpl TriangleNumber for i32 {\n\n \n\n fn triangle_number(&self) -> i32 {\n\n self * (self + 1) / 2\n\n }\n\n}", "file_path": "day_17_puzzle_01/src/triangle_number.rs", "rank": 57, "score": 68178.621654238 }, { "content": "pub trait TriangleNumber {\n\n fn triangle_number(&self) -> i32;\n\n}\n\n\n\nimpl TriangleNumber for i32 {\n\n \n\n fn triangle_number(&self) -> i32 {\n\n self * (self.abs() + 1) / 2\n\n }\n\n}", "file_path": "day_17_puzzle_02/src/triangle_number.rs", "rank": 58, "score": 68178.621654238 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_08_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut known_digit_totals = 0;\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n let output_digits = digital_display::extract_output_digits(&entry.trim());\n\n\n\n known_digit_totals += digital_display::count_known_digits(output_digits);\n\n }\n\n }\n\n\n\n println!(\"{}\", known_digit_totals);\n\n\n\n Ok(())\n\n}\n", "file_path": "day_08_puzzle_01/src/main.rs", "rank": 59, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_10_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut checker = ChunkChecker::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n checker.parse_line(entry.trim());\n\n }\n\n }\n\n\n\n println!(\"{}\", checker.syntax_error_score());\n\n\n\n Ok(())\n\n}", "file_path": "day_10_puzzle_01/src/main.rs", "rank": 60, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let data = fs::read_to_string(\"./day_06_puzzle_01/input.txt\").unwrap();\n\n\n\n let mut school = SchoolOfFish::from(data.trim());\n\n\n\n // for _ in 0..80 {\n\n for _ in 0..256 {\n\n school.next_day();\n\n }\n\n\n\n println!(\"After 80 days there are {} lanternfish\", school.len());\n\n\n\n Ok(())\n\n}\n", "file_path": "day_06_puzzle_01/src/main.rs", "rank": 61, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let dot_regex = Regex::new(r\"(?P<x>\\d+),(?P<y>\\d+)\").unwrap();\n\n let fold_regex = Regex::new(r\"fold along 
(?P<axis>[xy])=(?P<offset>\\d+)\").unwrap();\n\n\n\n let file = File::open(\"./day_13_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut builder = PaperBuilder::new();\n\n\n\n let mut fold_commands: Vec<String> = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n if dot_regex.is_match(entry.trim()) {\n\n if let Some(captures) = dot_regex.captures(entry.trim()) {\n\n builder.add_dot(\n", "file_path": "day_13_puzzle_02/src/main.rs", "rank": 62, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_20_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut enhancement: Option<String> = None;\n\n let mut pixels = vec![];\n\n \n\n for (index, line) in lines.enumerate() {\n\n if line.is_err() {\n\n continue;\n\n }\n\n let line = line.unwrap();\n\n if line.trim().is_empty() {\n\n continue;\n\n }\n\n if index == 0 {\n\n enhancement = Some(line);\n\n } else {\n\n pixels.push(line);\n\n }\n", "file_path": "day_20_puzzle_01/src/main.rs", "rank": 63, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_01_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut scan = SonarScan::new();\n\n for line in lines {\n\n let depth = line.expect(\"Error reading line\").trim().parse().expect(\"Line should be an integer\");\n\n scan.process_depth(depth);\n\n }\n\n\n\n println!(\"{}\", scan.depth_increase_count());\n\n Ok(())\n\n}\n", "file_path": "day_01_puzzle_01/src/main.rs", "rank": 64, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_22_puzzle_01/input.txt\")?;\n\n\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut reactor = Reactor::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n reactor = reactor.run_command(&entry.trim());\n\n }\n\n }\n\n\n\n println!(\"{}\", reactor.cube_count(false));\n\n\n\n Ok(())\n\n}\n", "file_path": "day_22_puzzle_02/src/main.rs", "rank": 65, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let dot_regex = Regex::new(r\"(?P<x>\\d+),(?P<y>\\d+)\").unwrap();\n\n let fold_regex = Regex::new(r\"fold along (?P<axis>[xy])=(?P<offset>\\d+)\").unwrap();\n\n\n\n let file = File::open(\"./day_13_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut builder = PaperBuilder::new();\n\n\n\n let mut fold_commands: Vec<String> = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n if dot_regex.is_match(entry.trim()) {\n\n if let Some(captures) = dot_regex.captures(entry.trim()) {\n\n builder.add_dot(\n", "file_path": "day_13_puzzle_01/src/main.rs", "rank": 66, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_02_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut sub = Submarine::new();\n\n for line in lines {\n\n sub.execute_command(&line.expect(\"Expected a line\"));\n\n }\n\n\n\n println!(\"Submarines ends up at horizontal position {} and depth {}\", sub.horizontal_position(), sub.depth());\n\n println!(\"Multiplied together they are {}\", sub.horizontal_position() * sub.depth());\n\n Ok(())\n\n}\n", "file_path": 
"day_02_puzzle_02/src/main.rs", "rank": 67, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_14_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut template = None;\n\n let mut rules = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n if template.is_none() {\n\n template = Some(String::from(entry.trim()));\n\n } else {\n\n rules.push(String::from(entry.trim()));\n\n }\n\n }\n\n }\n\n\n", "file_path": "day_14_puzzle_01/src/main.rs", "rank": 68, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_04_puzzle_01/input.txt\")?;\n\n let reader = BufReader::new(file);\n\n\n\n let mut game = BingoGame::new();\n\n\n\n let mut current_card = None;\n\n\n\n for (i, line) in reader.lines().enumerate() {\n\n if let Ok(line) = line {\n\n if line.trim().len() == 0 {\n\n // skip empty lines\n\n continue;\n\n }\n\n if i == 0 {\n\n let ball = BingoBall::new(line.trim());\n\n game.setup_ball(ball);\n\n } else {\n\n if current_card.is_none() {\n\n current_card = Some(BingoCard::new());\n", "file_path": "day_04_puzzle_02/src/main.rs", "rank": 69, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_12_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut connections = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n connections.push(entry);\n\n }\n\n }\n\n\n\n let network = cave_network::create_cave_network(connections.iter().map(|s| s.as_str()).collect());\n\n let paths = cave_network::find_paths(network[\"start\"].clone(), network[\"end\"].clone());\n\n\n\n println!(\"{}\", paths.len());\n\n\n\n Ok(())\n\n}", "file_path": "day_12_puzzle_01/src/main.rs", "rank": 70, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let input = fs::read_to_string(\"./day_17_puzzle_01/input.txt\").unwrap();\n\n\n\n let target = Rectangle::from(&input.trim());\n\n\n\n let fanciest_hit = ballistics::find_fanciest_hit_arc(&target);\n\n\n\n if let Some(hit) = fanciest_hit {\n\n let mut max_y_pos = 0;\n\n for p in hit.iter() {\n\n max_y_pos = cmp::max(max_y_pos, p.p_y());\n\n }\n\n\n\n println!(\"{}\", max_y_pos);\n\n\n\n } else {\n\n panic!(\"No hit was found for target area\")\n\n }\n\n\n\n Ok(())\n\n}", "file_path": "day_17_puzzle_01/src/main.rs", "rank": 71, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_10_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut checker = ChunkChecker::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n checker.parse_line(entry.trim());\n\n }\n\n }\n\n\n\n println!(\"{}\", checker.middle_incomplete_score());\n\n\n\n Ok(())\n\n}", "file_path": "day_10_puzzle_02/src/main.rs", "rank": 72, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_03_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut power = PowerConsumption::new();\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().len() == 0 {\n\n // skip any rows with no content\n\n continue;\n\n }\n\n power.analyze_entry(&entry.trim());\n\n 
}\n\n }\n\n\n\n println!(\"The submarine's diagnostics give it a gamma rate of {} and an epsilon rate of {}\", power.gamme_rate(), power.epsilon_rate());\n\n println!(\"Multiplied together that gives {}\", power.gamme_rate() * power.epsilon_rate());\n\n\n\n Ok(())\n\n}\n", "file_path": "day_03_puzzle_01/src/main.rs", "rank": 73, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_01_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut scan = SonarScan::new();\n\n for line in lines {\n\n let depth = line.expect(\"Error reading line\").trim().parse().expect(\"Line should be an integer\");\n\n scan.process_depth(depth);\n\n }\n\n\n\n println!(\"{}\", scan.depth_increase_count());\n\n Ok(())\n\n}\n", "file_path": "day_01_puzzle_02/src/main.rs", "rank": 74, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_04_puzzle_01/input.txt\")?;\n\n let reader = BufReader::new(file);\n\n\n\n let mut game = BingoGame::new();\n\n\n\n let mut current_card = None;\n\n\n\n for (i, line) in reader.lines().enumerate() {\n\n if let Ok(line) = line {\n\n if line.trim().len() == 0 {\n\n // skip empty lines\n\n continue;\n\n }\n\n if i == 0 {\n\n let ball = BingoBall::new(line.trim());\n\n game.setup_ball(ball);\n\n } else {\n\n if current_card.is_none() {\n\n current_card = Some(BingoCard::new());\n", "file_path": "day_04_puzzle_01/src/main.rs", "rank": 75, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_15_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut builder = CaveBuilder::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n builder.add_row(\n\n entry.trim()\n\n .chars()\n\n .map(|c| u32::from_str_radix(&c.to_string(), 10).unwrap())\n\n .collect::<Vec<u32>>()\n\n );\n\n }\n\n }\n\n\n", "file_path": "day_15_puzzle_01/src/main.rs", "rank": 76, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let input = fs::read_to_string(\"./day_17_puzzle_01/input.txt\").unwrap();\n\n\n\n let target = Rectangle::from(&input.trim());\n\n\n\n let all_hit_launches = ballistics::find_all_hit_launches(&target);\n\n println!(\"Found {} unique hit launches\", all_hit_launches.len());\n\n\n\n Ok(())\n\n}", "file_path": "day_17_puzzle_02/src/main.rs", "rank": 77, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_03_puzzle_01/input.txt\")?;\n\n let reader = BufReader::new(file);\n\n\n\n let mut entries: Vec<String> = vec!();\n\n for line in reader.lines() {\n\n if let Ok(line) = line {\n\n if line.trim().len() > 0 {\n\n entries.push(String::from(line.trim()));\n\n }\n\n }\n\n }\n\n let mut life_support = LifeSupport::new();\n\n for entry in entries.iter() {\n\n if entry.trim().len() == 0 {\n\n // skip any rows with no content\n\n continue;\n\n }\n\n life_support.load_diagnostic(&entry.trim());\n\n }\n\n\n\n let oxygen = life_support.oxygen_rating();\n\n let scrubber = life_support.scrubber_rating();\n\n\n\n println!(\"The submarine's diagnostics give it an oxygen rating of {} and a scrubber rating of {}\", oxygen, scrubber);\n\n println!(\"Multiplied together that gives {}\", oxygen * scrubber);\n\n\n\n Ok(())\n\n}\n", "file_path": "day_03_puzzle_02/src/main.rs", "rank": 78, "score": 54070.92188537092 }, { "content": 
"fn main() -> std::io::Result<()> {\n\n let hex_data = fs::read_to_string(\"./day_16_puzzle_01/input.txt\").unwrap();\n\n\n\n let binary_data = hex::convert_hex_value_to_binary(hex_data.trim());\n\n\n\n let (packet, _) = bits::extract_packet(&binary_data);\n\n\n\n let version_totals = packet.version_total();\n\n\n\n println!(\"{}\", version_totals);\n\n\n\n Ok(())\n\n}", "file_path": "day_16_puzzle_01/src/main.rs", "rank": 79, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_15_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut builder = CaveBuilder::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n builder.add_row(\n\n entry.trim()\n\n .chars()\n\n .map(|c| u32::from_str_radix(&c.to_string(), 10).unwrap())\n\n .collect::<Vec<u32>>()\n\n );\n\n }\n\n }\n\n\n", "file_path": "day_15_puzzle_02/src/main.rs", "rank": 80, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_09_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut scanner = MapScanner::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n let row = entry.trim().chars().map(|c| c.to_digit(10).unwrap()).collect::<Vec<u32>>();\n\n scanner.scan_row(row);\n\n }\n\n }\n\n\n\n scanner.merge_basins();\n\n\n\n println!(\"{}\", scanner.largest_basins_score(3));\n\n\n\n Ok(())\n\n}\n", "file_path": "day_09_puzzle_02/src/main.rs", "rank": 81, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_22_puzzle_01/input.txt\")?;\n\n\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut reactor = Reactor::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n reactor = reactor.run_command(&entry.trim());\n\n }\n\n }\n\n\n\n println!(\"{}\", reactor.cube_count());\n\n\n\n Ok(())\n\n}\n", "file_path": "day_22_puzzle_01/src/main.rs", "rank": 82, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let mut die = Die::new();\n\n let mut player_1 = Player::new(8);\n\n let mut player_2 = Player::new(10);\n\n\n\n game::play_game(&mut die, &mut player_1, &mut player_2);\n\n\n\n let (winner, looser) = if player_1.winner() {\n\n (player_1, player_2)\n\n } else if player_2.winner() {\n\n (player_2, player_1)\n\n } else {\n\n panic!(\"Neither player 1 or player 2 has won the game\")\n\n };\n\n\n\n println!(\"{} > {}\", winner.score(), looser.score());\n\n println!(\"{}\", looser.score() as u32 * die.roll_count());\n\n\n\n Ok(())\n\n}\n", "file_path": "day_21_puzzle_01/src/main.rs", "rank": 83, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_05_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut grid = Grid::new();\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().len() == 0 {\n\n // skip any rows with no content\n\n continue;\n\n }\n\n let l = Line::from(entry.trim());\n\n grid.add_line(l);\n\n }\n\n }\n\n\n\n let overlaps = grid.overlaps();\n\n\n\n println!(\"With all of the lines there are {} points where they overlap\", overlaps.len());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day_05_puzzle_02/src/main.rs", "rank": 84, "score": 
54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_09_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut map = HeightMap::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n let row = entry.trim().chars().map(|c| c.to_digit(10).unwrap()).collect::<Vec<u32>>();\n\n map.process_row(Some(row));\n\n }\n\n }\n\n map.process_row(None);\n\n\n\n println!(\"{}\", map.risk_level_total());\n\n\n\n Ok(())\n\n}\n", "file_path": "day_09_puzzle_01/src/main.rs", "rank": 85, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_05_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut grid = Grid::new();\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().len() == 0 {\n\n // skip any rows with no content\n\n continue;\n\n }\n\n let l = Line::from(entry.trim());\n\n if l.horizontal() || l.vertical() {\n\n grid.add_line(l);\n\n }\n\n }\n\n }\n\n\n\n let overlaps = grid.overlaps();\n\n\n\n println!(\"With all of the lines there are {} points where they overlap\", overlaps.len());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day_05_puzzle_01/src/main.rs", "rank": 86, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_12_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut connections = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n connections.push(entry);\n\n }\n\n }\n\n\n\n let network = cave_network::create_cave_network(connections.iter().map(|s| s.as_str()).collect());\n\n let paths = cave_network::find_paths(&network, network[\"start\"].clone(), network[\"end\"].clone());\n\n\n\n println!(\"{}\", paths.len());\n\n\n\n Ok(())\n\n}", "file_path": "day_12_puzzle_02/src/main.rs", "rank": 87, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_08_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut display_output_total = 0;\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n let mut analysis = FrequencyAnalysis::from(&entry);\n\n analysis.analyze();\n\n display_output_total += analysis.decode_display_output();\n\n }\n\n }\n\n\n\n println!(\"{}\", display_output_total);\n\n\n\n Ok(())\n\n}\n", "file_path": "day_08_puzzle_02/src/main.rs", "rank": 88, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let data = fs::read_to_string(\"./day_07_puzzle_01/input.txt\").unwrap();\n\n\n\n let numbers = crab_alignment::parse_numbers(&data);\n\n let min_offset = crab_alignment::min_offset_total_target_brute_force(&numbers);\n\n\n\n println!(\"The minimum amount of fuel to align all of the crabs (at position {}) is {}\", min_offset.0, min_offset.1);\n\n\n\n Ok(())\n\n}\n", "file_path": "day_07_puzzle_01/src/main.rs", "rank": 89, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let data = fs::read_to_string(\"./day_07_puzzle_01/input.txt\").unwrap();\n\n\n\n let numbers = crab_alignment::parse_numbers(&data);\n\n let min_offset = crab_alignment::min_offset_total_target_brute_force(&numbers);\n\n\n\n println!(\"The minimum amount of fuel to align all of the crabs (at 
position {}) is {}\", min_offset.0, min_offset.1);\n\n\n\n Ok(())\n\n}\n", "file_path": "day_07_puzzle_02/src/main.rs", "rank": 90, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_02_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut sub = Submarine::new();\n\n for line in lines {\n\n sub.execute_command(&line.expect(\"Expected a line\"));\n\n }\n\n\n\n println!(\"Submarines ends up at horizontal position {} and depth {}\", sub.horizontal_position(), sub.depth());\n\n println!(\"Multiplied together they are {}\", sub.horizontal_position() * sub.depth());\n\n Ok(())}\n", "file_path": "day_02_puzzle_01/src/main.rs", "rank": 91, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_11_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut grid = OctopusGrid::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n grid.add_row(entry.trim().chars()\n\n .map(|c| u32::from_str_radix(&c.to_string(), 10).unwrap())\n\n .collect::<Vec<u32>>());\n\n }\n\n }\n\n\n\n if let Some(step) = grid.compute_synchronized_flash_step() {\n\n println!(\"All octopi synchronize their flashes on step {}\", step);\n\n } else {\n\n println!(\"The octopi never synchronize their flashes\");\n\n }\n\n\n\n\n\n Ok(())\n\n}", "file_path": "day_11_puzzle_02/src/main.rs", "rank": 92, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_14_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut template = None;\n\n let mut rules = vec![];\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n if template.is_none() {\n\n template = Some(String::from(entry.trim()));\n\n } else {\n\n rules.push(String::from(entry.trim()));\n\n }\n\n }\n\n }\n\n\n", "file_path": "day_14_puzzle_02/src/main.rs", "rank": 93, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n\n\n let file = File::open(\"./day_11_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut grid = OctopusGrid::new();\n\n\n\n for line in lines {\n\n if let Ok(entry) = line {\n\n if entry.trim().is_empty() {\n\n continue;\n\n }\n\n grid.add_row(entry.trim().chars()\n\n .map(|c| u32::from_str_radix(&c.to_string(), 10).unwrap())\n\n .collect::<Vec<u32>>());\n\n }\n\n }\n\n\n\n println!(\"{}\", grid.compute_flashes(100));\n\n\n\n Ok(())\n\n}", "file_path": "day_11_puzzle_01/src/main.rs", "rank": 94, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let hex_data = fs::read_to_string(\"./day_16_puzzle_01/input.txt\").unwrap();\n\n\n\n let binary_data = hex::convert_hex_value_to_binary(hex_data.trim());\n\n\n\n let (packet, _) = bits::extract_packet(&binary_data);\n\n\n\n let value = packet.value();\n\n\n\n println!(\"{}\", value);\n\n\n\n Ok(())\n\n}", "file_path": "day_16_puzzle_02/src/main.rs", "rank": 95, "score": 54070.92188537092 }, { "content": "fn main() -> std::io::Result<()> {\n\n let file = File::open(\"./day_20_puzzle_01/input.txt\")?;\n\n let lines = BufReader::new(file).lines();\n\n\n\n let mut enhancement: Option<String> = None;\n\n let mut pixels = vec![];\n\n \n\n for (index, line) in lines.enumerate() {\n\n if line.is_err() {\n\n continue;\n\n }\n\n let line = 
line.unwrap();\n\n if line.trim().is_empty() {\n\n continue;\n\n }\n\n if index == 0 {\n\n enhancement = Some(line);\n\n } else {\n\n pixels.push(line);\n\n }\n", "file_path": "day_20_puzzle_02/src/main.rs", "rank": 96, "score": 54070.92188537092 }, { "content": "fn get_id(packet: &str) -> u64 {\n\n\tif packet.len() < 6 {\n\n\t\tpanic!(\"packet is too small - found size {}\", packet.len())\n\n\t}\n\n\n\n\tconvert_to_integer(&packet[3..6])\n\n}\n\n\n", "file_path": "day_16_puzzle_02/src/bits.rs", "rank": 97, "score": 53256.268865213715 }, { "content": "fn get_version(packet: &str) -> u64 {\n\n\tif packet.len() < 3 {\n\n\t\tpanic!(\"packet is too small - found size {}\", packet.len())\n\n\t}\n\n\n\n\tconvert_to_integer(&packet[..3])\n\n}\n\n\n", "file_path": "day_16_puzzle_01/src/bits.rs", "rank": 98, "score": 53256.268865213715 }, { "content": "fn get_version(packet: &str) -> u64 {\n\n\tif packet.len() < 3 {\n\n\t\tpanic!(\"packet is too small - found size {}\", packet.len())\n\n\t}\n\n\n\n\tconvert_to_integer(&packet[..3])\n\n}\n\n\n", "file_path": "day_16_puzzle_02/src/bits.rs", "rank": 99, "score": 53256.268865213715 } ]
Rust
backend/src/integrations/lifx/utils.rs
FruitieX/homectl-rs
fc3d8e5569ce813491e7cd234b1ad774ca81ebd6
use anyhow::{anyhow, Result}; use byteorder::{ByteOrder, LittleEndian}; use homectl_types::device::{Device, DeviceColor, DeviceId, DeviceState, Light}; use homectl_types::integration::IntegrationId; use palette::Hsv; use std::net::SocketAddr; #[derive(Clone, Debug)] pub struct LifxState { pub hue: u16, pub sat: u16, pub bri: u16, pub power: u16, pub label: String, pub addr: SocketAddr, pub transition: Option<u32>, } #[derive(Clone, Debug)] pub enum LifxMsg { Get(SocketAddr), SetColor(LifxState), State(LifxState), SetPower(LifxState), Unknown, } pub fn lifx_msg_type_to_u16(msg_type: LifxMsg) -> u16 { match msg_type { LifxMsg::Get(_) => 101, LifxMsg::SetColor(_) => 102, LifxMsg::State(_) => 107, LifxMsg::SetPower(_) => 117, LifxMsg::Unknown => panic!("Cannot convert LifxMsg::Unknown to u16"), } } fn mk_lifx_msg_payload(lifx_msg: LifxMsg) -> Option<Vec<u8>> { match lifx_msg { LifxMsg::SetPower(state) => { let mut buf: [u8; 16 + 32] = [0; 16 + 32]; LittleEndian::write_u16(&mut buf, state.power); if let Some(t) = state.transition { LittleEndian::write_u32(&mut buf[2..], t) } Some(buf.to_vec()) } LifxMsg::SetColor(state) => { let mut buf: [u8; 8 + 16 * 4 + 32] = [0; 8 + 16 * 4 + 32]; LittleEndian::write_u16(&mut buf[1..], state.hue); LittleEndian::write_u16(&mut buf[3..], state.sat); LittleEndian::write_u16(&mut buf[5..], state.bri); LittleEndian::write_u16(&mut buf[7..], 6500); let t = state.transition.unwrap_or(500); LittleEndian::write_u32(&mut buf[9..], t); Some(buf.to_vec()) } _ => None, } } pub fn mk_lifx_udp_msg(lifx_msg: LifxMsg) -> Vec<u8> { let mut frame: [u8; 8] = [0; 8]; let protocol = 1024; let origin = 0; let tagged = 1; let addressable = 1; LittleEndian::write_u16(&mut frame, 0); LittleEndian::write_u16( &mut frame[2..], protocol | (origin << 14) | (tagged << 13) | (addressable << 12), ); LittleEndian::write_u16(&mut frame[1..], 4); let mut frame_address: [u8; 16] = [0; 16]; let ack_required = 0; let res_required = match lifx_msg { LifxMsg::Get(_) => 1, _ => 0, }; frame_address[14] = (ack_required << 1) | res_required; let mut protocol_header: [u8; 12] = [0; 12]; let msg_type = lifx_msg_type_to_u16(lifx_msg.clone()); LittleEndian::write_u16(&mut protocol_header[8..], msg_type); let payload = mk_lifx_msg_payload(lifx_msg); let payload_size = payload.clone().map(|p| p.len()).unwrap_or(0); let msg_size = frame.len() + frame_address.len() + protocol_header.len() + payload_size; LittleEndian::write_u16(&mut frame, msg_size as u16); let mut msg: Vec<u8> = vec![]; msg.append(&mut frame.to_vec()); msg.append(&mut frame_address.to_vec()); msg.append(&mut protocol_header.to_vec()); if let Some(payload) = payload { msg.append(&mut payload.to_vec()); }; msg } pub fn read_lifx_msg(buf: &[u8], addr: SocketAddr) -> LifxMsg { let msg_type = LittleEndian::read_u16(&buf[32..]); let payload = &buf[36..]; match msg_type { 107 => { let hue = LittleEndian::read_u16(payload); let sat = LittleEndian::read_u16(&payload[2..]); let bri = LittleEndian::read_u16(&payload[4..]); let power = LittleEndian::read_u16(&payload[10..]); let label = std::str::from_utf8(&payload[12..(12 + 32)]) .unwrap_or("Unknown") .to_owned() .replace('\0', ""); let state = LifxState { hue, sat, bri, power, label, addr, transition: None, }; LifxMsg::State(state) } _ => LifxMsg::Unknown, } } pub fn from_lifx_state(lifx_state: LifxState, integration_id: IntegrationId) -> Device { let hue = from_lifx_hue((f32::from(lifx_state.hue) / 65535.0) * 360.0); let sat = f32::from(lifx_state.sat) / 65535.0; let bri = f32::from(lifx_state.bri) / 
65535.0; let power = lifx_state.power == 65535; let color = Hsv::new(hue, sat, bri); let transition_ms = lifx_state.transition.map(|transition| transition as u64); let state = DeviceState::Light(Light::new( power, None, Some(DeviceColor::Color(color)), transition_ms, )); Device { id: DeviceId::new(&lifx_state.addr.to_string()), name: lifx_state.label, integration_id, scene: None, state, } } pub fn to_lifx_state(device: &Device) -> Result<LifxState> { let light_state = match device.state.clone() { DeviceState::Light(Light { brightness, color, power, transition_ms, }) => Ok(Light { power, brightness, color, transition_ms, }), _ => Err(anyhow!("Unsupported device state")), }?; let power = if light_state.power { 65535 } else { 0 }; let transition = light_state .transition_ms .map(|transition_ms| transition_ms as u32); match light_state.color { Some(DeviceColor::Color(color)) => { let hue = ((to_lifx_hue(color.hue.to_positive_degrees()) / 360.0) * 65535.0).floor() as u16; let sat = (color.saturation * 65535.0).floor() as u16; let bri = (light_state.brightness.unwrap_or(1.0) * color.value * 65535.0).floor() as u16; Ok(LifxState { hue, sat, bri, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }) } Some(DeviceColor::Cct(_)) => Err(anyhow!( "Support for Lifx color temperature mode not implemented" )), None => Ok(LifxState { hue: 0, sat: 0, bri: 0, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }), } } pub fn to_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 1.0 / 2.0) * 60.0 } else { h } } pub fn from_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 2.0 / 1.0) * 60.0 } else { h } }
use anyhow::{anyhow, Result}; use byteorder::{ByteOrder, LittleEndian}; use homectl_types::device::{Device, DeviceColor, DeviceId, DeviceState, Light}; use homectl_types::integration::IntegrationId; use palette::Hsv; use std::net::SocketAddr; #[derive(Clone, Debug)] pub struct LifxState { pub hue: u16, pub sat: u16, pub bri: u16, pub power: u16, pub label: String, pub addr: SocketAddr, pub transition: Option<u32>, } #[derive(Clone, Debug)] pub enum LifxMsg { Get(SocketAddr), SetColor(LifxState), State(LifxState), SetPower(LifxState), Unknown, } pub fn lifx_msg_type_to_u16(msg_type: LifxMsg) -> u16 { match msg_type { LifxMsg::Get(_) => 101, LifxMsg::SetColor(_) => 102, LifxMsg::State(_) => 107, LifxMsg::SetPower(_) => 117, LifxMsg::Unknown => panic!("Cannot convert LifxMsg::Unknown to u16"), } } fn mk_lifx_msg_payload(lifx_msg: LifxMsg) -> Option<Vec<u8>> { match lifx_msg { LifxMsg::SetPower(state) => { let mut buf: [u8; 16 + 32] = [0; 16 + 32]; LittleEndian::write_u16(&mut buf, state.power); if let Some(t) = state.transition { LittleEndian::write_u32(&mut buf[2..], t) } Some(buf.to_vec()) } LifxMsg::SetColor(state) => { let mut buf: [u8; 8 + 16 * 4 + 32] = [0; 8 + 16 * 4 + 32]; LittleEndian::write_u16(&mut buf[1..], state.hue); LittleEndian::write_u16(&mut buf[3..], state.sat); LittleEndian::write_u16(&mut buf[5..], state.bri); LittleEndian::write_u16(&mut buf[7..], 6500); let t = state.transition.unwrap_or(500); LittleEndian::write_u32(&mut buf[9..], t); Some(buf.to_vec()) } _ => None, } } pub fn mk_lifx_udp_msg(lifx_msg: LifxMsg) -> Vec<u8> { let mut frame: [u8; 8] = [0; 8]; let protocol = 1024; let origin = 0; let tagged = 1; let addressable = 1; LittleEndian::write_u16(&mut frame, 0); LittleEndian::write_u16( &mut frame[2..], protocol | (origin << 14) | (tagged << 13) | (addressable << 12), ); LittleEndian::write_u16(&mut frame[1..], 4); let mut frame_address: [u8; 16] = [0; 16]; let ack_required = 0; let res_required = match lifx_msg { LifxMsg::Get(_) => 1, _ => 0, }; frame_address[14] = (ack_required << 1) | res_required; let mut protocol_header: [u8; 12] = [0; 12]; let msg_type = lifx_msg_type_to_u16(lifx_msg.clone()); LittleEndian::write_u16(&mut protocol_header[8..], msg_type); let payload = mk_lifx_msg_payload(lifx_msg); let payload_size = payload.clone().map(|p| p.len()).unwrap_or(0); let msg_size = frame.len() + frame_address.len() + protocol_header.len() + payload_size; LittleEndian::write_u16(&mut frame, msg_size as u16); let mut msg: Vec<u8> = vec![]; msg.append(&mut frame.to_vec()); msg.append(&mut frame_address.to_vec()); msg.append(&mut protocol_header.to_vec()); if let Some(payload) = payload { msg.append(&mut payload.to_vec()); }; msg } pub fn read_lifx_msg(buf: &[u8], addr: SocketAddr) -> LifxMsg { let msg_type = LittleEndian::read_u16(&buf[32..]); let payload = &buf[36..]; match msg_type { 107 => { let hue = LittleEndian::read_u16(payload); let sat = LittleEndian::read_u16(&payload[2..]); let bri = LittleEndian::read_u16(&payload[4..]); let power = LittleEndian::read_u16(&payload[10..]); let label = std::str::from_utf8(&payload[12..(12 + 32)]) .unwrap_or("Unknown") .to_owned() .replace('\0', ""); let state = LifxState { hue, sat, bri, power, label, addr, transition: None, }; LifxMsg::State(state) } _ => LifxMsg::Unknown, } } pub fn from_lifx_state(lifx_state: LifxState, integration_id: IntegrationId) -> Device { let hue = from_lifx_hue((f32::from(lifx_state.hue) / 65535.0) * 360.0); let sat = f32::from(lifx_state.sat) / 65535.0; let bri = f32::from(lifx_state.bri) / 
65535.0; let power = lifx_state.power == 65535; let color = Hsv::new(hue, sat, bri); let transition_ms = lifx_state.transition.map(|transition| transition as u64); let state = DeviceState::Light(Light::new( power, None, Some(DeviceColor::Color(color)), transition_ms, )); Device { id: DeviceId::new(&lifx_state.addr.to_string()), name: lifx_state.label, integration_id, scene: None, state, } } pub fn to_lifx_state(device: &Device) -> Result<LifxState> { let light_state =
?; let power = if light_state.power { 65535 } else { 0 }; let transition = light_state .transition_ms .map(|transition_ms| transition_ms as u32); match light_state.color { Some(DeviceColor::Color(color)) => { let hue = ((to_lifx_hue(color.hue.to_positive_degrees()) / 360.0) * 65535.0).floor() as u16; let sat = (color.saturation * 65535.0).floor() as u16; let bri = (light_state.brightness.unwrap_or(1.0) * color.value * 65535.0).floor() as u16; Ok(LifxState { hue, sat, bri, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }) } Some(DeviceColor::Cct(_)) => Err(anyhow!( "Support for Lifx color temperature mode not implemented" )), None => Ok(LifxState { hue: 0, sat: 0, bri: 0, power, label: device.name.clone(), addr: device.id.to_string().parse()?, transition, }), } } pub fn to_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 1.0 / 2.0) * 60.0 } else { h } } pub fn from_lifx_hue(h: f32) -> f32 { if h > 0.0 && h < 60.0 { let p = h / 60.0; f32::powf(p, 2.0 / 1.0) * 60.0 } else { h } }
match device.state.clone() { DeviceState::Light(Light { brightness, color, power, transition_ms, }) => Ok(Light { power, brightness, color, transition_ms, }), _ => Err(anyhow!("Unsupported device state")), }
if_condition
[ { "content": "fn default_device(device_id: DeviceId, name: String, integration_id: IntegrationId) -> Device {\n\n Device {\n\n id: device_id,\n\n name,\n\n integration_id,\n\n scene: None,\n\n state: DeviceState::Light(Light {\n\n power: false,\n\n brightness: None,\n\n color: None,\n\n transition_ms: None,\n\n }),\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Integration for Tuya {\n\n fn new(id: &IntegrationId, config: &config::Value, event_tx: TxEventChannel) -> Result<Self> {\n\n let config: TuyaConfig = config\n\n .clone()\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 1, "score": 287455.2131933322 }, { "content": "pub fn color_config_as_device_color(color_config: ColorConfig) -> DeviceColor {\n\n DeviceColor::Color(match color_config {\n\n ColorConfig::Lch(lch) => lch.into(),\n\n ColorConfig::Hsv(hsv) => hsv,\n\n ColorConfig::Rgb(rgb) => rgb.into(),\n\n })\n\n}\n\n\n\n#[derive(Clone, Deserialize, Debug, Serialize)]\n\npub struct SceneDeviceLink {\n\n pub integration_id: IntegrationId,\n\n pub device_id: Option<DeviceId>,\n\n pub name: Option<String>,\n\n pub brightness: Option<f32>, // allow overriding brightness\n\n}\n\n\n\n#[derive(Clone, Deserialize, Serialize, Debug)]\n\npub struct SceneDescriptor {\n\n pub scene_id: SceneId,\n\n\n", "file_path": "types/src/scene.rs", "rank": 6, "score": 234768.79559059802 }, { "content": "pub fn get_device_state_color(state: &DeviceState) -> Option<Hsv> {\n\n match (state.get_color(), state.get_cct()) {\n\n (Some(color), _) => Some(color),\n\n (_, Some(cct)) => {\n\n let rgb = cct_to_rgb(cct.get_cct());\n\n let hsv: Hsv = rgb.into();\n\n Some(hsv)\n\n }\n\n (_, _) => None,\n\n }\n\n}\n\n\n", "file_path": "frontend/src/util.rs", "rank": 7, "score": 229858.85876332704 }, { "content": "fn is_rule_triggered(state: &DevicesState, groups: &Groups, rule: &Rule) -> Result<bool, String> {\n\n // Try finding matching device\n\n let devices = match rule {\n\n Rule::Any(AnyRule { any: rules }) => {\n\n let any_triggered = rules\n\n .iter()\n\n .map(|rule| is_rule_triggered(state, groups, rule))\n\n .any(|result| result == Ok(true));\n\n\n\n return Ok(any_triggered);\n\n }\n\n Rule::Sensor(rule) => {\n\n vec![find_device(\n\n state,\n\n &rule.integration_id,\n\n rule.device_id.as_ref(),\n\n rule.name.as_ref(),\n\n )\n\n .ok_or(format!(\n\n \"Could not find matching sensor for rule: {:?}\",\n", "file_path": "backend/src/homectl_core/rules.rs", "rank": 9, "score": 214331.7742881552 }, { "content": "/// Converts BridgeLight into Device\n\npub fn bridge_light_to_device(\n\n id: DeviceId,\n\n integration_id: IntegrationId,\n\n bridge_light: BridgeLight,\n\n) -> Device {\n\n let name = bridge_light.name.clone();\n\n let state = DeviceState::Light(to_light(bridge_light));\n\n\n\n Device {\n\n id: DeviceId::new(&format!(\"lights/{}\", id)),\n\n name,\n\n integration_id,\n\n scene: None,\n\n state,\n\n }\n\n}\n", "file_path": "backend/src/integrations/hue/light_utils.rs", "rank": 10, "score": 211674.82265346084 }, { "content": "fn compare_rule_device_state(rule: &Rule, device: &Device) -> Result<bool, String> {\n\n let sensor_kind = get_device_sensor_kind(device);\n\n\n\n match rule {\n\n Rule::Any(_) => {\n\n panic!(\"compare_rule_device_state() cannot be called directly on Any rule\");\n\n }\n\n Rule::Sensor(rule) => {\n\n // FIXME: there must be a better way\n\n match (rule.state.clone(), sensor_kind) {\n\n (\n\n SensorRuleState::OnOffSensor { value: rule_value },\n\n Some(SensorKind::OnOffSensor {\n\n value: sensor_value,\n\n }),\n\n ) => 
Ok(rule_value == sensor_value),\n\n (\n\n SensorRuleState::DimmerSwitch {\n\n on: Some(rule_on),\n\n up: _,\n", "file_path": "backend/src/homectl_core/rules.rs", "rank": 11, "score": 207577.09825449533 }, { "content": "pub fn to_light(bridge_light: BridgeLight) -> Light {\n\n let power = bridge_light.state.on;\n\n let xy = bridge_light.state.xy;\n\n let ct = bridge_light.state.ct.map(|ct| 1_000_000.0 / ct as f32);\n\n let hue = bridge_light.state.hue.map(|hue| hue as f32 / 65535.0);\n\n let sat = bridge_light.state.sat.map(|sat| sat as f32 / 254.0);\n\n let brightness = bridge_light.state.bri.map(|bri| bri as f32 / 254.0);\n\n let transition_ms = bridge_light\n\n .state\n\n .transitiontime\n\n .map(|transitiontime| (transitiontime * 100) as u64);\n\n\n\n let color = match bridge_light.state.colormode {\n\n Some(ColorMode::Ct) => (move || {\n\n let ct = ct?;\n\n let cct = CorrelatedColorTemperature::new(ct, 2000.0..6500.0);\n\n Some(DeviceColor::Cct(cct))\n\n })(),\n\n Some(ColorMode::Xy) => (move || {\n\n let (x, y) = xy?;\n", "file_path": "backend/src/integrations/hue/light_utils.rs", "rank": 12, "score": 199348.34459664553 }, { "content": "// Example of warp usage: https://github.com/seanmonstar/warp/blob/master/examples/todos.rs\n\npub fn init_api(app_state: &Arc<AppState>) -> Result<()> {\n\n let api = warp::path(\"api\")\n\n .and(warp::path(\"v1\"))\n\n .and(devices(app_state).or(actions(app_state)));\n\n\n\n let ws = ws(app_state);\n\n\n\n tokio::spawn(async move {\n\n warp::serve(ws.or(api)).run(([127, 0, 0, 1], 45289)).await;\n\n });\n\n\n\n Ok(())\n\n}\n", "file_path": "backend/src/api/mod.rs", "rank": 13, "score": 186272.78414348437 }, { "content": "pub fn use_init_app_state(cx: &Scope) {\n\n use_init_atom_root(cx);\n\n let set_devices = use_set(cx, DEVICES_ATOM);\n\n let set_scenes = use_set(cx, SCENES_ATOM);\n\n let set_groups = use_set(cx, GROUPS_ATOM);\n\n\n\n {\n\n let set_devices = set_devices.clone();\n\n let set_scenes = set_scenes.clone();\n\n let set_groups = set_groups.clone();\n\n\n\n use_ws_context_provider_json(\n\n cx,\n\n WS_ENDPOINT.unwrap_or(\"ws://localhost:8080/ws\"),\n\n move |msg| match msg {\n\n WebSocketResponse::State(state) => {\n\n set_devices(state.devices);\n\n set_scenes(state.scenes);\n\n set_groups(state.groups);\n\n }\n\n },\n\n );\n\n }\n\n}\n", "file_path": "frontend/src/app_state.rs", "rank": 14, "score": 173304.29700345043 }, { "content": "fn to_tuya_state(device: &Device, device_config: &TuyaDeviceConfig) -> Result<TuyaState> {\n\n let light_state = match device.state.clone() {\n\n DeviceState::Light(Light {\n\n brightness,\n\n color,\n\n power,\n\n transition_ms,\n\n }) => Ok(Light {\n\n power,\n\n brightness,\n\n color,\n\n transition_ms,\n\n }),\n\n _ => Err(anyhow!(\"Unsupported device state\")),\n\n }?;\n\n\n\n // TODO: do this kind of conversion for the integrations in homectl core\n\n match light_state.color {\n\n Some(DeviceColor::Color(color)) => {\n\n if device_config.color_field.is_some() {\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 15, "score": 171821.7565320355 }, { "content": "/// Converts BridgeSensor into Device\n\npub fn bridge_sensor_to_device(\n\n id: DeviceId,\n\n integration_id: IntegrationId,\n\n bridge_sensor: BridgeSensor,\n\n) -> Device {\n\n let id = DeviceId::new(&format!(\"sensors/{}\", id));\n\n let name = get_bridge_sensor_name(bridge_sensor.clone());\n\n let scene = None;\n\n\n\n match bridge_sensor {\n\n BridgeSensor::ZLLPresence { state, .. 
} => {\n\n let kind = DeviceState::Sensor(SensorKind::OnOffSensor {\n\n value: state.presence.unwrap_or_default(),\n\n });\n\n\n\n Device {\n\n id,\n\n name,\n\n integration_id,\n\n scene,\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 16, "score": 170019.41359953984 }, { "content": "pub fn xy_to_cct(color: &Yxy) -> f32 {\n\n let x = color.x;\n\n let y = color.y;\n\n\n\n // McCamy's approximation\n\n let n = (x - 0.3320) / (0.1858 - y);\n\n 437.0 * n.powf(3.0) + 3601.0 * n.powf(2.0) + 6861.0 * n + 5517.0\n\n}\n\n\n", "file_path": "types/src/utils.rs", "rank": 17, "score": 159352.80502171392 }, { "content": "pub fn devices(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {\n\n warp::path(\"devices\").and(get_devices(app_state).or(put_device(app_state)))\n\n}\n\n\n", "file_path": "backend/src/api/devices.rs", "rank": 18, "score": 157981.447552595 }, { "content": "fn mk_timer_device(id: &IntegrationId, config: &TimerConfig, value: bool) -> Device {\n\n let state = DeviceState::Sensor(SensorKind::OnOffSensor { value });\n\n\n\n Device {\n\n id: DeviceId::new(\"timer\"),\n\n name: config.device_name.clone(),\n\n integration_id: id.clone(),\n\n scene: None,\n\n state,\n\n }\n\n}\n", "file_path": "backend/src/integrations/timer/mod.rs", "rank": 19, "score": 155666.65586197382 }, { "content": "/// Returns name of BridgeSensor\n\nfn get_bridge_sensor_name(bridge_sensor: BridgeSensor) -> String {\n\n match bridge_sensor {\n\n BridgeSensor::Daylight { name } => name,\n\n BridgeSensor::ZLLLightLevel { name } => name,\n\n BridgeSensor::ZLLPresence { name, .. } => name,\n\n BridgeSensor::ZLLSwitch { name, .. } => name,\n\n BridgeSensor::ZLLTemperature { name } => name,\n\n BridgeSensor::CLIPPresence { name } => name,\n\n BridgeSensor::CLIPGenericStatus { name } => name,\n\n BridgeSensor::CLIPGenericFlag { name } => name,\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 20, "score": 155299.7216909728 }, { "content": "fn cmp_light_color(\n\n a: &Option<DeviceColor>,\n\n a_bri: &Option<f32>,\n\n b: &Option<DeviceColor>,\n\n b_bri: &Option<f32>,\n\n) -> bool {\n\n let hue_delta = 1.0;\n\n let sat_delta = 0.01;\n\n let val_delta = 0.01;\n\n let cct_delta = 10.0;\n\n\n\n match (a, b) {\n\n (None, None) => true,\n\n (None, Some(_)) => false,\n\n (Some(_), None) => false,\n\n (Some(DeviceColor::Color(a)), Some(DeviceColor::Color(b))) => {\n\n let mut a_hsv: Hsv = *a;\n\n let mut b_hsv: Hsv = *b;\n\n\n\n a_hsv.value *= a_bri.unwrap_or(1.0);\n", "file_path": "backend/src/homectl_core/devices.rs", "rank": 21, "score": 153705.22079302004 }, { "content": "pub fn find_device(\n\n devices: &DevicesState,\n\n integration_id: &IntegrationId,\n\n device_id: Option<&DeviceId>,\n\n name: Option<&String>,\n\n) -> Option<Device> {\n\n let device = devices\n\n .0\n\n .iter()\n\n .find(\n\n |(\n\n DeviceKey {\n\n integration_id: candidate_integration_id,\n\n device_id: candidate_device_id,\n\n },\n\n candidate_device,\n\n )| {\n\n if integration_id != candidate_integration_id {\n\n return false;\n\n }\n", "file_path": "backend/src/homectl_core/devices.rs", "rank": 22, "score": 152720.88921496848 }, { "content": "fn get_random_color() -> DeviceColor {\n\n let mut rng = rand::thread_rng();\n\n\n\n let r: f32 = rng.gen();\n\n let g: f32 = rng.gen();\n\n let b: f32 = rng.gen();\n\n\n\n let rgb: Rgb = Rgb::new(r, g, b);\n\n DeviceColor::Color(rgb.into())\n\n}\n\n\n\nasync fn poll_sensor(random: Random) 
{\n\n let poll_rate = Duration::from_millis(1000);\n\n let mut interval = time::interval(poll_rate);\n\n\n\n loop {\n\n interval.tick().await;\n\n\n\n let sender = random.event_tx.clone();\n\n\n\n let device = mk_random_device(&random);\n\n sender.send(Message::SetDeviceState {\n\n device,\n\n set_scene: false,\n\n });\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/random/mod.rs", "rank": 23, "score": 152251.66695486484 }, { "content": "fn mk_boolean_device(id: &IntegrationId, config: &BooleanConfig, value: Option<bool>) -> Device {\n\n let state = DeviceState::Sensor(SensorKind::OnOffSensor {\n\n value: value.unwrap_or(config.init_value),\n\n });\n\n\n\n Device {\n\n id: DeviceId::new(\"boolean\"),\n\n name: config.device_name.clone(),\n\n integration_id: id.clone(),\n\n scene: None,\n\n state,\n\n }\n\n}\n", "file_path": "backend/src/integrations/boolean/mod.rs", "rank": 24, "score": 151567.04810399347 }, { "content": "fn update_state(machine: &PingMachine, id: &IntegrationId, state_b: bool, sender: &TxEventChannel) {\n\n let state = DeviceState::OnOffDevice(OnOffDevice { power: state_b });\n\n let device = Device {\n\n id: DeviceId::new(&machine.id.to_string()),\n\n name: machine.id.to_string(),\n\n integration_id: id.clone(),\n\n state,\n\n scene: None,\n\n };\n\n sender.send(Message::IntegrationDeviceRefresh { device });\n\n}\n", "file_path": "backend/src/integrations/ping/mod.rs", "rank": 25, "score": 151165.8893804355 }, { "content": "pub fn with_state(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = (Arc<AppState>,), Error = std::convert::Infallible> + Clone {\n\n let app_state = app_state.clone();\n\n warp::any().map(move || app_state.clone())\n\n}\n\n\n", "file_path": "backend/src/api/mod.rs", "rank": 26, "score": 150379.05447057463 }, { "content": "pub fn read_config() -> Result<(Config, OpaqueIntegrationsConfigs)> {\n\n let mut settings = config::Config::default();\n\n\n\n let root = std::env::current_dir().unwrap();\n\n let sample_path = root.join(\"Settings.toml.example\");\n\n\n\n let path = root.join(\"Settings.toml\");\n\n\n\n if !path.exists() && std::env::var(\"SKIP_SAMPLE_CONFIG\").is_err() {\n\n println!(\"Settings.toml not found, generating sample configuration.\");\n\n println!(\"Set SKIP_SAMPLE_CONFIG environment variable to opt out of this behavior.\");\n\n std::fs::copy(sample_path, path).unwrap();\n\n }\n\n\n\n settings\n\n .merge(config::File::with_name(\"Settings\"))\n\n .context(\"Failed to load Settings.toml config file\")?;\n\n\n\n let config: Config = serde_path_to_error::deserialize(settings.clone()).context(\n\n \"Failed to deserialize config, compare your config file to Settings.toml.example!\",\n\n )?;\n\n\n\n let integrations_config = settings\n\n .get::<OpaqueIntegrationsConfigs>(\"integrations\")\n\n .context(\"Expected to find integrations key in config\")?;\n\n\n\n Ok((config, integrations_config))\n\n}\n", "file_path": "backend/src/homectl_core/config.rs", "rank": 27, "score": 149795.98970196812 }, { "content": "pub fn hsv_to_css_hsl_str(hsv: &Option<Hsv>) -> String {\n\n let hsv = hsv.unwrap_or_else(|| Hsv::new(0.0, 0.0, 1.0));\n\n let hsl: Hsl = hsv.into();\n\n\n\n format!(\n\n \"hsl({}, {}%, {}%)\",\n\n hsl.hue.to_positive_degrees().floor(),\n\n (hsl.saturation * 100.0).floor(),\n\n (hsl.lightness * 100.0).floor()\n\n )\n\n}\n\n\n", "file_path": "frontend/src/util.rs", "rank": 28, "score": 148931.78172825548 }, { "content": "/// Returns whether BridgeButtonEvent refers to DimmerSwitchButtonId\n\npub fn 
cmp_button_id(buttonevent: BridgeButtonEvent, button_id: DimmerSwitchButtonId) -> bool {\n\n let event_button_id = get_button_id(buttonevent);\n\n\n\n event_button_id == button_id\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 29, "score": 148543.37930831983 }, { "content": "fn power_to_tuya(power: bool) -> TuyaState {\n\n TuyaState {\n\n power_on: power,\n\n brightness: None,\n\n color_temperature: None,\n\n color: None,\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 30, "score": 145654.94967853738 }, { "content": "fn get_circadian_color(circadian: &Circadian) -> DeviceColor {\n\n match (\n\n circadian.converted_day_color.clone(),\n\n circadian.converted_night_color.clone(),\n\n ) {\n\n (DeviceColor::Color(day), DeviceColor::Color(night)) => {\n\n let gradient = Gradient::new(vec![day, night]);\n\n\n\n let i = get_night_fade(circadian);\n\n\n\n DeviceColor::Color(gradient.get(i))\n\n }\n\n (DeviceColor::Cct(_), DeviceColor::Cct(_)) => todo!(),\n\n _ => panic!(\"Mixed color types not supported\"),\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/circadian/mod.rs", "rank": 31, "score": 142413.78151691135 }, { "content": "pub fn listen_udp_stream(\n\n socket: Arc<UdpSocket>,\n\n integration_id: IntegrationId,\n\n sender: TxEventChannel,\n\n) {\n\n let mut buf: [u8; MAX_UDP_PACKET_SIZE] = [0; MAX_UDP_PACKET_SIZE];\n\n tokio::spawn(async move {\n\n loop {\n\n let res = socket.recv_from(&mut buf).await;\n\n\n\n match res {\n\n // FIXME: should probably do some sanity checks on bytes_read\n\n Ok((_bytes_read, addr)) => {\n\n let msg = read_lifx_msg(&buf, addr);\n\n\n\n handle_lifx_msg(msg, integration_id.clone(), sender.clone()).await;\n\n }\n\n Err(e) => {\n\n println!(\"Error in udp recv_from {}\", e);\n\n }\n", "file_path": "backend/src/integrations/lifx/lights.rs", "rank": 32, "score": 142113.18024130558 }, { "content": "/// Returns whether DimmerSwitchButtonId is in a pressed state in the\n\n/// BridgeButtonEvent\n\npub fn is_button_pressed(\n\n buttonevent: Option<BridgeButtonEvent>,\n\n button_id: DimmerSwitchButtonId,\n\n button_type: DimmerSwitchButtonPressType,\n\n) -> bool {\n\n match buttonevent {\n\n Some(buttonevent) => {\n\n let button_id_match = cmp_button_id(buttonevent, button_id);\n\n let pressed = get_button_state(buttonevent);\n\n\n\n button_id_match && button_type == pressed\n\n }\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 33, "score": 141665.3667929818 }, { "content": "/// Do some extrapolation on old and new bridge_sensor states to try and figure\n\n/// out what individual state transitions might have occurred\n\n///\n\n/// Usually this will return a Vec with 0 or 1 items, but there are some\n\n/// scenarios where we might have missed some events due to polling.\n\npub fn extrapolate_sensor_updates(\n\n prev_bridge_sensor: Option<BridgeSensor>,\n\n next_bridge_sensor: BridgeSensor,\n\n) -> Vec<BridgeSensor> {\n\n // Quick optimization: if the states are equal, there are no updates\n\n if prev_bridge_sensor == Some(next_bridge_sensor.clone()) {\n\n return vec![];\n\n }\n\n\n\n match (prev_bridge_sensor, next_bridge_sensor.clone()) {\n\n // ZLLPresence sensor updates are infrequent enough that we should not\n\n // need to worry about missing out on updates\n\n (_, BridgeSensor::ZLLPresence { .. 
}) => vec![next_bridge_sensor],\n\n\n\n // ZLLSwitches can be pressed quickly, and a naive polling implementation would\n\n // miss out on a lot of button state transition events.\n\n (\n\n Some(BridgeSensor::ZLLSwitch {\n\n state:\n\n ZLLSwitchState {\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 34, "score": 139194.81691156855 }, { "content": "/// Tries to find BridgeSensor with matching BridgeSensorId\n\npub fn find_bridge_sensor(\n\n bridge_sensors: &BridgeSensors,\n\n sensor_id: &BridgeSensorId,\n\n) -> Option<BridgeSensor> {\n\n bridge_sensors.get(sensor_id).cloned()\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 35, "score": 139191.8428321023 }, { "content": "#[allow(non_snake_case)]\n\npub fn ColorSwatch(cx: Scope<ColorSwatchProps>) -> Element {\n\n let background_color = hsv_to_css_hsl_str(&cx.props.color);\n\n\n\n cx.render(rsx! {\n\n span {\n\n class: \"h-8 w-8 rounded-full border border-slate-300 flex-shrink-0\",\n\n background_color: \"{background_color}\",\n\n }\n\n })\n\n}\n", "file_path": "frontend/src/color_swatch.rs", "rank": 36, "score": 138976.55260542457 }, { "content": "#[allow(non_snake_case)]\n\npub fn SceneList(cx: Scope<SceneListProps>) -> Element {\n\n let scenes = use_read(&cx, SCENES_ATOM).clone();\n\n\n\n let scenes: FlattenedScenesConfig = scenes\n\n .into_iter()\n\n .filter(|(_, config)| config.hidden != Some(true))\n\n .collect();\n\n\n\n let filtered_scenes = if let Some(filters) = &cx.props.filter_by_device_ids {\n\n scenes\n\n .into_iter()\n\n .filter(|(_, scene)| filters.iter().any(|k| scene.devices.contains_key(k)))\n\n .collect()\n\n } else {\n\n scenes\n\n };\n\n\n\n let sorted_scenes: Vec<(SceneId, FlattenedSceneConfig)> = filtered_scenes\n\n .into_iter()\n\n .sorted_by(|a, b| a.1.name.cmp(&b.1.name))\n", "file_path": "frontend/src/scene_list.rs", "rank": 37, "score": 138290.3445556662 }, { "content": "#[allow(non_snake_case)]\n\npub fn DeviceList(cx: Scope<DeviceListProps>) -> Element {\n\n let devices = use_read(&cx, DEVICES_ATOM);\n\n\n\n let devices = devices\n\n .0\n\n .values()\n\n .into_iter()\n\n .sorted_by(|a, b| a.name.cmp(&b.name));\n\n\n\n let devices = devices.into_iter().filter_map(|device| {\n\n if let Some(filters) = &cx.props.filters {\n\n if !filters.contains(&device.get_device_key()) {\n\n return None;\n\n }\n\n }\n\n\n\n let key = device.get_device_key().to_string();\n\n\n\n Some(rsx! {\n\n DeviceTile {\n", "file_path": "frontend/src/device_list.rs", "rank": 38, "score": 137288.72409570293 }, { "content": "#[allow(non_snake_case)]\n\npub fn GroupDeviceList(cx: Scope) -> Element {\n\n let group_id: GroupId = GroupId::new(use_route(&cx).segment(\"group_id\")?.to_string());\n\n let groups = use_read(&cx, GROUPS_ATOM);\n\n\n\n let (_, group) = groups\n\n .iter()\n\n .find(|(candidate_group_id, _)| *candidate_group_id == &group_id)?;\n\n\n\n let name = &group.name;\n\n\n\n cx.render(rsx! 
{\n\n DeviceList { filters: Some(group.device_ids.clone()) }\n\n\n\n h2 { class: \"mt-4\", \"{name} scenes:\" }\n\n SceneList { filter_by_device_ids: group.device_ids.clone() }\n\n })\n\n}\n", "file_path": "frontend/src/group_device_list.rs", "rank": 39, "score": 136375.2401268786 }, { "content": "fn cmp_device_states(device: &DeviceState, expected: &DeviceState) -> bool {\n\n match (device, expected) {\n\n (DeviceState::OnOffDevice(a), DeviceState::OnOffDevice(b)) => a.power == b.power,\n\n (DeviceState::Light(device), DeviceState::Light(expected)) => {\n\n if device.power != expected.power {\n\n return false;\n\n }\n\n\n\n // If both lights are turned off, state matches\n\n if !device.power && !expected.power {\n\n return true;\n\n }\n\n\n\n // Compare colors if supported\n\n if device.color.is_some() {\n\n return cmp_light_color(\n\n &device.color,\n\n &device.brightness,\n\n &expected.color,\n\n &expected.brightness,\n", "file_path": "backend/src/homectl_core/devices.rs", "rank": 40, "score": 134177.24874871364 }, { "content": "#[allow(non_snake_case)]\n\npub fn DeviceModal<'a>(cx: Scope<'a, DeviceModalProps<'a>>) -> Element<'a> {\n\n let ws = use_ws_context(&cx);\n\n\n\n let show_debug = use_state(&cx, || false);\n\n // let toggle_debug = move |_: MouseEvent| {\n\n // let mut show_debug = show_debug.modify();\n\n // *show_debug = !*show_debug;\n\n // };\n\n\n\n let brightness = cx.props.device.state.get_brightness().unwrap_or_default();\n\n let power = cx.props.device.state.is_powered_on().unwrap_or_default();\n\n let color = cx.props.device.state.get_color();\n\n\n\n let hue = color.unwrap_or_default().hue.to_positive_degrees();\n\n let saturation = color.unwrap_or_default().saturation;\n\n let cct = cx\n\n .props\n\n .device\n\n .state\n\n .get_cct()\n", "file_path": "frontend/src/device_modal.rs", "rank": 43, "score": 131223.65777789638 }, { "content": "pub fn from_hh_mm<'de, D>(d: D) -> Result<chrono::NaiveTime, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n let str = String::deserialize(d)?;\n\n chrono::NaiveTime::parse_from_str(&str, \"%H:%M\").map_err(serde::de::Error::custom)\n\n}\n", "file_path": "backend/src/utils/mod.rs", "rank": 44, "score": 128934.31503920635 }, { "content": "/// Best effort conversion of DimmerSwitchButtonId and button_state back into a\n\n/// BridgeButtonEvent. 
This conversion is lossy because we don't have the data\n\n/// needed to reconstruct the exact button state.\n\nfn to_buttonevent(button_id: DimmerSwitchButtonId, button_state: bool) -> BridgeButtonEvent {\n\n let mut s = String::new();\n\n\n\n s.push(match button_id {\n\n DimmerSwitchButtonId::On => '1',\n\n DimmerSwitchButtonId::Up => '2',\n\n DimmerSwitchButtonId::Down => '3',\n\n DimmerSwitchButtonId::Off => '4',\n\n _ => '0',\n\n });\n\n s.push('0');\n\n s.push('0');\n\n s.push(match button_state {\n\n true => '0', // INITIAL_PRESSED\n\n false => '2', // SHORT_RELEASED\n\n });\n\n\n\n let buttonevent: BridgeButtonEvent = s.parse().unwrap_or(1000);\n\n\n\n buttonevent\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 45, "score": 128748.10869461762 }, { "content": "#[allow(non_snake_case)]\n\npub fn SaveSceneModal<'a>(cx: Scope<'a, SaveSceneModalProps<'a>>) -> Element<'a> {\n\n let set_disable_scroll = use_set(&cx, DISABLE_SCROLL_ATOM);\n\n let devices = use_read(&cx, DEVICES_ATOM);\n\n\n\n let ws = use_ws_context(&cx);\n\n\n\n let name = use_state(&cx, || String::from(\"New scene\"));\n\n\n\n let onchange = {\n\n move |evt: FormEvent| {\n\n let new_name = evt.data.value.clone();\n\n name.set(new_name)\n\n }\n\n };\n\n\n\n let save_scene = {\n\n let filters = cx.props.filters.clone();\n\n\n\n move |evt: MouseEvent| {\n\n evt.cancel_bubble();\n", "file_path": "frontend/src/save_scene_modal.rs", "rank": 46, "score": 126822.09449774047 }, { "content": "#[allow(non_snake_case)]\n\npub fn EditSceneModal<'a>(cx: Scope<'a, EditSceneModalProps<'a>>) -> Element<'a> {\n\n let set_disable_scroll = use_set(&cx, DISABLE_SCROLL_ATOM);\n\n let scenes = use_read(&cx, SCENES_ATOM);\n\n let scene_id = cx.props.scene_id;\n\n let _scene = scenes.get(scene_id);\n\n\n\n let ws = use_ws_context(&cx);\n\n\n\n let name = use_state(&cx, || String::from(\"New scene\"));\n\n\n\n let onchange = {\n\n move |evt: FormEvent| {\n\n let new_name = evt.data.value.clone();\n\n name.set(new_name)\n\n }\n\n };\n\n\n\n let save_scene = { move |_| {} };\n\n\n\n let confirm_delete_visible = use_state(&cx, || false);\n", "file_path": "frontend/src/edit_scene_modal.rs", "rank": 47, "score": 126822.09449774047 }, { "content": "fn read_payload_from_json(json: &str) -> PayloadStruct {\n\n let dps: std::option::Option<\n\n std::collections::HashMap<std::string::String, serde_json::Value>,\n\n > = serde_json::from_str(json).ok();\n\n PayloadStruct {\n\n dev_id: \"\".to_string(),\n\n gw_id: None,\n\n uid: None,\n\n t: None,\n\n dp_id: None,\n\n dps,\n\n }\n\n}\n\n\n\nasync fn set_tuya_state(device: &Device, device_config: &TuyaDeviceConfig) -> Result<()> {\n\n // println!(\"setting tuya state: {:?} {}\", device.state, device.name);\n\n let tuya_state = to_tuya_state(device, device_config)?;\n\n\n\n let current_time = SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 48, "score": 117998.65549580779 }, { "content": "fn hsv_to_tuya(power: bool, brightness: Option<f32>, hsv: Hsv) -> TuyaState {\n\n let hue: f32 = hsv.hue.to_positive_degrees();\n\n let saturation = (hsv.saturation as f32) * 1000.0;\n\n let value = brightness.unwrap_or(1.0) * (hsv.value as f32) * 1000.0;\n\n let tuya_color_string = format!(\n\n \"{:0>4x}{:0>4x}{:0>4x}\",\n\n hue as i32, saturation as i32, value as i32\n\n );\n\n\n\n TuyaState {\n\n power_on: power,\n\n color: Some(tuya_color_string),\n\n brightness: None,\n\n color_temperature: None,\n\n 
}\n\n}\n\n\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 49, "score": 115987.2240774625 }, { "content": "fn ct_to_tuya(power: bool, brightness: Option<f32>, ct: f32) -> TuyaState {\n\n // Range of my bulbs is from 2700K - 4100K (and they express this as a\n\n // 0-1000 range), this is very likely not true for all Tuya bulbs\n\n let min_supported_temp = 2700.0;\n\n let max_supported_temp = 4100.0;\n\n\n\n // Scale the value into 0.0 - 1.0 range\n\n let q = (ct - min_supported_temp) / (max_supported_temp - min_supported_temp);\n\n let q = q.clamp(0.0, 1.0);\n\n\n\n // Scale the value into 0 - 1000 range\n\n let color_temperature = f32::floor(q * 1000.0) as u32;\n\n\n\n // Brightness goes from 10 to 1000 ¯\\_(ツ)_/¯\n\n let brightness = brightness.map(|bri| f32::floor(bri * 990.0) as u32 + 10);\n\n\n\n TuyaState {\n\n power_on: power,\n\n color: None,\n\n brightness,\n\n color_temperature: Some(color_temperature),\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 50, "score": 115987.2240774625 }, { "content": "pub fn ws(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {\n\n warp::path(\"ws\")\n\n // The `ws()` filter will prepare the Websocket handshake.\n\n .and(warp::ws())\n\n .and(with_state(app_state))\n\n .map(|ws: warp::ws::Ws, app_state: Arc<AppState>| {\n\n // This will call our function if the handshake succeeds.\n\n ws.on_upgrade(move |socket| user_connected(socket, app_state))\n\n })\n\n}\n\n\n\n// https://github.com/seanmonstar/warp/blob/master/examples/websockets_chat.rs\n\nasync fn user_connected(ws: WebSocket, app_state: Arc<AppState>) {\n\n // Use a counter to assign a new unique ID for this user.\n\n let my_id = NEXT_USER_ID.fetch_add(1, Ordering::Relaxed);\n\n\n\n // Split the socket into a sender and receive of messages.\n\n let (mut user_ws_tx, mut user_ws_rx) = ws.split();\n", "file_path": "backend/src/api/ws.rs", "rank": 51, "score": 114860.62302947654 }, { "content": "pub fn actions(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {\n\n warp::path(\"actions\").and(\n\n post_action(app_state).or(warp::get()\n\n // TODO: why is this needed\n\n .and(warp::path(\"asdasdasdasd\"))\n\n .map(|| Ok(warp::reply::json(&())))),\n\n )\n\n}\n\n\n", "file_path": "backend/src/api/actions.rs", "rank": 52, "score": 114860.62302947654 }, { "content": "struct DeviceKeyVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DeviceKeyVisitor {\n\n type Value = DeviceKey;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a colon-separated pair of integers between 0 and 255\")\n\n }\n\n\n\n fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n if let Some((integration_id, device_id)) = s.split_once('/') {\n\n let integration_id = IntegrationId::from(integration_id.to_string());\n\n let device_id = DeviceId::from(device_id.to_string());\n\n\n\n Ok(DeviceKey::new(integration_id, device_id))\n\n } else {\n\n Err(de::Error::invalid_value(Unexpected::Str(s), &self))\n", "file_path": "types/src/device.rs", "rank": 53, "score": 108810.78911642214 }, { "content": "#[derive(Props, PartialEq)]\n\nstruct SceneRowProps {\n\n scene_id: SceneId,\n\n scene: FlattenedSceneConfig,\n\n #[props(!optional)]\n\n device_keys: Option<Vec<DeviceKey>>,\n\n}\n\n\n", "file_path": "frontend/src/scene_list.rs", "rank": 54, "score": 108043.74067691447 }, { 
"content": "/// Returns which DimmerSwitchButtonId is referred to in BridgeButtonEvent\n\nfn get_button_id(buttonevent: BridgeButtonEvent) -> DimmerSwitchButtonId {\n\n let str = buttonevent.to_string();\n\n let button_id = str.chars().next();\n\n\n\n match button_id {\n\n Some('1') => DimmerSwitchButtonId::On,\n\n Some('2') => DimmerSwitchButtonId::Up,\n\n Some('3') => DimmerSwitchButtonId::Down,\n\n Some('4') => DimmerSwitchButtonId::Off,\n\n _ => DimmerSwitchButtonId::Unknown,\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 55, "score": 107921.25045436108 }, { "content": "#[derive(Props, PartialEq)]\n\nstruct DeviceTileProps<'a> {\n\n device: &'a Device,\n\n}\n\n\n", "file_path": "frontend/src/device_list.rs", "rank": 56, "score": 105156.1383801537 }, { "content": "#[allow(non_snake_case)]\n\npub fn Dashboard(cx: Scope) -> Element {\n\n cx.render(rsx! {\n\n div {\n\n class: \"flex flex-col gap-2\",\n\n \"data-testid\": \"my-test\",\n\n\n\n Link {\n\n to: \"/scenes\",\n\n Tile { full_width: true, contents: cx.render(rsx! { \"Scenes\", div { class: \"{ARROW_STYLES}\", \">\" } }) }\n\n }\n\n Link {\n\n to: \"/groups\",\n\n Tile { full_width: true, contents: cx.render(rsx! { \"Groups\", div { class: \"{ARROW_STYLES}\", \">\" } }) }\n\n }\n\n Link {\n\n to: \"/devices\",\n\n Tile { full_width: true, contents: cx.render(rsx! { \"Devices\", div { class: \"{ARROW_STYLES}\", \">\" } }) }\n\n }\n\n }\n\n })\n\n}\n", "file_path": "frontend/src/dashboard.rs", "rank": 57, "score": 103875.93277906769 }, { "content": "pub fn cct_to_rgb(kelvin: f32) -> Rgb {\n\n let temp = kelvin as f64 / 100.0;\n\n\n\n let (red, green, blue) = if temp <= 66.0 {\n\n let red = 255.0;\n\n\n\n let green = 99.4708025861 * f64::log10(temp) - 161.1195681661;\n\n\n\n let blue = if temp <= 19.0 {\n\n 0.0\n\n } else {\n\n 138.5177312231 * f64::log10(temp - 10.0) - 305.0447927307\n\n };\n\n\n\n (red, green, blue)\n\n } else {\n\n let red = temp - 60.0;\n\n let red = 329.698727446 * f64::powf(red, -0.1332047592);\n\n\n\n let green = temp - 60.0;\n\n let green = 288.1221695283 * f64::log(green, -0.0755148492);\n\n\n\n let blue = 255.0;\n\n\n\n (red, green, blue)\n\n };\n\n\n\n Rgb::new(red as f32, green as f32, blue as f32)\n\n}\n", "file_path": "types/src/utils.rs", "rank": 58, "score": 102141.30334212616 }, { "content": "#[allow(non_snake_case)]\n\npub fn GroupList(cx: Scope) -> Element {\n\n let groups = use_read(&cx, GROUPS_ATOM);\n\n\n\n let groups: Vec<(GroupId, FlattenedGroupConfig)> = groups\n\n .iter()\n\n .filter(|(_, config)| config.hidden != Some(true))\n\n .map(|(group_id, config)| (group_id.clone(), config.clone()))\n\n .sorted_by(|a, b| a.1.name.cmp(&b.1.name))\n\n .collect();\n\n\n\n let groups = groups.iter().map(|(key, group)| {\n\n rsx! {\n\n GroupRow {\n\n key: \"{key}\",\n\n group_id: key.clone(),\n\n name: group.name.clone()\n\n }\n\n }\n\n });\n\n\n\n cx.render(rsx! 
{\n\n div {\n\n class: \"flex flex-col gap-2\",\n\n groups\n\n }\n\n })\n\n}\n", "file_path": "frontend/src/group_list.rs", "rank": 59, "score": 100495.67425758485 }, { "content": "pub fn mk_channel() -> (TxEventChannel, RxEventChannel) {\n\n let (tx, rx) = unbounded_channel::<Message>();\n\n\n\n let sender = Sender { sender: tx };\n\n\n\n (sender, rx)\n\n}\n", "file_path": "types/src/event.rs", "rank": 60, "score": 99580.86206131201 }, { "content": "#[derive(Debug)]\n\nstruct TuyaState {\n\n power_on: bool,\n\n brightness: Option<u32>,\n\n color_temperature: Option<u32>,\n\n color: Option<String>,\n\n}\n\n\n", "file_path": "backend/src/integrations/tuya/mod.rs", "rank": 61, "score": 99235.8782854555 }, { "content": "pub fn scale_hsv_value_to_display(hsv: Hsv) -> Hsv {\n\n let value = (hsv.value + 1.0) / 2.0;\n\n Hsv::new(hsv.hue, hsv.saturation, value)\n\n}\n\n\n", "file_path": "frontend/src/util.rs", "rank": 62, "score": 98932.36720701019 }, { "content": "fn get_devices(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::get()\n\n .and(with_state(app_state))\n\n .map(|app_state: Arc<AppState>| {\n\n let devices = app_state.devices.get_devices();\n\n\n\n let response = DevicesResponse {\n\n devices: devices.0.values().cloned().collect(),\n\n };\n\n\n\n Ok(warp::reply::json(&response))\n\n })\n\n}\n\n\n", "file_path": "backend/src/api/devices.rs", "rank": 63, "score": 98794.7369541725 }, { "content": "fn put_device(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"devices\" / DeviceId)\n\n .and(warp::put())\n\n .and(warp::body::json())\n\n .and(with_state(app_state))\n\n .and_then(put_device_impl)\n\n}\n\n\n\nasync fn put_device_impl(\n\n device_id: DeviceId,\n\n device: Device,\n\n app_state: Arc<AppState>,\n\n) -> Result<impl warp::Reply, Infallible> {\n\n // Make sure device_id matches with provided device\n\n if device_id != device.id {\n\n return Ok(warp::reply::json(&DevicesResponse { devices: vec![] }));\n\n }\n\n\n", "file_path": "backend/src/api/devices.rs", "rank": 64, "score": 98794.7369541725 }, { "content": "#[allow(non_snake_case)]\n\npub fn Header(cx: Scope<HeaderProps>) -> Element {\n\n let groups = use_read(&cx, GROUPS_ATOM);\n\n\n\n let route = use_route(&cx);\n\n let mut segments = vec![];\n\n\n\n let mut n = 0;\n\n while let Some(segment) = route.nth_segment(n) {\n\n if !segment.is_empty() {\n\n segments.push(segment);\n\n }\n\n n += 1;\n\n }\n\n\n\n // Vec<String> -> &[&str]\n\n let segments = segments.iter().map(|s| &**s).collect_vec();\n\n let segments = segments.as_slice();\n\n\n\n let title = match segments {\n\n [] => \"homectl dashboard\".to_string(),\n", "file_path": "frontend/src/header.rs", "rank": 65, "score": 98575.96645965384 }, { "content": "pub fn cmp_hsv(a: &Hsv, b: &Hsv) -> Ordering {\n\n a.hue\n\n .to_positive_degrees()\n\n .partial_cmp(&b.hue.to_positive_degrees())\n\n .unwrap_or(Ordering::Greater)\n\n}\n\n\n\npub const ARROW_STYLES: &str = tw(\"text-right mr-2 leading-4 text-4xl flex-1\");\n\n\n\n/// Annotates tailwindcss utilities to be picked up by editor tooling.\n\npub const fn tw(class: &str) -> &str {\n\n class\n\n}\n", "file_path": "frontend/src/util.rs", "rank": 66, "score": 98486.54901740781 }, { "content": "/// Returns whether BridgeButtonEvent is in a pressed state or not\n\nfn get_button_state(buttonevent: BridgeButtonEvent) -> DimmerSwitchButtonPressType {\n\n let str = 
buttonevent.to_string();\n\n let long = str.chars().nth(2);\n\n let state = str.chars().nth(3);\n\n\n\n // TODO: match this against eventtypes in capabilities.inputs.events\n\n match (long, state) {\n\n (Some('0'), Some('0')) => DimmerSwitchButtonPressType::Short, // initial_press\n\n (Some('0'), Some('2')) => DimmerSwitchButtonPressType::Released, // short_release\n\n (Some('0'), Some('3')) => DimmerSwitchButtonPressType::Released, // long_release\n\n (Some('1'), Some('0')) => DimmerSwitchButtonPressType::Long, // long_press\n\n _ => DimmerSwitchButtonPressType::NotUsed,\n\n }\n\n\n\n // match state {\n\n // Some('0') => DimmerSwitchButtonPressType::NotUsed, // INITIAL_PRESSED\n\n // Some('1') => DimmerSwitchButtonPressType::NotUsed, // HOLD\n\n // Some('2') => DimmerSwitchButtonPressType::Short, // SHORT_RELEASED\n\n // Some('3') => DimmerSwitchButtonPressType::Long, // LONG_RELEASED\n\n // _ => DimmerSwitchButtonPressType::NotUsed,\n\n // }\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/sensor_utils.rs", "rank": 67, "score": 95850.09721918011 }, { "content": "#[allow(non_snake_case)]\n\npub fn Redirect<'a>(cx: Scope<'a, RedirectProps<'a>>) -> Element<'a> {\n\n if let Some(service) = cx.consume_context::<RouterService>() {\n\n service.push_route(cx.props.to, None, None)\n\n }\n\n\n\n None\n\n}\n", "file_path": "frontend/src/redirect.rs", "rank": 68, "score": 92510.90014184726 }, { "content": "#[allow(non_snake_case)]\n\npub fn Tile<'a>(cx: Scope<'a, TileProps<'a>>) -> Element<'a> {\n\n let contents = &cx.props.contents;\n\n let width = if cx.props.full_width == Some(true) {\n\n \"calc(100% - .5rem)\"\n\n } else {\n\n \"calc(50% - .25rem)\"\n\n };\n\n\n\n let background_hsl = cx\n\n .props\n\n .gradient\n\n .clone()\n\n .unwrap_or_default()\n\n .iter()\n\n .map(|hsv| hsv_to_css_hsl_str(&Some(*hsv)))\n\n .collect_vec();\n\n\n\n // If there's only one item, duplicate it to create a valid gradient\n\n let background_hsl = if background_hsl.len() == 1 {\n\n vec![background_hsl[0].clone(), background_hsl[0].clone()]\n", "file_path": "frontend/src/tile.rs", "rank": 69, "score": 92510.90014184726 }, { "content": "#[allow(non_snake_case)]\n\npub fn Modal<'a>(cx: Scope<'a, ModalProps<'a>>) -> Element<'a> {\n\n let set_disable_scroll = use_set(&cx, DISABLE_SCROLL_ATOM);\n\n\n\n let cancel_bubble = move |evt: MouseEvent| {\n\n evt.cancel_bubble();\n\n };\n\n\n\n let close_modal = move |evt: MouseEvent| {\n\n evt.cancel_bubble();\n\n cx.props.modal_open.set(false);\n\n set_disable_scroll(false);\n\n };\n\n\n\n if **cx.props.modal_open {\n\n set_disable_scroll(true);\n\n }\n\n\n\n if !cx.props.modal_open {\n\n None\n\n } else {\n", "file_path": "frontend/src/modal.rs", "rank": 70, "score": 92510.90014184726 }, { "content": "#[allow(non_snake_case)]\n\nfn SceneRow(cx: Scope<SceneRowProps>) -> Element {\n\n let ws = use_ws_context(&cx);\n\n let name = &cx.props.scene.name;\n\n let scene_id = &cx.props.scene_id;\n\n let scene = &cx.props.scene;\n\n let device_keys = &cx.props.device_keys;\n\n\n\n let scene_colors: Vec<Hsv> = scene\n\n .devices\n\n .values()\n\n .filter_map(get_device_state_color)\n\n .map(scale_hsv_value_to_display)\n\n .sorted_by(cmp_hsv)\n\n .dedup()\n\n .collect();\n\n\n\n let activate_scene = {\n\n move |_| {\n\n let scene_id = scene_id.clone();\n\n let device_keys = device_keys.clone();\n", "file_path": "frontend/src/scene_list.rs", "rank": 71, "score": 90834.1490790663 }, { "content": "fn get_device_sensor_kind(device: &Device) -> Option<SensorKind> {\n\n match 
device.state {\n\n DeviceState::Sensor(sensor_kind) => Some(sensor_kind),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "backend/src/homectl_core/rules.rs", "rank": 72, "score": 87414.28599673242 }, { "content": "fn mk_circadian_device(circadian: &Circadian) -> Device {\n\n let state = DeviceState::Light(Light::new(\n\n true,\n\n get_circadian_brightness(circadian),\n\n Some(get_circadian_color(circadian)),\n\n Some(POLL_RATE),\n\n ));\n\n\n\n Device {\n\n id: DeviceId::new(\"color\"),\n\n name: circadian.config.device_name.clone(),\n\n integration_id: circadian.id.clone(),\n\n scene: None,\n\n state,\n\n }\n\n}\n", "file_path": "backend/src/integrations/circadian/mod.rs", "rank": 73, "score": 87361.20229809274 }, { "content": "fn mk_random_device(random: &Random) -> Device {\n\n let state = DeviceState::Light(Light::new(\n\n true,\n\n Some(1.0),\n\n Some(get_random_color()),\n\n Some(500),\n\n ));\n\n\n\n Device {\n\n id: DeviceId::new(\"color\"),\n\n name: random.config.device_name.clone(),\n\n integration_id: random.id.clone(),\n\n scene: None,\n\n state,\n\n }\n\n}\n", "file_path": "backend/src/integrations/random/mod.rs", "rank": 74, "score": 87361.20229809274 }, { "content": "#[allow(non_snake_case)]\n\nfn DeviceTile<'a>(cx: Scope<'a, DeviceTileProps<'a>>) -> Element<'a> {\n\n let name = &cx.props.device.name;\n\n let color = cx\n\n .props\n\n .device\n\n .state\n\n .get_color()\n\n .map(scale_hsv_value_to_display);\n\n let modal_open = use_state(&cx, || false);\n\n\n\n let gradient = if let Some(color) = color {\n\n vec![color]\n\n } else {\n\n vec![]\n\n };\n\n\n\n cx.render(rsx! {\n\n Tile {\n\n gradient: gradient,\n\n contents: cx.render(rsx! {\n", "file_path": "frontend/src/device_list.rs", "rank": 75, "score": 86899.60796139852 }, { "content": "fn get_triggered_routine_ids(\n\n routines: &RoutinesConfig,\n\n groups: &Groups,\n\n state: &DevicesState,\n\n) -> HashSet<RoutineId> {\n\n let triggered_routine_ids: HashSet<RoutineId> = routines\n\n .iter()\n\n .filter(\n\n |(_, routine)| match is_routine_triggered(state, groups, routine) {\n\n Ok(triggered) => triggered,\n\n Err(e) => {\n\n println!(\"Error while checking routine {:?} rules: {}\", routine, e);\n\n false\n\n }\n\n },\n\n )\n\n .map(|(routine_id, _)| routine_id.clone())\n\n .collect();\n\n\n\n triggered_routine_ids\n\n}\n\n\n", "file_path": "backend/src/homectl_core/rules.rs", "rank": 76, "score": 85795.15066901599 }, { "content": " LightMsg(LightMsg),\n\n}\n\n\n\npub async fn set_device_state(config: HueConfig, device: &Device) -> Result<(), Box<dyn Error>> {\n\n let body = match &device.state {\n\n DeviceState::OnOffDevice(state) => Ok(HueMsg::OnOffDeviceMsg(OnOffDeviceMsg {\n\n on: state.power,\n\n transitiontime: None,\n\n })),\n\n DeviceState::Light(state) => {\n\n // Hue repserents transition times as multiples of 100 ms\n\n let transitiontime = state\n\n .transition_ms\n\n .map(|transition_ms| ((transition_ms as f64) / 100.0) as u16);\n\n\n\n Ok(match state.color {\n\n Some(DeviceColor::Color(color)) => {\n\n let hsv = color;\n\n let color: Yxy = color.into();\n\n\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 77, "score": 72755.3033514014 }, { "content": " // palette hue value is [0, 360[, Hue uses [0, 65536[\n\n // let hue = ((color.hue.to_positive_degrees() / 360.0) * 65536.0).floor() as u16;\n\n\n\n // palette sat value is [0, 1], Hue uses [0, 254]\n\n // let sat = (f32::min(color.saturation * 254.0, 1.0)).floor() as u16;\n\n\n\n // palette bri value is [0, 1], Hue uses [0, 254]\n\n // 
let bri = (f32::min(color.value, 1.0) * 254.0).floor() as u16;\n\n\n\n let x = color.x;\n\n let y = color.y;\n\n\n\n let xy = Some(vec![x, y]);\n\n // let bri = (color.luma * 254.0 * state.brightness.unwrap_or(1.0) as f32).floor()\n\n // as u32;\n\n let bri = (hsv.value * 254.0 * (state.brightness.unwrap_or(1.0) as f32)).floor()\n\n as u8;\n\n\n\n HueMsg::LightMsg(LightMsg {\n\n on: state.power,\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 78, "score": 72753.83372581142 }, { "content": " xy,\n\n ct: None,\n\n bri,\n\n transitiontime,\n\n })\n\n }\n\n Some(DeviceColor::Cct(ref ct)) => {\n\n let bri = (254.0 * (state.brightness.unwrap_or(1.0) as f32)).floor() as u8;\n\n\n\n HueMsg::LightMsg(LightMsg {\n\n on: state.power,\n\n xy: None,\n\n ct: Some(f32::floor(1000000.0 / ct.get_cct()) as u16),\n\n bri,\n\n transitiontime,\n\n })\n\n }\n\n None => HueMsg::OnOffDeviceMsg(OnOffDeviceMsg {\n\n on: state.power,\n\n transitiontime,\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 79, "score": 72751.34337885238 }, { "content": "\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct LightMsg {\n\n on: bool,\n\n bri: u8,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n xy: Option<Vec<f32>>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n ct: Option<u16>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n transitiontime: Option<u16>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(untagged)]\n\npub enum HueMsg {\n\n OnOffDeviceMsg(OnOffDeviceMsg),\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 80, "score": 72747.53172411193 }, { "content": " let mut interval = time::interval(poll_rate);\n\n\n\n loop {\n\n interval.tick().await;\n\n\n\n let sender = sender.clone();\n\n let result = do_refresh_lights(config.clone(), integration_id.clone(), sender).await;\n\n\n\n match result {\n\n Ok(()) => {}\n\n Err(e) => println!(\"Error while polling lights: {:?}\", e),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct OnOffDeviceMsg {\n\n on: bool,\n\n transitiontime: Option<u16>,\n\n}\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 81, "score": 72740.51757682586 }, { "content": "use homectl_types::{\n\n device::{Device, DeviceColor, DeviceState},\n\n event::{Message, TxEventChannel},\n\n integration::IntegrationId,\n\n};\n\n\n\nuse super::bridge::BridgeLights;\n\nuse super::{light_utils::bridge_light_to_device, HueConfig};\n\nuse anyhow::anyhow;\n\nuse palette::Yxy;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{error::Error, time::Duration};\n\nuse tokio::time;\n\n\n\npub async fn do_refresh_lights(\n\n config: HueConfig,\n\n integration_id: IntegrationId,\n\n sender: TxEventChannel,\n\n) -> Result<(), Box<dyn Error>> {\n\n let bridge_lights: BridgeLights = surf::get(&format!(\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 82, "score": 72738.94588788047 }, { "content": " }),\n\n })\n\n }\n\n DeviceState::Sensor(_) => {\n\n // Do nothing\n\n return Ok(());\n\n }\n\n _ => Err(format!(\n\n \"Unsupported device type encountered in hue set_device_state: {:?}\",\n\n device.state\n\n )),\n\n }?;\n\n\n\n // println!(\"setting light \\\"{}\\\" state: {:?}\", device.name, serde_json::to_string(&body));\n\n\n\n surf::put(&format!(\n\n \"http://{}/api/{}/{}/state\",\n\n config.addr, config.username, device.id\n\n ))\n\n .body(surf::Body::from_json(&body).map_err(|err| anyhow!(err))?)\n\n .await\n\n .map_err(|err| 
anyhow!(err))?\n\n .body_string()\n\n .await\n\n .map_err(|err| anyhow!(err))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 83, "score": 72734.2976135732 }, { "content": " \"http://{}/api/{}/lights\",\n\n config.addr, config.username\n\n ))\n\n .await\n\n .map_err(|err| anyhow!(err))?\n\n .body_json()\n\n .await\n\n .map_err(|err| anyhow!(err))?;\n\n\n\n for (light_id, bridge_light) in bridge_lights {\n\n let device = bridge_light_to_device(light_id, integration_id.clone(), bridge_light);\n\n\n\n sender.send(Message::IntegrationDeviceRefresh { device });\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn poll_lights(config: HueConfig, integration_id: IntegrationId, sender: TxEventChannel) {\n\n let poll_rate = Duration::from_millis(config.poll_rate_lights);\n", "file_path": "backend/src/integrations/hue/lights.rs", "rank": 84, "score": 72730.41062912058 }, { "content": " let hsv = Yxy::new(x, y, 1.0);\n\n let mut device_color: Hsv = hsv.into();\n\n device_color.value = 1.0;\n\n Some(DeviceColor::Color(device_color))\n\n })(),\n\n Some(ColorMode::Hs) => (move || {\n\n let hue = hue?;\n\n let sat = sat?;\n\n\n\n let device_color = Hsv::new(hue, sat, 1.0);\n\n Some(DeviceColor::Color(device_color))\n\n })(),\n\n None => None,\n\n };\n\n\n\n Light {\n\n power,\n\n brightness,\n\n color,\n\n transition_ms,\n\n }\n\n}\n\n\n", "file_path": "backend/src/integrations/hue/light_utils.rs", "rank": 85, "score": 70665.58225605894 }, { "content": "use super::bridge::{BridgeLight, ColorMode};\n\n\n\nuse homectl_types::{\n\n device::{CorrelatedColorTemperature, Device, DeviceColor, DeviceId, DeviceState, Light},\n\n integration::IntegrationId,\n\n};\n\nuse palette::{Hsv, Yxy};\n\n\n", "file_path": "backend/src/integrations/hue/light_utils.rs", "rank": 86, "score": 70656.16775948265 }, { "content": "#[derive(Deserialize)]\n\nstruct Robot {\n\n secret_key: String,\n\n serial: String,\n\n nucleo_url: String,\n\n}\n\n\n", "file_path": "backend/src/integrations/neato/api.rs", "rank": 87, "score": 65689.96459330368 }, { "content": "#[derive(Debug, Deserialize, Clone)]\n\nstruct PingMachine {\n\n id: String,\n\n ip: String,\n\n}\n\n\n\npub struct Ping {\n\n id: IntegrationId,\n\n config: PingConfig,\n\n sender: TxEventChannel,\n\n}\n\n\n\n#[async_trait]\n\nimpl Integration for Ping {\n\n fn new(id: &IntegrationId, config: &config::Value, sender: TxEventChannel) -> Result<Ping> {\n\n let config = config\n\n .clone()\n\n .try_into()\n\n .context(\"Failed to deserialize config of WakeOnLan integration\")?;\n\n Ok(Ping {\n\n id: id.clone(),\n", "file_path": "backend/src/integrations/ping/mod.rs", "rank": 88, "score": 64704.50460192344 }, { "content": "#[derive(Serialize)]\n\nstruct RobotMessage {\n\n #[serde(rename = \"reqId\")]\n\n req_id: String,\n\n cmd: String,\n\n params: Option<HouseCleaningParams>,\n\n}\n\n\n\nconst BASE_URL: &str = \"https://beehive.neatocloud.com\";\n\n\n", "file_path": "backend/src/integrations/neato/api.rs", "rank": 89, "score": 64700.32950219478 }, { "content": "#[derive(Deserialize)]\n\nstruct SessionsResponse {\n\n access_token: String,\n\n}\n\n\n", "file_path": "backend/src/integrations/neato/api.rs", "rank": 90, "score": 64700.32950219478 }, { "content": "#[derive(Serialize)]\n\nstruct AuthBody {\n\n email: String,\n\n password: String,\n\n}\n\n\n", "file_path": "backend/src/integrations/neato/api.rs", "rank": 91, "score": 64700.32950219478 }, { "content": "#[derive(Props, PartialEq)]\n\nstruct GroupRowProps {\n\n group_id: GroupId,\n\n name: 
String,\n\n}\n\n\n", "file_path": "frontend/src/group_list.rs", "rank": 92, "score": 64700.32950219478 }, { "content": "#[derive(Serialize)]\n\nstruct HouseCleaningParams {\n\n /// Should be set to 4 for persistent map\n\n category: u32,\n\n\n\n /// 1 is eco, 2 is turbo\n\n mode: u32,\n\n\n\n /// 1 is normal, 2 is extra care, 3 is deep. 3 requires mode = 2.\n\n #[serde(rename = \"navigationMode\")]\n\n navigation_mode: u32,\n\n}\n\n\n", "file_path": "backend/src/integrations/neato/api.rs", "rank": 93, "score": 63764.216722209116 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct WakeOnLanConfig {\n\n machines: Vec<WakeOnLanMachine>,\n\n}\n\n\n\npub struct WakeOnLan {\n\n id: IntegrationId,\n\n config: WakeOnLanConfig,\n\n sender: TxEventChannel,\n\n}\n\n\n\n#[async_trait]\n\nimpl Integration for WakeOnLan {\n\n fn new(\n\n id: &IntegrationId,\n\n config: &config::Value,\n\n sender: TxEventChannel,\n\n ) -> Result<WakeOnLan> {\n\n let config = config\n\n .clone()\n\n .try_into()\n", "file_path": "backend/src/integrations/wake_on_lan/mod.rs", "rank": 94, "score": 62881.57371380967 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct WakeOnLanMachine {\n\n id: String,\n\n mac: String,\n\n sleep_on_lan: Option<String>,\n\n}\n\n\n", "file_path": "backend/src/integrations/wake_on_lan/mod.rs", "rank": 95, "score": 62881.57371380967 }, { "content": "#[async_trait]\n\npub trait Integration {\n\n // rustc --explain E0038\n\n fn new(id: &IntegrationId, config: &config::Value, event_tx: TxEventChannel) -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n async fn register(&mut self) -> Result<()>;\n\n async fn start(&mut self) -> Result<()>;\n\n async fn set_integration_device_state(&mut self, device: &Device) -> Result<()>;\n\n async fn run_integration_action(&mut self, payload: &IntegrationActionPayload) -> Result<()>;\n\n}\n", "file_path": "types/src/integration.rs", "rank": 96, "score": 61559.034093464696 }, { "content": "fn main() {\n\n dioxus::web::launch(app);\n\n}\n\n\n", "file_path": "frontend/src/main.rs", "rank": 97, "score": 57767.95730511054 }, { "content": "fn post_action(\n\n app_state: &Arc<AppState>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path(\"trigger\")\n\n .and(warp::post())\n\n .and(warp::body::json())\n\n .and(with_state(app_state))\n\n .map(|action: Action, app_state: Arc<AppState>| {\n\n let sender = app_state.sender.clone();\n\n sender.send(Message::Action(action));\n\n\n\n Ok(warp::reply::json(&()))\n\n })\n\n}\n", "file_path": "backend/src/api/actions.rs", "rank": 98, "score": 55673.912431054065 }, { "content": "// TODO: Load integrations dynamically as plugins:\n\n// https://michael-f-bryan.github.io/rust-ffi-guide/dynamic_loading.html\n\nfn load_integration(\n\n module_name: &str,\n\n id: &IntegrationId,\n\n config: &config::Value,\n\n event_tx: TxEventChannel,\n\n) -> Result<Box<dyn Integration + Send>> {\n\n match module_name {\n\n \"boolean\" => Ok(Box::new(Boolean::new(id, config, event_tx)?)),\n\n \"circadian\" => Ok(Box::new(Circadian::new(id, config, event_tx)?)),\n\n \"random\" => Ok(Box::new(Random::new(id, config, event_tx)?)),\n\n \"timer\" => Ok(Box::new(Timer::new(id, config, event_tx)?)),\n\n \"dummy\" => Ok(Box::new(Dummy::new(id, config, event_tx)?)),\n\n \"lifx\" => Ok(Box::new(Lifx::new(id, config, event_tx)?)),\n\n \"hue\" => Ok(Box::new(Hue::new(id, config, event_tx)?)),\n\n \"neato\" => Ok(Box::new(Neato::new(id, config, event_tx)?)),\n\n \"ping\" => Ok(Box::new(Ping::new(id, 
config, event_tx)?)),\n\n \"tuya\" => Ok(Box::new(Tuya::new(id, config, event_tx)?)),\n\n \"wake_on_lan\" => Ok(Box::new(WakeOnLan::new(id, config, event_tx)?)),\n\n _ => Err(anyhow!(\"Unknown module name {}!\", module_name)),\n\n }\n\n}\n", "file_path": "backend/src/homectl_core/integrations.rs", "rank": 99, "score": 54714.18967868674 } ]
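As an illustrative aside on the Hue dimmer-switch snippets in the context items above (to_buttonevent, get_button_id, get_button_state): they all read a four-digit buttonevent code positionally. The sketch below is a minimal, self-contained reconstruction of that digit layout, not code from either repository; the enum names and the sample values in main are assumptions chosen only to mirror what the snippets show.

// Standalone sketch of the dimmer-switch `buttonevent` digit layout implied by
// the helper snippets above. First digit = physical button, third digit = long-press
// flag, fourth digit = press state; the second digit is ignored, matching the snippets.
#[derive(Debug, PartialEq)]
enum Button { On, Up, Down, Off, Unknown }

#[derive(Debug, PartialEq)]
enum Press { Short, Long, Released, NotUsed }

fn decode(buttonevent: u32) -> (Button, Press) {
    let s = buttonevent.to_string();

    // First digit selects the button (1 = On, 2 = Up, 3 = Down, 4 = Off).
    let button = match s.chars().next() {
        Some('1') => Button::On,
        Some('2') => Button::Up,
        Some('3') => Button::Down,
        Some('4') => Button::Off,
        _ => Button::Unknown,
    };

    // Third digit flags a long press; fourth digit carries the press state
    // (0 = initial_press, 2 = short_release, 3 = long_release).
    let press = match (s.chars().nth(2), s.chars().nth(3)) {
        (Some('0'), Some('0')) => Press::Short,
        (Some('0'), Some('2')) | (Some('0'), Some('3')) => Press::Released,
        (Some('1'), Some('0')) => Press::Long,
        _ => Press::NotUsed,
    };

    (button, press)
}

fn main() {
    assert_eq!(decode(1000), (Button::On, Press::Short));    // On pressed
    assert_eq!(decode(1002), (Button::On, Press::Released)); // On short-released
    assert_eq!(decode(4010), (Button::Off, Press::Long));    // Off held (long press)
    println!("{:?}", decode(2002));                          // (Up, Released)
}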
Rust
src/options.rs
manuelsteiner/rcproxy
bef7ced754a5d15328ca20e6ff46ca3018b729b1
use ipnet::IpNet; use log::LevelFilter; use std::collections::HashMap; use std::process::exit; use std::result::Result::Ok; use structopt::clap::arg_enum; use structopt::StructOpt; lazy_static! { pub static ref OPT: Opt = { let mut opt = Opt::from_args(); opt.headers = match opt.header.clone() { Some(headers) => { match create_header_map(&headers) { Ok(headers) => Some(headers), Err(_error) => { eprintln!("Error parsing additional response headers from arguments. Exiting."); exit(1); } } } None => None, }; if let Some(blacklist) = opt.blacklist_ip.clone() { let mut blacklist = blacklist; if blacklist.len() == 1 { blacklist = split_coma_separated_list(blacklist[0].as_str()); } if !validate_ip_blacklist(&blacklist) { eprintln!("Error parsing client IP blacklist. Exiting."); exit(1); } opt.blacklist_ip = Some(blacklist) } if let Some(filters) = opt.filter_url.clone() { let mut filters = filters; if filters.len() == 1 { filters = split_coma_separated_list(filters[0].as_str()); } match validate_and_extract_filters(&filters) { Ok((allow, deny)) => { opt.filters_allow = allow; opt.filters_deny = deny; }, Err(_) => { eprintln!("Error parsing URL filters. Exiting."); exit(1); } } } if opt.mime.len() == 1 { opt.mime = split_coma_separated_list(opt.mime[0].as_str()); } opt.mime_regex = create_mime_regex(&opt.mime); opt }; } #[derive(Debug, StructOpt)] #[structopt(name = "rcproxy")] pub struct Opt { #[structopt(short, long, default_value = "127.0.0.1:80", env = "RCPROXY_ADDRESS")] pub address: String, #[structopt( short = "n", long, default_value = "rcproxy", env = "RCPROXY_SERVER_NAME" )] pub server_name: String, #[structopt(short = "H", long)] pub header: Option<Vec<String>>, #[structopt(skip)] pub headers: Option<HashMap<String, String>>, #[structopt(short, long, env = "RCPROXY_BLACKLIST_IP")] pub blacklist_ip: Option<Vec<String>>, #[structopt(short, long, requires_all = &["filter-default"], env = "RCPROXY_FILTER_URL")] pub filter_url: Option<Vec<String>>, #[structopt(skip)] pub filters_allow: Vec<String>, #[structopt(skip)] pub filters_deny: Vec<String>, #[structopt(short = "d", long, possible_values = &DefaultFilterRule::variants(), case_insensitive = true, default_value = "Allow", env = "RCPROXY_FILTER_DEFAULT")] pub filter_default: DefaultFilterRule, #[structopt(short, long, env = "RCPROXY_KEY")] pub key: String, #[structopt(short = "s", long)] pub allow_https: bool, #[structopt(short = "c", long, default_value = "0", env = "RCPROXY_MAX_SIZE")] pub max_size: u32, #[structopt(short = "r", long, default_value = "5", env = "RCPROXY_MAX_REDIRECTS")] pub max_redirects: u8, #[structopt(short, long, default_value = "5", env = "RCPROXY_TIMEOUT")] pub timeout: u8, #[structopt(short, long, env = "RCPROXY_PROXY")] pub proxy: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-password"], env = "RCPROXY_PROXY_USERNAME")] pub proxy_username: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-username"], env = "RCPROXY_PROXY_PASSWORD")] pub proxy_password: Option<String>, #[structopt(short, long, default_value = "image/*", env = "RCPROXY_MIME")] pub mime: Vec<String>, #[structopt(skip)] pub mime_regex: String, #[structopt(short, long, possible_values = &LogLevel::variants(), case_insensitive = true, default_value = "Info", env = "RCPROXY_LOGLEVEL")] pub log_level: LogLevel, } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum DefaultFilterRule { Allow, Deny, } } arg_enum! 
{ #[derive(Clone, Copy, Debug)] pub enum LogLevel { Trace, Debug, Info, Warn, Error, } } impl From<LogLevel> for LevelFilter { fn from(other: LogLevel) -> LevelFilter { match other { LogLevel::Trace => LevelFilter::Trace, LogLevel::Debug => LevelFilter::Debug, LogLevel::Info => LevelFilter::Info, LogLevel::Warn => LevelFilter::Warn, LogLevel::Error => LevelFilter::Error, } } } fn split_coma_separated_list(string: &str) -> Vec<String> { string.split(',').map(str::trim).map(String::from).collect() } fn create_header_map(headers: &[String]) -> Result<HashMap<String, String>, ()> { let mut map = HashMap::new(); for header in headers { let parts: Vec<String> = header.split(": ").map(String::from).collect(); if parts.len() != 2 { eprintln!("Error parsing additional header value: \"{}\".", header); return Err(()); } let key = parts[0].trim().to_string(); let value = parts[1].trim().to_string(); map.insert(key, value); } Ok(map) } fn validate_ip_blacklist(blocks: &[String]) -> bool { for block in blocks.iter() { if block.parse::<IpNet>().is_err() { eprintln!("Error parsing IP range: \"{}\"", block); return false; } } true } fn validate_and_extract_filters(filters: &[String]) -> Result<(Vec<String>, Vec<String>), ()> { let mut filters_allow = Vec::<String>::new(); let mut filters_deny = Vec::<String>::new(); for filter in filters.iter() { if filter.len() < 3 { return Err(()); } match filter.get(..2).unwrap() { "a:" => filters_allow.push(filter.get(2..).unwrap().to_string()), "d:" => filters_deny.push(filter.get(2..).unwrap().to_string()), _ => return Err(()), } } Ok((filters_allow, filters_deny)) } fn create_mime_regex(mimes: &[String]) -> String { mimes.join("|").replace("*", ".+") } #[cfg(test)] mod tests { use super::*; #[test] fn test_split_coma_separated_list() { let strings = vec!["value1", "127.0.0.0/8, 10.0.0.0/24", "image/*, audio/*"]; let results: Vec<Vec<String>> = vec![ vec!["value1".to_string()], vec!["127.0.0.0/8".to_string(), "10.0.0.0/24".to_string()], vec!["image/*".to_string(), "audio/*".to_string()], ]; for (index, string) in strings.iter().enumerate() { assert_eq!(split_coma_separated_list(&string), results[index]); } } #[test] fn test_create_mime_regex() { let mimes: Vec<String> = vec!["image/*".to_string(), "audio/*".to_string()]; let mime_regex = "image/.+|audio/.+"; assert_eq!(create_mime_regex(&mimes), mime_regex); } #[test] fn test_create_header_map() { let headers: Vec<String> = vec!["key1: value1".to_string(), "key2: value2".to_string()]; assert!(create_header_map(&headers).is_ok()); let headers = create_header_map(&headers).unwrap(); assert_eq!(headers.len(), 2); assert!(headers.contains_key("key1")); assert!(headers.contains_key("key2")); assert_eq!(headers.get("key1"), Some(&"value1".to_string())); assert_eq!(headers.get("key2"), Some(&"value2".to_string())); } #[test] fn test_create_header_map_error() { let headers: Vec<String> = vec![ "key1: value1".to_string(), "key2: value2: error".to_string(), ]; assert!(create_header_map(&headers).is_err()); } #[test] fn test_validate_ip_blacklist() { let blacklist: Vec<String> = vec!["127.0.0.0/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), true) } #[test] fn test_validate_ip_blacklist_error() { let blacklist: Vec<String> = vec!["127.0.0.0.1/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), false) } #[test] fn test_validate_and_extract_filters_allow_and_deny() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), 
"d:.*deny.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string()]; let filters_deny = vec![".*deny.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_allow_only() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "a:.*allow2.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string(), ".*allow2.com/.*".to_string()]; let filters_deny: Vec<String> = vec![]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_deny_only() { let filters: Vec<String> = vec![ "d:.*deny.com/.*".to_string(), "d:.*deny2.com/.*".to_string(), ]; let filters_allow: Vec<String> = vec![]; let filters_deny = vec![".*deny.com/.*".to_string(), ".*deny2.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_error() { let filters: Vec<String> = vec!["ad:.*deny.com/.*".to_string()]; assert_eq!(validate_and_extract_filters(&filters), Err(())); } }
use ipnet::IpNet; use log::LevelFilter; use std::collections::HashMap; use std::process::exit; use std::result::Result::Ok; use structopt::clap::arg_enum; use structopt::StructOpt; lazy_static! { pub static ref OPT: Opt = { let mut opt = Opt::from_args(); opt.headers = match opt.header.clone() { Some(headers) => { match create_header_map(&headers) { Ok(headers) => Some(headers), Err(_error) => { eprintln!("Error parsing additional response headers from arguments. Exiting."); exit(1); } } } None => None, }; if let Some(blacklist) = opt.blacklist_ip.clone() { let mut blacklist = blacklist; if blacklist.len() == 1 { blacklist = split_coma_separated_list(blacklist[0].as_str()); } if !validate_ip_blacklist(&blacklist) { eprintln!("Error parsing client IP blacklist. Exiting."); exit(1); } opt.blacklist_ip = Some(blacklist) } if let Some(filters) = opt.filter_url.clone() { let mut filters = filters; if filters.len() == 1 { filters = split_coma_separated_list(filters[0].as_str()); } match validate_and_extract_filters(&filters) { Ok((allow, deny)) => { opt.filters_allow = allow; opt.filters_deny = deny; }, Err(_) => { eprintln!("Error parsing URL filters. Exiting."); exit(1); } } } if opt.mime.len() == 1 { opt.mime = split_coma_separated_list(opt.mime[0].as_str()); } opt.mime_regex = create_mime_regex(&opt.mime); opt }; } #[derive(Debug, StructOpt)] #[structopt(name = "rcproxy")] pub struct Opt { #[structopt(short, long, default_value = "127.0.0.1:80", env = "RCPROXY_ADDRESS")] pub address: String, #[structopt( short = "n", long, default_value = "rcproxy", env = "RCPROXY_SERVER_NAME" )] pub server_name: String, #[structopt(short = "H", long)] pub header: Option<Vec<String>>, #[structopt(skip)] pub headers: Option<HashMap<String, String>>, #[structopt(short, long, env = "RCPROXY_BLACKLIST_IP")] pub blacklist_ip: Option<Vec<String>>, #[structopt(short, long, requires_all = &["filter-default"], env = "RCPROXY_FILTER_URL")] pub filter_url: Option<Vec<String>>, #[structopt(skip)] pub filters_allow: Vec<String>, #[structopt(skip)] pub filters_deny: Vec<String>, #[structopt(short = "d", long, possible_values = &DefaultFilterRule::variants(), case_insensitive = true, default_value = "Allow", env = "RCPROXY_FILTER_DEFAULT")] pub filter_default: DefaultFilterRule, #[structopt(short, long, env = "RCPROXY_KEY")] pub key: String, #[structopt(short = "s", long)] pub allow_https: bool, #[structopt(short = "c", long, default_value = "0", env = "RCPROXY_MAX_SIZE")] pub max_size: u32, #[structopt(short = "r", long, default_value = "5", env = "RCPROXY_MAX_REDIRECTS")] pub max_redirects: u8, #[structopt(short, long, default_value = "5", env = "RCPROXY_TIMEOUT")] pub timeout: u8, #[structopt(short, long, env = "RCPROXY_PROXY")] pub proxy: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-password"], env = "RCPROXY_PROXY_USERNAME")] pub proxy_username: Option<String>, #[structopt(long, requires_all = &["proxy", "proxy-username"], env = "RCPROXY_PROXY_PASSWORD")] pub proxy_password: Option<String>, #[structopt(short, long, default_value = "image/*", env = "RCPROXY_MIME")] pub mime: Vec<String>, #[structopt(skip)] pub mime_regex: String, #[structopt(short, long, possible_values = &LogLevel::variants(), case_insensitive = true, default_value = "Info", env = "RCPROXY_LOGLEVEL")] pub log_level: LogLevel, } arg_enum! { #[derive(Clone, Copy, Debug)] pub enum DefaultFilterRule { Allow, Deny, } } arg_enum! 
{ #[derive(Clone, Copy, Debug)] pub enum LogLevel { Trace, Debug, Info, Warn, Error, } } impl From<LogLevel> for LevelFilter { fn from(other: LogLevel) -> LevelFilter { match other { LogLevel::Trace => LevelFilter::Trace, LogLevel::Debug => LevelFilter::Debug, LogLevel::Info => LevelFilter::Info, LogLevel::Warn => LevelFilter::Warn, LogLevel::Error => LevelFilter::Error, } } } fn split_coma_separated_list(string: &str) -> Vec<String> { string.split(',').map(str::trim).map(String::from).collect() } fn create_header_map(headers: &[String]) -> Result<HashMap<String, String>, ()> { let mut map = HashMap::new(); for header in headers { let parts: Vec<String> = header.split(": ").map(String::from).collect(); if parts.len() != 2 { eprintln!("Error parsing additional header value: \"{}\".", header); return Err(()); } let key = parts[0].trim().to_string(); let value = parts[1].trim().to_string(); map.insert(key, value); } Ok(map) } fn validate_ip_blacklist(blocks: &[String]) -> bool { for block in blocks.iter() { if block.parse::<IpNet>().is_err() { eprintln!("Error parsing IP range: \"{}\"", block); return false; } } true } fn validate_and_extract_filters(filters: &[String]) -> Result<(Vec<String>, Vec<String>), ()> { let mut filters_allow = Vec::<String>::new(); let mut filters_deny = Vec::<String>::new(); for filter in filters.iter() { if filter.len() < 3 { return Err(()); } match filter.get(..2).unwrap() { "a:" => filters_allow.push(filter.get(2..).unwrap().to_string()), "d:" => filters_deny.push(filter.get(2..).unwrap().to_string()), _ => return Err(()), } } Ok((filters_allow, filters_deny)) } fn create_mime_regex(mimes: &[String]) -> String { mimes.join("|").replace("*", ".+") } #[cfg(test)] mod tests { use super::*; #[test] fn test_split_coma_separated_list() { let strings = vec!["value1", "127.0.0.0/8, 10.0.0.0/24", "image/*, audio/*"]; let results: Vec<Vec<String>> = vec![ vec!["value1".to_string()], vec!["127.0.0.0/8".to_string(), "10.0.0.0/24".to_string()], vec!["image/*".to_string(), "audio/*".to_string()], ]; for (index, string) in strings.iter().enumerate() { assert_eq!(split_coma_separated_list(&string), results[index]); } } #[test] fn test_create_mime_regex() { let mimes: Vec<String> = vec!["image/*".to_string(), "audio/*".to_string()]; let mime_regex = "image/.+|audio/.+"; assert_eq!(create_mime_regex(&mimes), mime_regex); } #[test] fn test_create_header_map() { let headers: Vec<String> = vec!["key1: value1".to_string(), "key2: value2".to_string()]; assert!(create_header_map(&headers).is_ok()); let headers = create_header_map(&headers).unwrap(); assert_eq!(headers.len(), 2); assert!(headers.contains_key("key1")); assert!(headers.contains_key("key2")); assert_eq!(headers.get("key1"), Some(&"value1".to_string())); assert_eq!(headers.get("key2"), Some(&"value2".to_string())); } #[test] fn test_create_header_map_error() { let headers: Vec<String> = vec![ "key1: value1".to_string(), "key2: value2: error".to_string(), ]; assert!(create_header_map(&headers).is_err()); } #[test] fn test_validate_ip_blacklist() { let blacklist: Vec<String> = vec!["127.0.0.0/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), true) } #[test] fn test_validate_ip_blacklist_error() { let blacklist: Vec<String> = vec!["127.0.0.0.1/8".to_string(), "::1/128".to_string()]; assert_eq!(validate_ip_blacklist(&blacklist), false) } #[test] fn test_validate_and_extract_filters_allow_and_deny() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), 
"d:.*deny.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string()]; let filters_deny = vec![".*deny.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_allow_only() { let filters: Vec<String> = vec![ "a:.*allow.com/.*".to_string(), "a:.*allow2.com/.*".to_string(), ]; let filters_allow = vec![".*allow.com/.*".to_string(), ".*allow2.com/.*".to_string()]; let filters_deny: Vec<String> = vec![]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); } #[test] fn test_validate_and_extract_filters_deny_only() { let filters: Vec<String> = vec![ "d:.*deny.com/.*".to_string(), "d:.*deny2.com/.*".to_string(), ]; let filters_allow: Vec<String> = vec![]; let filters_deny = vec![".*deny.co
#[test] fn test_validate_and_extract_filters_error() { let filters: Vec<String> = vec!["ad:.*deny.com/.*".to_string()]; assert_eq!(validate_and_extract_filters(&filters), Err(())); } }
m/.*".to_string(), ".*deny2.com/.*".to_string()]; assert_eq!( validate_and_extract_filters(&filters), Ok((filters_allow, filters_deny)) ); }
function_block-function_prefixed
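To make the function_block-function_prefixed split above concrete: the prefix field stops inside the vec![..] literal of test_validate_and_extract_filters_deny_only (at "….*deny.co"), the middle field supplies the remainder of that test, and the suffix field resumes at the following #[test]. The sketch below reassembles those pieces into the complete test, with validate_and_extract_filters copied from the file_code field so the snippet compiles on its own; the line breaks are restored editorially and are an assumption, since the row stores the code flattened.

// Copied from the file_code field of this row so the snippet is self-contained.
fn validate_and_extract_filters(filters: &[String]) -> Result<(Vec<String>, Vec<String>), ()> {
    let mut filters_allow = Vec::<String>::new();
    let mut filters_deny = Vec::<String>::new();

    for filter in filters.iter() {
        if filter.len() < 3 {
            return Err(());
        }
        match filter.get(..2).unwrap() {
            "a:" => filters_allow.push(filter.get(2..).unwrap().to_string()),
            "d:" => filters_deny.push(filter.get(2..).unwrap().to_string()),
            _ => return Err(()),
        }
    }

    Ok((filters_allow, filters_deny))
}

// Reassembled prefix-end + middle: the prefix breaks off inside the second
// vec![..] literal below; the middle completes that literal, the assertion,
// and the closing brace.
#[test]
fn test_validate_and_extract_filters_deny_only() {
    let filters: Vec<String> = vec![
        "d:.*deny.com/.*".to_string(),
        "d:.*deny2.com/.*".to_string(),
    ];

    let filters_allow: Vec<String> = vec![];
    let filters_deny = vec![".*deny.com/.*".to_string(), ".*deny2.com/.*".to_string()];

    assert_eq!(
        validate_and_extract_filters(&filters),
        Ok((filters_allow, filters_deny))
    );
}

Running cargo test on a module containing both items exercises the reassembled completion exactly as it appears in the file_code field.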
[ { "content": "fn decode_and_validate_url(encoded_url: &str, allow_https: bool) -> Result<String, ()> {\n\n let url = match hex::decode(encoded_url) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"URL parameter is not HEX encoded.\");\n\n\n\n match base64::decode(encoded_url) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"Malformed request (URL parameter is neither HEX nor Base64 encoded).\");\n\n return Err(());\n\n }\n\n }\n\n }\n\n };\n\n\n\n let url = match str::from_utf8(&url) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"Malformed request (URL parameter does not represent a string).\");\n", "file_path": "src/server.rs", "rank": 0, "score": 146213.87290945166 }, { "content": "fn validate_client_address(address: &IpAddr, blacklist: &[IpNet]) -> bool {\n\n for block in blacklist.iter() {\n\n if block.contains(address) {\n\n debug!(\"Block {} contains client address {}\", block, address);\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 2, "score": 137436.0242367794 }, { "content": "fn validate_hmac(url: &str, digest: &str, key: &str) -> bool {\n\n let mut mac = Hmac::<Sha256>::new_from_slice(key.as_bytes()).unwrap();\n\n mac.update(url.as_bytes());\n\n\n\n let code = match hex::decode(digest) {\n\n Ok(code) => code,\n\n Err(_error) => {\n\n debug!(\"Malformed request (Digest parameter not Hex encoded).\");\n\n return false;\n\n }\n\n };\n\n\n\n return match mac.verify(&code) {\n\n Ok(_url) => true,\n\n Err(_error) => {\n\n debug!(\"Invalid HMAC (The digest parameter does not match the digest computed from the URL with the provided secret key).\");\n\n false\n\n }\n\n };\n\n}\n", "file_path": "src/server.rs", "rank": 3, "score": 135806.39916285893 }, { "content": "fn add_headers(headers: &mut HeaderMap) {\n\n for (key, value) in HEADERS.iter() {\n\n headers.insert(key, value.clone());\n\n }\n\n}\n\n\n\nasync fn get_content_handler(state: State) -> HandlerResult {\n\n let extractor = PathExtractor::borrow_from(&state);\n\n let headers = HeaderMap::borrow_from(&state);\n\n\n\n let mut response_bad_request = create_response(\n\n &state,\n\n StatusCode::BAD_REQUEST,\n\n mime::TEXT_PLAIN,\n\n \"400 Bad Request\",\n\n );\n\n\n\n let response_bad_request_headers = response_bad_request.headers_mut();\n\n add_headers(response_bad_request_headers);\n\n\n", "file_path": "src/server.rs", "rank": 7, "score": 98745.04286953346 }, { "content": "fn signals() -> impl Future<Output = Result<(), ()>> {\n\n let sigterm = async {\n\n signal(SignalKind::terminate()).unwrap().recv().await;\n\n println!(\"Handling SIGTERM.\");\n\n Ok::<(), ()>(())\n\n };\n\n\n\n let sigint = async {\n\n signal(SignalKind::interrupt()).unwrap().recv().await;\n\n println!(\"Handling SIGINT.\");\n\n Ok::<(), ()>(())\n\n };\n\n\n\n async {\n\n future::select(sigterm.boxed(), sigint.boxed()).await;\n\n Ok::<(), ()>(())\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 9, "score": 82357.58826842597 }, { "content": "pub fn setup() -> Router {\n\n debug!(\n\n \"Initialising HTTP client with {} seconds timeout and {} maximum redirects.\",\n\n OPT.timeout.to_string(),\n\n OPT.max_redirects.to_string()\n\n );\n\n lazy_static::initialize(&CLIENT);\n\n\n\n lazy_static::initialize(&HEADERS);\n\n\n\n lazy_static::initialize(&IP_BLACKLIST);\n\n\n\n debug!(\"Initialising HTTP server.\");\n\n router()\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 10, "score": 60474.14744643197 }, { "content": "fn validate_content_url(\n\n url: &str,\n\n filter_allow: &[String],\n\n 
filter_deny: &[String],\n\n filter_default: DefaultFilterRule,\n\n) -> Result<bool, ()> {\n\n for allow in filter_allow {\n\n let regex = match Regex::new(allow) {\n\n Ok(regex) => regex,\n\n Err(_error) => {\n\n debug!(\"Discovered invalid regex during parsing.\");\n\n return Err(());\n\n }\n\n };\n\n\n\n if regex.is_match(url) {\n\n debug!(\"Allow filter {} contains URL {}\", allow, url);\n\n return Ok(true);\n\n }\n\n }\n", "file_path": "src/server.rs", "rank": 11, "score": 47585.49141493375 }, { "content": "#[derive(Deserialize, StateData, StaticResponseExtender)]\n\nstruct PathExtractor {\n\n digest: String,\n\n url: String,\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 12, "score": 38561.63917094596 }, { "content": "fn print_configuration() {\n\n debug!(\"Configuration.\");\n\n debug!(\"---\");\n\n debug!(\"Address: {}\", OPT.address);\n\n debug!(\"Server name: {}\", OPT.server_name);\n\n if let Some(headers) = &OPT.headers {\n\n let mut header_string = String::new();\n\n let mut it = headers.iter().peekable();\n\n\n\n while let Some((key, value)) = it.next() {\n\n header_string.push_str(format!(\"\\\"{}: {}\\\"\", key, value).as_str());\n\n if it.peek().is_some() {\n\n header_string.push_str(\", \");\n\n }\n\n }\n\n debug!(\"Additional headers: {}\", header_string);\n\n }\n\n if let Some(blacklist) = &OPT.blacklist_ip {\n\n debug!(\"Blacklist IP ranges: {}\", &blacklist.join(\", \"));\n\n }\n", "file_path": "src/main.rs", "rank": 13, "score": 28809.348144791322 }, { "content": "fn router() -> Router {\n\n build_simple_router(|route| {\n\n route\n\n .get(\"/:digest/:url\")\n\n .with_path_extractor::<PathExtractor>()\n\n .to_async(get_content_handler);\n\n })\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 14, "score": 27996.717776092424 }, { "content": " allowed_mime_regex: &'a str,\n\n proxy: &Option<String>,\n\n proxy_username: &Option<String>,\n\n proxy_password: &Option<String>,\n\n ) -> Result<HttpClient<'a>, ()> {\n\n let mut client = reqwest::Client::builder()\n\n .connect_timeout(Duration::from_secs(timeout.into()))\n\n .redirect(Policy::limited(max_redirects.into()));\n\n\n\n if let Some(proxy) = proxy {\n\n debug!(\"Setting client HTTP proxy to {}.\", proxy);\n\n\n\n let mut proxy = match reqwest::Proxy::all(proxy) {\n\n Ok(proxy) => proxy,\n\n Err(_error) => {\n\n debug!(\"Error building HTTP client proxy.\");\n\n return Err(());\n\n }\n\n };\n\n\n", "file_path": "src/client.rs", "rank": 15, "score": 23110.415363556887 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn mock_client(mime_regex: &str) -> HttpClient {\n\n HttpClient::new(0, 4, 0, mime_regex, &None, &None, &None).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_validate_mime_concrete_type() {\n\n let mime_regex = \"image/png|audio/mpeg\";\n\n let content_mimes = vec![\"image/png\", \"audio/mpeg\"];\n\n\n\n let client = mock_client(mime_regex);\n\n\n\n for content_mime in content_mimes {\n\n assert_eq!(client.validate_mime(content_mime), true);\n", "file_path": "src/client.rs", "rank": 16, "score": 23109.064157817236 }, { "content": "use gotham::hyper::body::Bytes;\n\nuse log::debug;\n\nuse mime::Mime;\n\nuse regex::Regex;\n\nuse reqwest::header::{CONTENT_LENGTH, CONTENT_TYPE};\n\nuse reqwest::redirect::Policy;\n\nuse reqwest::{Client, Response, StatusCode};\n\nuse std::time::Duration;\n\n\n\npub struct HttpClient<'a> {\n\n client: Client,\n\n max_size: u32,\n\n allowed_mime_regex: &'a str,\n\n}\n\n\n\nimpl<'a> HttpClient<'a> {\n\n pub fn new(\n\n max_redirects: u8,\n\n 
timeout: u8,\n\n max_size: u32,\n", "file_path": "src/client.rs", "rank": 17, "score": 23107.918915158385 }, { "content": " let content_type_header = match content_response.headers().get(CONTENT_TYPE) {\n\n Some(header) => header,\n\n None => {\n\n debug!(\"Content type header is not set in response (URL: {}).\", url);\n\n return Err(());\n\n }\n\n };\n\n\n\n let content_type = match content_type_header.to_str() {\n\n Ok(content_type) => content_type,\n\n Err(_error) => {\n\n debug!(\"Error parsing content type for URL {}.\", url);\n\n return Err(());\n\n }\n\n };\n\n\n\n Ok(content_type)\n\n }\n\n\n\n fn extract_content_length(url: &str, content_response: &Response) -> Result<u32, ()> {\n", "file_path": "src/client.rs", "rank": 18, "score": 23105.542846349137 }, { "content": " let http_client = HttpClient {\n\n client,\n\n max_size,\n\n allowed_mime_regex,\n\n };\n\n\n\n Ok(http_client)\n\n }\n\n\n\n pub async fn get(&self, url: &str) -> Result<(Mime, Bytes), StatusCode> {\n\n let request = match self.client.get(url).build() {\n\n Ok(request) => request,\n\n Err(_error) => {\n\n debug!(\"Error building HTTP request.\");\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n let content_response = match self.client.execute(request).await {\n\n Ok(response) => response,\n", "file_path": "src/client.rs", "rank": 19, "score": 23105.21734475106 }, { "content": " let content_length_header = match content_response.headers().get(CONTENT_LENGTH) {\n\n Some(header) => header,\n\n None => {\n\n debug!(\n\n \"Content length header is not set in response (URL: {}).\",\n\n url\n\n );\n\n return Err(());\n\n }\n\n };\n\n\n\n let content_length = match content_length_header.to_str() {\n\n Ok(content_length) => content_length,\n\n Err(_error) => {\n\n debug!(\"Error parsing content length for URL {}.\", url);\n\n return Err(());\n\n }\n\n };\n\n\n\n let content_length = match content_length.parse::<u32>() {\n", "file_path": "src/client.rs", "rank": 20, "score": 23103.998228529646 }, { "content": " Ok(content_length) => content_length,\n\n Err(_error) => {\n\n debug!(\"Error parsing content length as number for URL {}.\", url);\n\n return Err(());\n\n }\n\n };\n\n\n\n Ok(content_length)\n\n }\n\n\n\n fn validate_mime(&self, content_mime: &str) -> bool {\n\n let regex = match Regex::new(self.allowed_mime_regex) {\n\n Ok(regex) => regex,\n\n Err(_error) => {\n\n debug!(\"Discovered invalid regex during parsing.\");\n\n return false;\n\n }\n\n };\n\n\n\n regex.is_match(content_mime)\n", "file_path": "src/client.rs", "rank": 21, "score": 23103.347707562196 }, { "content": " self.max_size.to_string()\n\n );\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n }\n\n\n\n debug!(\"Getting content body of URL {}.\", url);\n\n\n\n let content = match content_response.bytes().await {\n\n Ok(content) => content,\n\n Err(_error) => {\n\n debug!(\"Error fetching content body of URL {}.\", url);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n Ok((mime, content))\n\n }\n\n\n\n fn extract_mime<'b>(url: &str, content_response: &'b Response) -> Result<&'b str, ()> {\n", "file_path": "src/client.rs", "rank": 22, "score": 23103.265842795576 }, { "content": " let content_type = match HttpClient::extract_mime(url, &content_response) {\n\n Ok(content_type) => content_type,\n\n Err(()) => {\n\n debug!(\"Error getting content MIME of URL {}.\", url);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n debug!(\"Checking if source MIME {} is allowed.\", 
content_type);\n\n\n\n if !self.validate_mime(content_type) {\n\n debug!(\"MIME {} is not allowed.\", content_type);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n\n\n debug!(\"Parsing content MIME {} of URL {}.\", content_type, url);\n\n\n\n let mime = match content_type.parse() {\n\n Ok(mime) => mime,\n\n Err(_error) => {\n", "file_path": "src/client.rs", "rank": 23, "score": 23102.416297363372 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn test_validate_mime_sub_type() {\n\n let mime_regex = \"image/.+|audio/.+\";\n\n let content_mimes = vec![\"image/png\", \"audio/mpeg\"];\n\n\n\n let client = mock_client(mime_regex);\n\n\n\n for content_mime in content_mimes {\n\n assert_eq!(client.validate_mime(content_mime), true);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_validate_mime_invalid_exact_type() {\n\n let mime_regex = \"image/png|audio/mpeg\";\n\n let content_mimes = vec![\"image/jpeg\", \"audio/aac\"];\n\n\n", "file_path": "src/client.rs", "rank": 24, "score": 23102.24178647056 }, { "content": " debug!(\"Error parsing content MIME of URL {}.\", url);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n if self.max_size > 0 {\n\n debug!(\"Extracting content length of URL {}.\", url);\n\n\n\n let content_length = match HttpClient::extract_content_length(url, &content_response) {\n\n Ok(content_length) => content_length,\n\n Err(()) => {\n\n debug!(\"Error getting content length of URL {}.\", url);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n if content_length > self.max_size {\n\n debug!(\n\n \"Content length of URL {} exceeds the maximum allowed value of {} bytes.\",\n\n url,\n", "file_path": "src/client.rs", "rank": 25, "score": 23101.22225360487 }, { "content": " if let (Some(username), Some(password)) = (proxy_username, proxy_password) {\n\n debug!(\n\n \"Setting client HTTP proxy username to {} and password to {}.\",\n\n username, password\n\n );\n\n\n\n proxy = proxy.basic_auth(username, password);\n\n }\n\n\n\n client = client.proxy(proxy);\n\n }\n\n\n\n let client = match client.build() {\n\n Ok(client) => client,\n\n Err(_error) => {\n\n debug!(\"Error building HTTP client.\");\n\n return Err(());\n\n }\n\n };\n\n\n", "file_path": "src/client.rs", "rank": 26, "score": 23099.347318550677 }, { "content": " Err(_error) => {\n\n debug!(\"Error fetching URL {}.\", url);\n\n return Err(StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n };\n\n\n\n match content_response.status() {\n\n StatusCode::OK => (),\n\n StatusCode::NOT_FOUND => {\n\n debug!(\"Requested URL {} returned NOT FOUND status code.\", url);\n\n return Err(StatusCode::NOT_FOUND);\n\n }\n\n _ => {\n\n debug!(\"Requested URL {} returned non OK status code.\", url);\n\n return Err(StatusCode::NOT_FOUND);\n\n }\n\n };\n\n\n\n debug!(\"Extracting content MIME of URL {}.\", url);\n\n\n", "file_path": "src/client.rs", "rank": 27, "score": 23097.909775538017 }, { "content": " let client = mock_client(mime_regex);\n\n\n\n for content_mime in content_mimes {\n\n assert_eq!(client.validate_mime(content_mime), false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_validate_mime_invalid_sub_type() {\n\n let mime_regex = \"image/.+|audio/.+\";\n\n let content_mimes = vec![\"application/json\", \"text/plain\"];\n\n\n\n let client = mock_client(mime_regex);\n\n\n\n for content_mime in content_mimes {\n\n assert_eq!(client.validate_mime(content_mime), false);\n\n }\n\n }\n\n}\n", "file_path": "src/client.rs", "rank": 28, "score": 23097.542100278817 }, { "content": "# rcproxy\n\nrcproxy is a 
content proxy to securely serve HTTP assets via HTTPS. It is implemented in Rust and heavily inspired by [camo](https://github.com/atmos/camo) and [go-camo](https://github.com/cactus/go-camo).\n\n\n\n# Features\n\n- Supports Base64 and HEX encoded URLs\n\n- Enable or disable proxying of HTTPS assets\n\n- Allows setting MIME types that are allowed to be proxied\n\n- Allows source IP blacklists (from which no requests will be processed)\n\n- Supports for target regexp white- and blacklists (to control which content is being proxied)\n\n- Allows setting additional response headers\n\n- Source content can be restricted in file size (only assets up to the maximum file size are proxied)\n\n- Allows restrictions of maximum redirects while fetching sources\n\n- A maximum timeout for fetching content can be set\n\n- Usage of a proxy (additionally with username and password authentication) is supported for fetching content\n\n\n\n# TODOs, Nice to Haves\n\n- Evaluate different HTTP client and server frameworks in Rust. Maybe switch from Gotham.\n\n- Support streaming for larger content\n\n- Support SSL connections natively (without the need of a reverse proxy)\n\n- Implement caching\n\n- Environment variable support for additional headers (currently only supported as command line arguments)\n\n- Encapsulate server config as well as the client struct somehow, in order to get rid of static elements (and lazy static). Tricky because the client needs to be used in the HTTP handlers of the web framework and server configs need to be used in multiple places\n\n- Add if !cfg!(test) to exclude headers and ip blacklists evaluation from command line argument if running in test environment? Would allow referencing static map and vector in testing without having a separate variable in the test setup\n", "file_path": "readme.md", "rank": 29, "score": 14310.873430735837 }, { "content": "MIT License\n\n\n\nCopyright (c) 2021 Manuel Steiner\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n", "file_path": "license.md", "rank": 30, "score": 14294.139287197107 }, { "content": "use url::Url;\n\n\n\nlazy_static! {\n\n static ref CLIENT: HttpClient<'static> = {\n\n match HttpClient::new(\n\n OPT.max_redirects,\n\n OPT.timeout,\n\n OPT.max_size,\n\n &OPT.mime_regex,\n\n &OPT.proxy,\n\n &OPT.proxy_username,\n\n &OPT.proxy_password,\n\n ) {\n\n Ok(client) => client,\n\n Err(()) => {\n\n error!(\"Error initialising HTTP client. 
Exiting\");\n\n exit(-1);\n\n }\n\n }\n\n };\n", "file_path": "src/server.rs", "rank": 39, "score": 28.308226950879742 }, { "content": "\n\n static ref HEADERS: HeaderMap = {\n\n let mut map = HeaderMap::new();\n\n map.insert(SERVER, OPT.server_name.parse().unwrap());\n\n map.insert(X_CONTENT_TYPE_OPTIONS, \"nosniff\".parse().unwrap());\n\n map.insert(X_XSS_PROTECTION, \"1; mode=block\".parse().unwrap());\n\n map.insert(CONTENT_SECURITY_POLICY, \"default-src 'none'; img-src data:; style-src 'unsafe-inline'\".parse().unwrap());\n\n\n\n if let Some(headers) = &OPT.headers {\n\n for (key, value) in headers {\n\n map.insert(key.as_str(), value.parse().unwrap());\n\n }\n\n }\n\n\n\n map\n\n };\n\n\n\n static ref IP_BLACKLIST: Vec<IpNet> = {\n\n let mut list = vec![\n\n // \"127.0.0.0/8\".parse::<IpNet>().unwrap(), // loopback\n", "file_path": "src/server.rs", "rank": 41, "score": 26.373264660085002 }, { "content": " validate_content_url(url, &filters_allow, &filters_deny, default_filter),\n\n Err(())\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address() {\n\n let client_address: IpAddr = \"1.1.1.1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), true);\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address_ip4_loopback() {\n\n let client_address: IpAddr = \"127.0.0.1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), true);\n\n }\n", "file_path": "src/server.rs", "rank": 42, "score": 24.054080656298833 }, { "content": " if !OPT.filters_allow.is_empty() {\n\n debug!(\"Allow filter rules: {}\", &OPT.filters_allow.join(\", \"));\n\n }\n\n if !OPT.filters_deny.is_empty() {\n\n debug!(\"Deny filter rules: {}\", &OPT.filters_deny.join(\", \"));\n\n }\n\n debug!(\"Default filter rule: {}\", OPT.filter_default);\n\n debug!(\"Key: {}\", OPT.key);\n\n debug!(\"Allow HTTPS: {}\", OPT.allow_https);\n\n debug!(\"Mimes: {}\", OPT.mime.join(\", \"));\n\n debug!(\"Max size: {}\", OPT.max_size.to_string());\n\n debug!(\"Max redirects: {}\", OPT.max_redirects.to_string());\n\n debug!(\"Timeout: {}\", OPT.timeout.to_string());\n\n if let Some(proxy) = &OPT.proxy {\n\n debug!(\"Proxy: {}\", proxy);\n\n }\n\n if let (Some(username), Some(password)) = (&OPT.proxy_username, &OPT.proxy_password) {\n\n debug!(\"Proxy username: {}\", username);\n\n debug!(\"Proxy password: {}\", password);\n\n }\n\n debug!(\"Log level: {}\", OPT.log_level.to_string());\n\n debug!(\"---\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 21.38577901415453 }, { "content": " #[test]\n\n fn test_validate_content_url_allow() {\n\n let url = \"https://www.example.com/image.png\";\n\n let filters_allow = vec![\".*example.com/.*\".to_string()];\n\n let filters_deny = vec![\".*example2.com/.*\".to_string()];\n\n let default_filter = DefaultFilterRule::Deny;\n\n\n\n assert_eq!(\n\n validate_content_url(url, &filters_allow, &filters_deny, default_filter),\n\n Ok(true)\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_validate_content_url_deny() {\n\n let url = \"https://www.example2.com/image.png\";\n\n let filters_allow = vec![\".*example.com/.*\".to_string()];\n\n let filters_deny = vec![\".*example2.com/.*\".to_string()];\n\n let default_filter = DefaultFilterRule::Allow;\n\n\n", "file_path": "src/server.rs", "rank": 46, "score": 21.293598154077706 }, { "content": " debug!(\n\n \"Handling content proxy request from client {} (User Agent: {}).\",\n\n 
client_socket_address,\n\n headers[\"User-Agent\"].to_str().unwrap_or(\"?\")\n\n );\n\n\n\n debug!(\n\n \"Checking request URL validity (Encoded: {}).\",\n\n &extractor.url\n\n );\n\n\n\n let url = match decode_and_validate_url(&extractor.url, OPT.allow_https) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"The requested URL is invalid.\");\n\n return Ok((state, response_bad_request));\n\n }\n\n };\n\n\n\n debug!(\"Checking content URL validity ({})\", url);\n", "file_path": "src/server.rs", "rank": 47, "score": 21.028639688147315 }, { "content": " fn test_validate_content_url_deny_default() {\n\n let url = \"https://www.example3.com/image.png\";\n\n let filters_allow = vec![\".*example.com/.*\".to_string()];\n\n let filters_deny = vec![\".*example2.com/.*\".to_string()];\n\n let default_filter = DefaultFilterRule::Deny;\n\n\n\n assert_eq!(\n\n validate_content_url(url, &filters_allow, &filters_deny, default_filter),\n\n Ok(false)\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_validate_content_url_error() {\n\n let url = \"https://www.example3.com/image.png\";\n\n let filters_allow = vec![\"[\".to_string()];\n\n let filters_deny = vec![];\n\n let default_filter = DefaultFilterRule::Allow;\n\n\n\n assert_eq!(\n", "file_path": "src/server.rs", "rank": 48, "score": 20.79206250348776 }, { "content": " info!(\n\n \"Client {} (User Agent: {}) requesting URL {}.\",\n\n headers[\"Host\"].to_str().unwrap_or(\"?\"),\n\n headers[\"User-Agent\"].to_str().unwrap_or(\"?\"),\n\n url\n\n );\n\n\n\n debug!(\n\n \"Checking HMAC validity (URL: {}, HMAC: {}).\",\n\n url, &extractor.digest\n\n );\n\n\n\n if !validate_hmac(&url, &extractor.digest, &OPT.key) {\n\n debug!(\"The HMAC validation failed.\");\n\n return Ok((state, response_bad_request));\n\n }\n\n\n\n let (mime, content) = match CLIENT.get(&url).await {\n\n Ok((mime, content)) => (mime, content),\n\n Err(StatusCode::NOT_FOUND) => return Ok((state, response_not_found)),\n", "file_path": "src/server.rs", "rank": 50, "score": 20.309891075897507 }, { "content": " assert_eq!(\n\n validate_content_url(url, &filters_allow, &filters_deny, default_filter),\n\n Ok(false)\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_validate_content_url_allow_default() {\n\n let url = \"https://www.example3.com/image.png\";\n\n let filters_allow = vec![\".*example.com/.*\".to_string()];\n\n let filters_deny = vec![\".*example2.com/.*\".to_string()];\n\n let default_filter = DefaultFilterRule::Allow;\n\n\n\n assert_eq!(\n\n validate_content_url(url, &filters_allow, &filters_deny, default_filter),\n\n Ok(true)\n\n )\n\n }\n\n\n\n #[test]\n", "file_path": "src/server.rs", "rank": 51, "score": 19.88040672419669 }, { "content": "\n\n #[test]\n\n fn test_validate_client_address_ip6_loopback() {\n\n let client_address: IpAddr = \"::1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), true);\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address_ip4_link_local() {\n\n let client_address: IpAddr = \"169.254.0.1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address_rfc1918() {\n\n let mut client_addresses: Vec<IpAddr> = Vec::new();\n", "file_path": "src/server.rs", "rank": 52, "score": 19.78476154197582 }, { "content": "\n\n for deny in filter_deny {\n\n let regex = match Regex::new(deny) {\n\n Ok(regex) => regex,\n\n Err(_error) => {\n\n debug!(\"Discovered 
invalid regex during parsing.\");\n\n return Err(());\n\n }\n\n };\n\n\n\n if regex.is_match(url) {\n\n debug!(\"Deny filter {} contains URL {}\", deny, url);\n\n return Ok(false);\n\n }\n\n }\n\n\n\n debug!(\n\n \"No filter matched. Applying default allow rule: {}\",\n\n filter_default\n\n );\n\n\n\n match filter_default {\n\n DefaultFilterRule::Allow => Ok(true),\n\n DefaultFilterRule::Deny => Ok(false),\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 53, "score": 19.53926210857265 }, { "content": "\n\n if !OPT.filters_allow.is_empty() || !OPT.filters_deny.is_empty() {\n\n match validate_content_url(\n\n &url,\n\n &OPT.filters_allow,\n\n &OPT.filters_deny,\n\n OPT.filter_default,\n\n ) {\n\n Ok(true) => (),\n\n Ok(false) => {\n\n debug!(\"Content URL {} is not allowed.\", url);\n\n return Ok((state, response_not_found));\n\n }\n\n Err(_) => {\n\n debug!(\"Error checking content URL validity.\",);\n\n return Ok((state, response_internal_server_error));\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/server.rs", "rank": 54, "score": 19.300519106321495 }, { "content": " return Err(());\n\n }\n\n };\n\n\n\n let parsed_url = match Url::parse(&url) {\n\n Ok(url) => url,\n\n Err(_error) => {\n\n debug!(\"Malformed request (URL parameter is not a valid URL).\");\n\n return Err(());\n\n }\n\n };\n\n\n\n return match parsed_url.scheme.as_str() {\n\n \"http\" => Ok(url.to_string()),\n\n \"https\" if allow_https => Ok(url.to_string()),\n\n \"https\" if !allow_https => {\n\n debug!(\"Invalid request (Proxying HTTPS URLs is disabled).\");\n\n Err(())\n\n }\n\n _ => {\n\n debug!(\"Invalid request (URL parameter is neither HTTP nor HTTPS).\");\n\n Err(())\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 57, "score": 18.731331272086322 }, { "content": "use crate::options::DefaultFilterRule;\n\nuse crate::options::OPT;\n\nuse crate::HttpClient;\n\nuse gotham::handler::HandlerResult;\n\nuse gotham::helpers::http::response::create_response;\n\nuse gotham::hyper::header::{\n\n CONTENT_SECURITY_POLICY, SERVER, X_CONTENT_TYPE_OPTIONS, X_XSS_PROTECTION,\n\n};\n\nuse gotham::hyper::{HeaderMap, StatusCode};\n\nuse gotham::router::builder::*;\n\nuse gotham::router::Router;\n\nuse gotham::state::{client_addr, FromState, State};\n\nuse hmac::{Hmac, Mac, NewMac};\n\nuse ipnet::IpNet;\n\nuse log::{debug, error, info};\n\nuse regex::Regex;\n\nuse sha2::Sha256;\n\nuse std::net::IpAddr;\n\nuse std::process::exit;\n\nuse std::str;\n", "file_path": "src/server.rs", "rank": 58, "score": 18.330415461953073 }, { "content": " Err(StatusCode::INTERNAL_SERVER_ERROR) => {\n\n return Ok((state, response_internal_server_error))\n\n }\n\n Err(_error) => return Ok((state, response_internal_server_error)),\n\n };\n\n\n\n debug!(\n\n \"Serving requested URL {} to client {}.\",\n\n url,\n\n headers[\"Host\"].to_str().unwrap_or(\"?\")\n\n );\n\n\n\n let mut response = create_response(&state, StatusCode::OK, mime, content);\n\n\n\n let response_headers = response.headers_mut();\n\n add_headers(response_headers);\n\n\n\n Ok((state, response))\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 59, "score": 17.90393565304348 }, { "content": " let client_socket_address = match client_addr(&state) {\n\n Some(address) => address,\n\n None => {\n\n debug!(\"Error getting request client address.\");\n\n return Ok((state, response_bad_request));\n\n }\n\n };\n\n\n\n let client_ip_address = client_socket_address.ip();\n\n\n\n debug!(\n\n \"Checking request client address validity ({})\",\n\n client_ip_address\n\n );\n\n\n\n if 
!validate_client_address(&client_ip_address, &IP_BLACKLIST) {\n\n debug!(\"The client address validation failed.\");\n\n return Ok((state, response_bad_request));\n\n }\n\n\n", "file_path": "src/server.rs", "rank": 60, "score": 17.897607270824473 }, { "content": "#[macro_use]\n\nextern crate gotham_derive;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nmod client;\n\nmod options;\n\nmod server;\n\n\n\nuse crate::client::HttpClient;\n\nuse futures::prelude::*;\n\nuse log::{debug, info, LevelFilter};\n\nuse options::OPT;\n\nuse tokio::signal::unix::{signal, SignalKind};\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n env_logger::builder()\n", "file_path": "src/main.rs", "rank": 61, "score": 17.45602428326993 }, { "content": " fn test_validate_client_address_deprecated_ip6_site_local() {\n\n let client_address: IpAddr = \"fec0::1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address_ip6_ula() {\n\n let client_address: IpAddr = \"fc00::1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n\n\n #[test]\n\n fn test_validate_client_address_ip4_mapped_ip6() {\n\n let client_address: IpAddr = \"::ffff:0:2\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n", "file_path": "src/server.rs", "rank": 63, "score": 16.30714162236656 }, { "content": " assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_base64_http() {\n\n let decoded_url = \"http://www.google.com\";\n\n let encoded_url = \"aHR0cDovL3d3dy5nb29nbGUuY29t\";\n\n let allow_https = true;\n\n\n\n assert_eq!(\n\n decode_and_validate_url(&encoded_url, allow_https),\n\n Ok(decoded_url.to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_base64_https() {\n\n let decoded_url = \"https://www.google.com\";\n\n let encoded_url = \"aHR0cHM6Ly93d3cuZ29vZ2xlLmNvbQ\";\n\n let allow_https = true;\n", "file_path": "src/server.rs", "rank": 64, "score": 15.835728928940346 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn create_ip_blacklist() -> Vec<IpNet> {\n\n vec![\n\n // \"127.0.0.0/8\".parse::<IpNet>().unwrap(), // loopback\n\n \"169.254.0.0/16\".parse::<IpNet>().unwrap(), // ipv4 link local\n\n \"10.0.0.0/8\".parse::<IpNet>().unwrap(), // rfc1918\n\n \"172.16.0.0/12\".parse::<IpNet>().unwrap(), // rfc1918\n\n \"192.168.0.0/16\".parse::<IpNet>().unwrap(), // rfc1918\n\n // \"::1/128\".parse::<IpNet>().unwrap(), // ipv6 loopback\n\n \"fe80::/10\".parse::<IpNet>().unwrap(), // ipv6 link local\n\n \"fec0::/10\".parse::<IpNet>().unwrap(), // deprecated ipv6 site-local\n\n \"fc00::/7\".parse::<IpNet>().unwrap(), // ipv6 ULA\n\n \"::ffff:0:0/96\".parse::<IpNet>().unwrap(), // IPv4-mapped IPv6 address\n\n ]\n\n }\n\n\n", "file_path": "src/server.rs", "rank": 65, "score": 14.535194588800607 }, { "content": " client_addresses.push(\"10.0.0.1\".parse().unwrap());\n\n client_addresses.push(\"172.16.0.1\".parse().unwrap());\n\n client_addresses.push(\"192.168.0.1\".parse().unwrap());\n\n\n\n let blacklist = create_ip_blacklist();\n\n\n\n for client_address in client_addresses.iter() {\n\n assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn 
test_validate_client_address_ip6_link_local() {\n\n let client_address: IpAddr = \"fe80::1\".parse().unwrap();\n\n let blacklist = create_ip_blacklist();\n\n\n\n assert_eq!(validate_client_address(&client_address, &blacklist), false);\n\n }\n\n\n\n #[test]\n", "file_path": "src/server.rs", "rank": 66, "score": 14.40324303243581 }, { "content": " let allow_https = false;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_validate_hmac() {\n\n let url = \"https://www.google.com\";\n\n let digest = \"5fe8a55870ffb6903ac0987bf70245b8c5e23eca66cd794c61788266c8972c1a\";\n\n let key = \"secret\";\n\n\n\n assert_eq!(validate_hmac(&url, &digest, &key), true);\n\n }\n\n\n\n #[test]\n\n fn test_validate_hmac_invalid_hex() {\n\n let url = \"https://www.google.com\";\n\n // changed last character to invalid hex\n\n let digest = \"5fe8a55870ffb6903ac0987bf70245b8c5e23eca66cd794c61788266c8972c1k\";\n\n let key = \"secret\";\n", "file_path": "src/server.rs", "rank": 67, "score": 12.874541603012561 }, { "content": " .filter_level(LevelFilter::from(OPT.log_level))\n\n .init();\n\n\n\n print_configuration();\n\n\n\n debug!(\"Setting up signal handlers.\");\n\n\n\n let signals = signals();\n\n\n\n info!(\"Starting content proxy server.\");\n\n info!(\"Listening for requests on {}\", OPT.address);\n\n\n\n let server = gotham::init_server(&OPT.address, server::setup());\n\n\n\n future::select(server.boxed(), signals.boxed()).await;\n\n println!(\"Shutting down gracefully.\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 68, "score": 12.286569484896319 }, { "content": " \"169.254.0.0/16\".parse::<IpNet>().unwrap(), // ipv4 link local\n\n \"10.0.0.0/8\".parse::<IpNet>().unwrap(), // rfc1918\n\n \"172.16.0.0/12\".parse::<IpNet>().unwrap(), // rfc1918\n\n \"192.168.0.0/16\".parse::<IpNet>().unwrap(), // rfc1918\n\n // list.push(\"::1/128\".parse::<IpNet>().unwrap(), // ipv6 loopback\n\n \"fe80::/10\".parse::<IpNet>().unwrap(), // ipv6 link local\n\n \"fec0::/10\".parse::<IpNet>().unwrap(), // deprecated ipv6 site-local\n\n \"fc00::/7\".parse::<IpNet>().unwrap(), // ipv6 ULA\n\n \"::ffff:0:0/96\".parse::<IpNet>().unwrap(), // IPv4-mapped IPv6 address\n\n ];\n\n\n\n if let Some(blacklist) = &OPT.blacklist_ip {\n\n for block in blacklist {\n\n list.push(block.parse::<IpNet>().unwrap());\n\n }\n\n }\n\n\n\n list\n\n };\n\n}\n\n\n\n#[derive(Deserialize, StateData, StaticResponseExtender)]\n", "file_path": "src/server.rs", "rank": 69, "score": 12.201867127822267 }, { "content": " let allow_https = true;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_base64_invalid_url_https_disabled() {\n\n // \"https://www.google.com\";\n\n let encoded_url = \"aHR0cHM6Ly93d3cuZ29vZ2xlLmNvbQ\";\n\n let allow_https = false;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_hex_http() {\n\n let decoded_url = \"http://www.google.com\";\n\n let encoded_url = \"687474703A2F2F7777772E676F6F676C652E636F6D\";\n\n let allow_https = true;\n\n\n", "file_path": "src/server.rs", "rank": 70, "score": 11.443900283023517 }, { "content": "\n\n assert_eq!(\n\n decode_and_validate_url(&encoded_url, allow_https),\n\n Ok(decoded_url.to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_base64_invalid_base64() {\n\n // \"https://www.google.com\";\n\n let encoded_url = 
\"aHR0cHM6Ly93d3cuZ29vZ2xlLmNvbQi\"; // last character added\n\n let allow_https = true;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_base64_invalid_url_format() {\n\n // \"ftp://www.google.com\";\n\n let encoded_url = \"ZnRwOi8vd3d3Lmdvb2dsZS5jb20\";\n", "file_path": "src/server.rs", "rank": 71, "score": 11.324505987306244 }, { "content": " assert_eq!(\n\n decode_and_validate_url(&encoded_url, allow_https),\n\n Ok(decoded_url.to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_hex_https() {\n\n let decoded_url = \"https://www.google.com\";\n\n let encoded_url = \"68747470733A2F2F7777772E676F6F676C652E636F6D\";\n\n let allow_https = true;\n\n\n\n assert_eq!(\n\n decode_and_validate_url(&encoded_url, allow_https),\n\n Ok(decoded_url.to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_hex_invalid_hex() {\n", "file_path": "src/server.rs", "rank": 72, "score": 11.235750268631486 }, { "content": " let mut response_not_found = create_response(\n\n &state,\n\n StatusCode::NOT_FOUND,\n\n mime::TEXT_PLAIN,\n\n \"404 Not Found\",\n\n );\n\n\n\n let response_not_found_headers = response_not_found.headers_mut();\n\n add_headers(response_not_found_headers);\n\n\n\n let mut response_internal_server_error = create_response(\n\n &state,\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n mime::TEXT_PLAIN,\n\n \"500 Internal Server Error\",\n\n );\n\n\n\n let response_internal_server_error_headers = response_internal_server_error.headers_mut();\n\n add_headers(response_internal_server_error_headers);\n\n\n", "file_path": "src/server.rs", "rank": 73, "score": 10.783608588770122 }, { "content": " // \"https://www.google.com\";\n\n let encoded_url = \"68747470733A2F2F7777772E676F6F676C652E636F6DG\"; // last character added\n\n let allow_https = true;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_hex_invalid_url_format() {\n\n // \"ftp://www.google.com\";\n\n let encoded_url = \"6674703A2F2F7777772E676F6F676C652E636F6D\";\n\n let allow_https = true;\n\n\n\n assert_eq!(decode_and_validate_url(&encoded_url, allow_https), Err(()));\n\n }\n\n\n\n #[test]\n\n fn test_decode_and_validate_url_hex_invalid_url_https_disabled() {\n\n // \"https://www.google.com\";\n\n let encoded_url = \"68747470733A2F2F7777772E676F6F676C652E636F6D\";\n", "file_path": "src/server.rs", "rank": 74, "score": 10.585318930246615 }, { "content": "\n\n assert_eq!(validate_hmac(&url, &digest, &key), false);\n\n }\n\n\n\n #[test]\n\n fn test_validate_hmac_invalid_code() {\n\n let url = \"https://www.google.com\";\n\n // changed last character to a different value\n\n let digest = \"5fe8a55870ffb6903ac0987bf70245b8c5e23eca66cd794c61788266c8972c1b\";\n\n let key = \"secret\";\n\n\n\n assert_eq!(validate_hmac(&url, &digest, &key), false);\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 75, "score": 9.684560475986393 } ]
Rust
packages/vm/src/limited.rs
venkattejaRaavi/cosmwasm
73c72d4eccd5028e18fd60e77f44d975647a13bd
use std::collections::{BTreeSet, HashSet}; use std::iter::FromIterator; pub trait LimitedDisplay { fn to_string_limited(&self, max_length: usize) -> String; } impl<E: Ord + AsRef<str>> LimitedDisplay for BTreeSet<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "{", "}") } } impl<E: Ord + AsRef<str>> LimitedDisplay for HashSet<E> { fn to_string_limited(&self, max_length: usize) -> String { let sorted = BTreeSet::from_iter(self); sorted.to_string_limited(max_length) } } impl<E: AsRef<str>> LimitedDisplay for Vec<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "[", "]") } } fn collection_to_string_limited<E: AsRef<str>, I: ExactSizeIterator<Item = E>>( iter: I, max_length: usize, opening: &str, closing: &str, ) -> String { let elements_count = iter.len(); let mut out = String::with_capacity(max_length * 130 / 100); let mut first = true; out.push_str(opening); let mut lengths_stack = Vec::<usize>::new(); for element in iter { lengths_stack.push(out.len()); if first { out.push('"'); first = false; } else { out.push_str(", \""); } out.push_str(element.as_ref()); out.push('"'); if out.len() > max_length { break; }; } if out.len() + closing.len() <= max_length { out.push_str(closing); out } else { loop { let previous_length = lengths_stack .pop() .expect("Cannot remove hide enough elements to fit in length limit."); let skipped = elements_count - lengths_stack.len(); let remaining = elements_count - skipped; let skipped_text = if remaining == 0 { format!("... {} elements", skipped) } else { format!(", ... {} more", skipped) }; if previous_length + skipped_text.len() + closing.len() <= max_length { out.truncate(previous_length); out.push_str(&skipped_text); out.push_str(closing); return out; } } } } #[cfg(test)] mod test { use super::*; #[test] fn works_for_btreeset() { let set = BTreeSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = BTreeSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] fn works_for_hashset() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 
1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_empty() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(1), "{}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_nonempty() { let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!(fruits.to_string_limited(15), "{... 3 elements}"); } #[test] fn works_for_vectors() { let list = Vec::<String>::new(); assert_eq!(list.to_string_limited(100), "[]"); assert_eq!(list.to_string_limited(20), "[]"); assert_eq!(list.to_string_limited(2), "[]"); let fruits = vec![ "banana".to_string(), "apple".to_string(), "watermelon".to_string(), ]; assert_eq!( fruits.to_string_limited(100), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(33), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(32), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!( fruits.to_string_limited(31), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!(fruits.to_string_limited(30), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(22), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(21), "[... 3 elements]"); assert_eq!(fruits.to_string_limited(16), "[... 3 elements]"); } }
use std::collections::{BTreeSet, HashSet}; use std::iter::FromIterator; pub trait LimitedDisplay { fn to_string_limited(&self, max_length: usize) -> String; } impl<E: Ord + AsRef<str>> LimitedDisplay for BTreeSet<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "{", "}") } } impl<E: Ord + AsRef<str>> LimitedDisplay for HashSet<E> { fn to_string_limited(&self, max_length: usize) -> String { let sorted = BTreeSet::from_iter(self); sorted.to_string_limited(max_length) } } impl<E: AsRef<str>> LimitedDisplay for Vec<E> { fn to_string_limited(&self, max_length: usize) -> String { collection_to_string_limited(self.iter(), max_length, "[", "]") } } fn collection_to_string_limited<E: AsRef<str>, I: ExactSizeIterator<Item = E>>( iter: I, max_length: usize, opening: &str, closing: &str, ) -> String { let elements_count = iter.len(); let mut out = String::with_capacity(max_length * 130 / 100); let mut first = true; out.push_str(opening); let mut lengths_stack = Vec::<usize>::new(); for element in iter { lengths_stack.push(out.len()); if first { out.push('"'); first = false; } else { out.push_str(", \""); } out.push_str(element.as_ref()); out.push('"'); if out.len() > max_length { break; }; } if out.len() + closing.len() <= max_length { out.push_str(closing); out } else { loop { let previous_length = lengths_stack .pop() .expect("Cannot remove hide enough elements to fit in length limit."); let skipped = elements_count - lengths_stack.len(); let remaining = elements_count - skipped; let skipped_text = if remaining == 0 { format!("... {} elements", skipped) } else { format!(", ... {} more", skipped) }; if previous_length + skipped_text.len() + closing.len() <= max_length { out.truncate(previous_length); out.push_str(&skipped_text); out.push_str(closing); return out; } } } } #[cfg(test)] mod test { use super::*; #[test] fn works_for_btreeset() { let set = BTreeSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = BTreeSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .
#[test] fn works_for_hashset() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(100), "{}"); assert_eq!(set.to_string_limited(20), "{}"); assert_eq!(set.to_string_limited(2), "{}"); let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_empty() { let set = HashSet::<String>::new(); assert_eq!(set.to_string_limited(1), "{}"); } #[test] #[should_panic(expected = "Cannot remove hide enough elements to fit in length limit.")] fn panics_if_limit_is_too_small_nonempty() { let fruits = HashSet::from_iter( [ "watermelon".to_string(), "apple".to_string(), "banana".to_string(), ] .iter() .cloned(), ); assert_eq!(fruits.to_string_limited(15), "{... 3 elements}"); } #[test] fn works_for_vectors() { let list = Vec::<String>::new(); assert_eq!(list.to_string_limited(100), "[]"); assert_eq!(list.to_string_limited(20), "[]"); assert_eq!(list.to_string_limited(2), "[]"); let fruits = vec![ "banana".to_string(), "apple".to_string(), "watermelon".to_string(), ]; assert_eq!( fruits.to_string_limited(100), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(33), "[\"banana\", \"apple\", \"watermelon\"]" ); assert_eq!( fruits.to_string_limited(32), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!( fruits.to_string_limited(31), "[\"banana\", \"apple\", ... 1 more]" ); assert_eq!(fruits.to_string_limited(30), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(22), "[\"banana\", ... 2 more]"); assert_eq!(fruits.to_string_limited(21), "[... 3 elements]"); assert_eq!(fruits.to_string_limited(16), "[... 3 elements]"); } }
cloned(), ); assert_eq!( fruits.to_string_limited(100), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(33), "{\"apple\", \"banana\", \"watermelon\"}" ); assert_eq!( fruits.to_string_limited(32), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!( fruits.to_string_limited(31), "{\"apple\", \"banana\", ... 1 more}" ); assert_eq!(fruits.to_string_limited(30), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(21), "{\"apple\", ... 2 more}"); assert_eq!(fruits.to_string_limited(20), "{... 3 elements}"); assert_eq!(fruits.to_string_limited(16), "{... 3 elements}"); }
function_block-function_prefix_line
[ { "content": "/// Takes a comma-separated string, splits it by commas, removes empty elements and returns a set of features.\n\n/// This can be used e.g. to initialize the cache.\n\npub fn features_from_csv(csv: &str) -> HashSet<String> {\n\n HashSet::from_iter(\n\n csv.split(',')\n\n .map(|x| x.trim().to_string())\n\n .filter(|f| !f.is_empty()),\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 1, "score": 322409.22090064036 }, { "content": "pub fn to_snake_case(name: &str) -> String {\n\n let mut out = String::new();\n\n for (index, ch) in name.char_indices() {\n\n if index != 0 && ch.is_uppercase() {\n\n out.push('_');\n\n }\n\n out.push(ch.to_ascii_lowercase());\n\n }\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn to_snake_case_leaves_snake_case_untouched() {\n\n assert_eq!(to_snake_case(\"\"), \"\");\n\n assert_eq!(to_snake_case(\"a\"), \"a\");\n\n assert_eq!(to_snake_case(\"abc\"), \"abc\");\n", "file_path": "packages/schema/src/casing.rs", "rank": 2, "score": 267444.07721549156 }, { "content": "/// Implementation for check_wasm, based on static analysis of the bytecode.\n\n/// This is used for code upload, to perform check before compiling the Wasm.\n\npub fn required_features_from_module(module: &Module) -> HashSet<String> {\n\n match module.export_section() {\n\n None => HashSet::new(),\n\n Some(export_section) => {\n\n HashSet::from_iter(export_section.entries().iter().filter_map(|entry| {\n\n if let Internal::Function(_) = entry.internal() {\n\n let name = entry.field();\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let (_, required_feature) = name.split_at(REQUIRES_PREFIX.len());\n\n return Some(required_feature.to_string());\n\n }\n\n }\n\n None\n\n }))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "packages/vm/src/features.rs", "rank": 3, "score": 252207.20592999458 }, { "content": "pub fn required_features_from_wasmer_instance(wasmer_instance: &WasmerInstance) -> HashSet<String> {\n\n HashSet::from_iter(wasmer_instance.exports().filter_map(|(mut name, export)| {\n\n if let Export::Function { .. } = export {\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let required_feature = name.split_off(REQUIRES_PREFIX.len());\n\n return Some(required_feature);\n\n }\n\n }\n\n None\n\n }))\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 4, "score": 243427.62575104617 }, { "content": "/// Creates a memory region of capacity `size` and length 0. 
Returns a pointer to the Region.\n\n/// This is the same as the `allocate` export, but designed to be called internally.\n\npub fn alloc(size: usize) -> *mut Region {\n\n let data: Vec<u8> = Vec::with_capacity(size);\n\n let data_ptr = data.as_ptr() as usize;\n\n\n\n let region = build_region_from_components(\n\n u32::try_from(data_ptr).expect(\"pointer doesn't fit in u32\"),\n\n u32::try_from(data.capacity()).expect(\"capacity doesn't fit in u32\"),\n\n 0,\n\n );\n\n mem::forget(data);\n\n Box::into_raw(region)\n\n}\n\n\n", "file_path": "packages/std/src/memory.rs", "rank": 6, "score": 231709.69896173058 }, { "content": "/// Checks if the data is valid wasm and compatibility with the CosmWasm API (imports and exports)\n\npub fn check_wasm(wasm_code: &[u8], supported_features: &HashSet<String>) -> VmResult<()> {\n\n let module = deserialize(wasm_code)?;\n\n check_wasm_memories(&module)?;\n\n check_wasm_exports(&module)?;\n\n check_wasm_imports(&module, SUPPORTED_IMPORTS)?;\n\n check_wasm_features(&module, supported_features)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/compatibility.rs", "rank": 7, "score": 231548.47604069224 }, { "content": "// use this if you want to override the auto-detected name of the object.\n\n// very useful when creating an alias for a type-alias.\n\npub fn export_schema_with_title(schema: &mut RootSchema, out_dir: &PathBuf, title: &str) {\n\n // set the title explicitly on the schemas metadata\n\n let metadata = &mut schema.schema.metadata;\n\n if let Some(data) = metadata {\n\n data.title = Some(title.to_string());\n\n }\n\n write_schema(schema, out_dir, &title);\n\n}\n\n\n", "file_path": "packages/schema/src/export.rs", "rank": 8, "score": 227044.75805437163 }, { "content": "pub fn setup_context<S: Storage, Q: Querier>(gas_limit: u64) -> (*mut c_void, fn(*mut c_void)) {\n\n (\n\n create_unmanaged_context_data::<S, Q>(gas_limit),\n\n destroy_unmanaged_context_data::<S, Q>,\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 9, "score": 226440.66604162107 }, { "content": "/// Set the amount of gas units that can be used in the context.\n\npub fn set_gas_left(_ctx: &mut Ctx, _amount: u64) {}\n\n\n", "file_path": "packages/vm/src/backends/cranelift.rs", "rank": 10, "score": 215173.416502956 }, { "content": "/// Set the amount of gas units that can be used in the context.\n\npub fn set_gas_left(ctx: &mut Ctx, amount: u64) {\n\n if amount > MAX_GAS_LIMIT {\n\n panic!(\n\n \"Attempted to set gas limit larger than max gas limit (got: {}; maximum: {}).\",\n\n amount, MAX_GAS_LIMIT\n\n );\n\n } else {\n\n let used = MAX_GAS_LIMIT - amount;\n\n metering::set_points_used_ctx(ctx, used);\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/backends/singlepass.rs", "rank": 11, "score": 215173.416502956 }, { "content": "pub fn mock_instance_with_gas_limit(\n\n wasm: &[u8],\n\n gas_limit: u64,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n gas_limit,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MockInstanceOptions<'a> {\n\n // dependencies\n\n pub balances: &'a [(&'a HumanAddr, &'a [Coin])],\n\n /// This option is merged into balances and might override an existing value\n\n pub contract_balance: Option<&'a [Coin]>,\n\n /// When set, all calls to the API fail with BackendError::Unknown containing this message\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 12, "score": 212750.55983439353 }, { "content": "pub fn 
compiler_for_backend(backend: &str) -> Option<Box<dyn Compiler>> {\n\n match backend {\n\n #[cfg(any(feature = \"cranelift\", feature = \"default-cranelift\"))]\n\n \"cranelift\" => Some(cranelift::compiler()),\n\n\n\n #[cfg(any(feature = \"singlepass\", feature = \"default-singlepass\"))]\n\n \"singlepass\" => Some(singlepass::compiler()),\n\n\n\n _ => None,\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InsufficientGasLeft;\n\n\n", "file_path": "packages/vm/src/backends/mod.rs", "rank": 13, "score": 212531.55508570583 }, { "content": "/// do_query should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\n///\n\n/// - `M`: message type for request\n\n/// - `E`: error type for responses\n\npub fn do_query<M, E>(\n\n query_fn: &dyn Fn(Deps, Env, M) -> Result<QueryResponse, E>,\n\n env_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n E: ToString,\n\n{\n\n let res = _do_query(query_fn, env_ptr as *mut Region, msg_ptr as *mut Region);\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 14, "score": 207109.93739230896 }, { "content": "/// do_init should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\n///\n\n/// - `M`: message type for request\n\n/// - `C`: custom response message type (see CosmosMsg)\n\n/// - `E`: error type for responses\n\npub fn do_init<M, C, E>(\n\n init_fn: &dyn Fn(DepsMut, Env, MessageInfo, M) -> Result<InitResponse<C>, E>,\n\n env_ptr: u32,\n\n info_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n C: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n E: ToString,\n\n{\n\n let res = _do_init(\n\n init_fn,\n\n env_ptr as *mut Region,\n\n info_ptr as *mut Region,\n\n msg_ptr as *mut Region,\n\n );\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 15, "score": 200047.28033783176 }, { "content": "/// do_migrate should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\n///\n\n/// - `M`: message type for request\n\n/// - `C`: custom response message type (see CosmosMsg)\n\n/// - `E`: error type for responses\n\npub fn do_migrate<M, C, E>(\n\n migrate_fn: &dyn Fn(DepsMut, Env, MessageInfo, M) -> Result<MigrateResponse<C>, E>,\n\n env_ptr: u32,\n\n info_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n C: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n E: ToString,\n\n{\n\n let res = _do_migrate(\n\n migrate_fn,\n\n env_ptr as *mut Region,\n\n info_ptr as *mut Region,\n\n msg_ptr as *mut Region,\n\n );\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 16, "score": 200047.28033783176 }, { "content": "/// do_handle should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\n///\n\n/// - `M`: message type for request\n\n/// - `C`: custom response message type (see CosmosMsg)\n\n/// - `E`: error type for responses\n\npub fn do_handle<M, C, E>(\n\n handle_fn: &dyn Fn(DepsMut, Env, MessageInfo, M) -> Result<HandleResponse<C>, E>,\n\n env_ptr: u32,\n\n info_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n C: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n E: ToString,\n\n{\n\n let res = 
_do_handle(\n\n handle_fn,\n\n env_ptr as *mut Region,\n\n info_ptr as *mut Region,\n\n msg_ptr as *mut Region,\n\n );\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 17, "score": 200047.28033783176 }, { "content": "/// Decreases gas left by the given amount.\n\n/// If the amount exceeds the available gas, the remaining gas is set to 0 and\n\n/// an InsufficientGasLeft error is returned.\n\npub fn decrease_gas_left(ctx: &mut Ctx, amount: u64) -> Result<(), InsufficientGasLeft> {\n\n let remaining = get_gas_left(ctx);\n\n if amount > remaining {\n\n set_gas_left(ctx, 0);\n\n Err(InsufficientGasLeft)\n\n } else {\n\n set_gas_left(ctx, remaining - amount);\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"default-cranelift\")]\n\npub use cranelift::{compile, get_gas_left, set_gas_left, BACKEND_NAME};\n\n\n\n#[cfg(feature = \"default-singlepass\")]\n\npub use singlepass::{compile, get_gas_left, set_gas_left, BACKEND_NAME};\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"default-singlepass\")]\n\nmod test {\n", "file_path": "packages/vm/src/backends/mod.rs", "rank": 18, "score": 198094.17835216288 }, { "content": "/// Expects a (fixed size) Region struct at ptr, which is read. This links to the\n\n/// memory region, which is copied in the second step.\n\n/// Errors if the length of the region exceeds `max_length`.\n\npub fn read_region(ctx: &Ctx, ptr: u32, max_length: usize) -> VmResult<Vec<u8>> {\n\n let region = get_region(ctx, ptr)?;\n\n\n\n if region.length > to_u32(max_length)? {\n\n return Err(\n\n CommunicationError::region_length_too_big(region.length as usize, max_length).into(),\n\n );\n\n }\n\n\n\n let memory = ctx.memory(0);\n\n match WasmPtr::<u8, Array>::new(region.offset).deref(memory, 0, region.length) {\n\n Some(cells) => {\n\n // In case you want to do some premature optimization, this shows how to cast a `&'mut [Cell<u8>]` to `&mut [u8]`:\n\n // https://github.com/wasmerio/wasmer/blob/0.13.1/lib/wasi/src/syscalls/mod.rs#L79-L81\n\n let len = region.length as usize;\n\n let mut result = vec![0u8; len];\n\n for i in 0..len {\n\n result[i] = cells[i].get();\n\n }\n\n Ok(result)\n", "file_path": "packages/vm/src/memory.rs", "rank": 19, "score": 193467.86671407864 }, { "content": "/// Runs a series of IO tests, hammering especially on allocate and deallocate.\n\n/// This could be especially useful when run with some kind of leak detector.\n\npub fn test_io<S: Storage, A: Api + 'static, Q: Querier>(instance: &mut Instance<S, A, Q>) {\n\n let sizes: Vec<usize> = vec![0, 1, 3, 10, 200, 2000, 5 * 1024];\n\n let bytes: Vec<u8> = vec![0x00, 0xA5, 0xFF];\n\n\n\n for size in sizes.into_iter() {\n\n for byte in bytes.iter() {\n\n let original = vec![*byte; size];\n\n let wasm_ptr = instance\n\n .allocate(original.len())\n\n .expect(\"Could not allocate memory\");\n\n instance\n\n .write_memory(wasm_ptr, &original)\n\n .expect(\"Could not write data\");\n\n let wasm_data = instance.read_memory(wasm_ptr, size).expect(\"error reading\");\n\n assert_eq!(\n\n original, wasm_data,\n\n \"failed for size {}; expected: {:?}; actual: {:?}\",\n\n size, original, wasm_data\n\n );\n\n instance\n\n .deallocate(wasm_ptr)\n\n .expect(\"Could not deallocate memory\");\n\n }\n\n }\n\n}\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 20, "score": 193320.01150166526 }, { "content": "pub fn set_storage_readonly<S: Storage, Q: Querier>(ctx: &mut Ctx, new_value: bool) {\n\n let mut context_data = get_context_data_mut::<S, 
Q>(ctx);\n\n context_data.storage_readonly = new_value;\n\n}\n\n\n\npub(crate) fn with_func_from_context<S, Q, Args, Rets, Callback, CallbackData>(\n\n ctx: &mut Ctx,\n\n name: &str,\n\n callback: Callback,\n\n) -> VmResult<CallbackData>\n\nwhere\n\n S: Storage,\n\n Q: Querier,\n\n Args: WasmTypeList,\n\n Rets: WasmTypeList,\n\n Callback: FnOnce(Func<Args, Rets, Wasm>) -> VmResult<CallbackData>,\n\n{\n\n let ctx_data = get_context_data::<S, Q>(ctx);\n\n match ctx_data.wasmer_instance {\n\n Some(instance_ptr) => {\n", "file_path": "packages/vm/src/context.rs", "rank": 21, "score": 192903.25018551468 }, { "content": "pub fn do_remove<S: Storage, Q: Querier>(ctx: &mut Ctx, key_ptr: u32) -> VmResult<()> {\n\n if is_storage_readonly::<S, Q>(ctx) {\n\n return Err(VmError::write_access_denied());\n\n }\n\n\n\n let key = read_region(ctx, key_ptr, MAX_LENGTH_DB_KEY)?;\n\n\n\n let (result, gas_info) =\n\n with_storage_from_context::<S, Q, _, _>(ctx, |store| Ok(store.remove(&key)))?;\n\n process_gas_info::<S, Q>(ctx, gas_info)?;\n\n result?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 22, "score": 190596.18664453068 }, { "content": "#[cfg(feature = \"iterator\")]\n\npub fn maybe_read_region(ctx: &Ctx, ptr: u32, max_length: usize) -> VmResult<Option<Vec<u8>>> {\n\n if ptr == 0 {\n\n Ok(None)\n\n } else {\n\n read_region(ctx, ptr, max_length).map(Some)\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/memory.rs", "rank": 23, "score": 186265.5496341927 }, { "content": "#[cfg(feature = \"iterator\")]\n\npub fn do_next<S: Storage, Q: Querier>(ctx: &mut Ctx, iterator_id: u32) -> VmResult<u32> {\n\n let (result, gas_info) =\n\n with_storage_from_context::<S, Q, _, _>(ctx, |store| Ok(store.next(iterator_id)))?;\n\n process_gas_info::<S, Q>(ctx, gas_info)?;\n\n\n\n // Empty key will later be treated as _no more element_.\n\n let (key, value) = result?.unwrap_or_else(|| (Vec::<u8>::new(), Vec::<u8>::new()));\n\n\n\n // Build value || key || keylen\n\n let keylen_bytes = to_u32(key.len())?.to_be_bytes();\n\n let mut out_data = value;\n\n out_data.reserve(key.len() + 4);\n\n out_data.extend(key);\n\n out_data.extend_from_slice(&keylen_bytes);\n\n\n\n write_to_contract::<S, Q>(ctx, &out_data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "packages/vm/src/imports.rs", "rank": 24, "score": 185980.77231364447 }, { "content": "/// Similar to alloc, but instead of creating a new vector it consumes an existing one and returns\n\n/// a pointer to the Region (preventing the memory from being freed until explicitly called later).\n\n///\n\n/// The resulting Region has capacity = length, i.e. the buffer's capacity is ignored.\n\npub fn release_buffer(buffer: Vec<u8>) -> *mut Region {\n\n let region = build_region(&buffer);\n\n mem::forget(buffer);\n\n Box::into_raw(region)\n\n}\n\n\n\n/// Return the data referenced by the Region and\n\n/// deallocates the Region (and the vector when finished).\n\n/// Warning: only use this when you are sure the caller will never use (or free) the Region later\n\n///\n\n/// # Safety\n\n///\n\n/// The ptr must refer to a valid Region, which was previously returned by alloc,\n\n/// and not yet deallocated. This call will deallocate the Region and return an owner vector\n\n/// to the caller containing the referenced data.\n\n///\n\n/// Naturally, calling this function twice on the same pointer will double deallocate data\n\n/// and lead to a crash. 
Make sure to call it exactly once (either consuming the input in\n\n/// the wasm code OR deallocating the buffer from the caller).\n\npub unsafe fn consume_region(ptr: *mut Region) -> Vec<u8> {\n", "file_path": "packages/std/src/memory.rs", "rank": 25, "score": 184408.61689488223 }, { "content": "/// balances are state of the erc20 tokens\n\npub fn balances(storage: &mut dyn Storage) -> Bucket<Uint128> {\n\n bucket(storage, PREFIX_BALANCE)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 26, "score": 181227.14719395724 }, { "content": "pub fn config(storage: &mut dyn Storage) -> Singleton<State> {\n\n singleton(storage, CONFIG_KEY)\n\n}\n\n\n", "file_path": "contracts/reflect/src/state.rs", "rank": 27, "score": 181227.14719395724 }, { "content": "/// claims are the claims to money being unbonded\n\npub fn claims(storage: &mut dyn Storage) -> Bucket<Uint128> {\n\n bucket(storage, PREFIX_CLAIMS)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 28, "score": 181227.14719395724 }, { "content": "fn check_wasm_features(module: &Module, supported_features: &HashSet<String>) -> VmResult<()> {\n\n let required_features = required_features_from_module(module);\n\n if !required_features.is_subset(supported_features) {\n\n // We switch to BTreeSet to get a sorted error message\n\n let unsupported = BTreeSet::from_iter(required_features.difference(&supported_features));\n\n return Err(VmError::static_validation_err(format!(\n\n \"Wasm contract requires unsupported features: {}\",\n\n unsupported.to_string_limited(200)\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::errors::VmError;\n\n use parity_wasm::elements::Internal;\n\n use std::iter::FromIterator;\n\n\n", "file_path": "packages/vm/src/compatibility.rs", "rank": 29, "score": 179743.41522797255 }, { "content": "pub fn mock_instance(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 30, "score": 179615.27358014722 }, { "content": "/// nextval increments the counter by 1 and returns the new value.\n\n/// On the first time it is called (no sequence info in db) it will return 1.\n\npub fn nextval(seq: &mut Singleton<u64>) -> StdResult<u64> {\n\n let val = currval(&seq)? 
+ 1;\n\n seq.save(&val)?;\n\n Ok(val)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use cosmwasm_std::testing::MockStorage;\n\n\n\n #[test]\n\n fn walk_through_sequence() {\n\n let mut store = MockStorage::new();\n\n let mut seq = sequence(&mut store, b\"seq\");\n\n\n\n assert_eq!(currval(&seq).unwrap(), 0);\n\n assert_eq!(nextval(&mut seq).unwrap(), 1);\n\n assert_eq!(nextval(&mut seq).unwrap(), 2);\n\n assert_eq!(nextval(&mut seq).unwrap(), 3);\n", "file_path": "packages/storage/src/sequence.rs", "rank": 31, "score": 178861.8060810045 }, { "content": "pub fn total_supply(storage: &mut dyn Storage) -> Singleton<Supply> {\n\n singleton(storage, KEY_TOTAL_SUPPLY)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 32, "score": 178851.58426554513 }, { "content": "/// A drop-in replacement for cosmwasm_std::testing::mock_dependencies\n\n/// this uses our CustomQuerier.\n\npub fn mock_dependencies_with_custom_querier(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier<SpecialQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<SpecialQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| SystemResult::Ok(custom_query_execute(&query)));\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 33, "score": 176777.36057263383 }, { "content": "/// Initializes the querier along with the mock_dependencies.\n\n/// Sets all balances provided (yoy must explicitly set contract balance if desired)\n\npub fn mock_backend_with_balances(\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Backend<MockStorage, MockApi, MockQuerier> {\n\n Backend {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: MockQuerier::new(balances),\n\n }\n\n}\n\n\n\n/// Zero-pads all human addresses to make them fit the canonical_length and\n\n/// trims off zeros for the reverse operation.\n\n/// This is not really smart, but allows us to see a difference (and consistent length for canonical adddresses).\n\n#[derive(Copy, Clone)]\n\npub struct MockApi {\n\n /// Length of canonical addresses created with this API. 
Contracts should not make any assumtions\n\n /// what this value is.\n\n pub canonical_length: usize,\n\n /// When set, all calls to the API fail with BackendError::Unknown containing this message\n\n backend_error: Option<&'static str>,\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 34, "score": 176773.74068568676 }, { "content": "/// A drop-in replacement for cosmwasm_vm::testing::mock_dependencies\n\n/// that supports SpecialQuery.\n\npub fn mock_dependencies_with_custom_querier(\n\n contract_balance: &[Coin],\n\n) -> Backend<MockStorage, MockApi, MockQuerier<SpecialQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<SpecialQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| SystemResult::Ok(custom_query_execute(query)));\n\n\n\n Backend {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/tests/integration.rs", "rank": 35, "score": 176772.76371933613 }, { "content": "pub fn mock_instance_with_balances(\n\n wasm: &[u8],\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n balances,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 36, "score": 176768.3168034028 }, { "content": "pub fn mock_instance_with_options(\n\n wasm: &[u8],\n\n options: MockInstanceOptions,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n check_wasm(wasm, &options.supported_features).unwrap();\n\n let contract_address = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n\n\n // merge balances\n\n let mut balances = options.balances.to_vec();\n\n if let Some(contract_balance) = options.contract_balance {\n\n // Remove old entry if exists\n\n if let Some(pos) = balances.iter().position(|item| *item.0 == contract_address) {\n\n balances.remove(pos);\n\n }\n\n balances.push((&contract_address, contract_balance));\n\n }\n\n\n\n let api = if let Some(backend_error) = options.backend_error {\n\n MockApi::new_failing(backend_error)\n\n } else {\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 37, "score": 176768.3168034028 }, { "content": "pub fn invest_info(storage: &mut dyn Storage) -> Singleton<InvestmentInfo> {\n\n singleton(storage, KEY_INVESTMENT)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 38, "score": 176577.50424532962 }, { "content": "pub fn token_info(storage: &mut dyn Storage) -> Singleton<TokenInfoResponse> {\n\n singleton(storage, KEY_TOKEN_INFO)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 39, "score": 174398.54017824295 }, { "content": "pub fn mock_instance_with_failing_api(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n backend_error: &'static str,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n backend_error: Some(backend_error),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 40, "score": 174054.34255324633 }, { "content": "/// A shorthand constructor for Coin\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use cosmwasm_std::{coin, BankMsg, CosmosMsg, HandleResponse};\n\n/// # use cosmwasm_std::testing::{mock_env, mock_info};\n\n/// # let env = mock_env();\n\n/// # let info 
= mock_info(\"sender\", &[]);\n\n/// let tip = vec![\n\n/// coin(123, \"ucosm\"),\n\n/// coin(24, \"ustake\"),\n\n/// ];\n\n///\n\n/// let mut response: HandleResponse = Default::default();\n\n/// response.messages = vec![CosmosMsg::Bank(BankMsg::Send {\n\n/// from_address: env.contract.address,\n\n/// to_address: info.sender,\n\n/// amount: tip,\n\n/// })];\n\n/// ```\n\npub fn coin<S: Into<String>>(amount: u128, denom: S) -> Coin {\n\n Coin::new(amount, denom)\n\n}\n\n\n", "file_path": "packages/std/src/coins.rs", "rank": 41, "score": 173711.31893185474 }, { "content": "/// Returns a default enviroment with height, time, chain_id, and contract address\n\n/// You can submit as is to most contracts, or modify height/time if you want to\n\n/// test for expiration.\n\n///\n\n/// This is intended for use in test code only.\n\npub fn mock_env() -> Env {\n\n Env {\n\n block: BlockInfo {\n\n height: 12_345,\n\n time: 1_571_797_419,\n\n time_nanos: 879305533,\n\n chain_id: \"cosmos-testnet-14002\".to_string(),\n\n },\n\n contract: ContractInfo {\n\n address: HumanAddr::from(MOCK_CONTRACT_ADDR),\n\n },\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 42, "score": 173562.6283236691 }, { "content": "/// Prints a debug message to console.\n\n/// This does not charge gas, so debug printing should be disabled when used in a blockchain module.\n\npub fn print_debug_message(ctx: &mut Ctx, message_ptr: u32) -> VmResult<()> {\n\n let message_data = read_region(ctx, message_ptr, MAX_LENGTH_DEBUG)?;\n\n let msg = String::from_utf8_lossy(&message_data);\n\n println!(\"{}\", msg);\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 43, "score": 172313.3690478903 }, { "content": "/// Calculates the raw key prefix for a given namespace as documented\n\n/// in https://github.com/webmaster128/key-namespacing#length-prefixed-keys\n\npub fn to_length_prefixed(namespace: &[u8]) -> Vec<u8> {\n\n let mut out = Vec::with_capacity(namespace.len() + 2);\n\n out.extend_from_slice(&encode_length(namespace));\n\n out.extend_from_slice(namespace);\n\n out\n\n}\n\n\n", "file_path": "packages/storage/src/length_prefixed.rs", "rank": 44, "score": 169835.36698841222 }, { "content": "/// A shortcut constructor for a set of one denomination of coins\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use cosmwasm_std::{coins, BankMsg, CosmosMsg, HandleResponse};\n\n/// # use cosmwasm_std::testing::{mock_env, mock_info};\n\n/// # let env = mock_env();\n\n/// # let info = mock_info(\"sender\", &[]);\n\n/// let tip = coins(123, \"ucosm\");\n\n///\n\n/// let mut response: HandleResponse = Default::default();\n\n/// response.messages = vec![CosmosMsg::Bank(BankMsg::Send {\n\n/// from_address: env.contract.address,\n\n/// to_address: info.sender,\n\n/// amount: tip,\n\n/// })];\n\n/// ```\n\npub fn coins<S: Into<String>>(amount: u128, denom: S) -> Vec<Coin> {\n\n vec![coin(amount, denom)]\n\n}\n\n\n", "file_path": "packages/std/src/coins.rs", "rank": 45, "score": 168935.46070482695 }, { "content": "/// Creates InstanceOptions for testing\n\npub fn mock_instance_options() -> InstanceOptions {\n\n InstanceOptions {\n\n gas_limit: DEFAULT_GAS_LIMIT,\n\n print_debug: DEFAULT_PRINT_DEBUG,\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 46, "score": 168249.74826543476 }, { "content": "/// Calculates the raw key prefix for a given nested namespace\n\n/// as documented in https://github.com/webmaster128/key-namespacing#nesting\n\npub fn 
to_length_prefixed_nested(namespaces: &[&[u8]]) -> Vec<u8> {\n\n let mut size = 0;\n\n for &namespace in namespaces {\n\n size += namespace.len() + 2;\n\n }\n\n\n\n let mut out = Vec::with_capacity(size);\n\n for &namespace in namespaces {\n\n out.extend_from_slice(&encode_length(namespace));\n\n out.extend_from_slice(namespace);\n\n }\n\n out\n\n}\n\n\n", "file_path": "packages/storage/src/length_prefixed.rs", "rank": 47, "score": 167563.56369189167 }, { "content": "pub fn digit_sum(input: &[u8]) -> usize {\n\n input.iter().fold(0, |sum, val| sum + (*val as usize))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::query::Delegation;\n\n use crate::{coin, coins, from_binary, Decimal, HumanAddr};\n\n\n\n #[test]\n\n fn mock_info_arguments() {\n\n let name = HumanAddr(\"my name\".to_string());\n\n\n\n // make sure we can generate with &str, &HumanAddr, and HumanAddr\n\n let a = mock_info(\"my name\", &coins(100, \"atom\"));\n\n let b = mock_info(&name, &coins(100, \"atom\"));\n\n let c = mock_info(name, &coins(100, \"atom\"));\n\n\n\n // and the results are the same\n", "file_path": "packages/std/src/mock.rs", "rank": 48, "score": 165950.37037496828 }, { "content": "/// Api are callbacks to system functions implemented outside of the wasm modules.\n\n/// Currently it just supports address conversion but we could add eg. crypto functions here.\n\n///\n\n/// This is a trait to allow mocks in the test code. Its members have a read-only\n\n/// reference to the Api instance to allow accessing configuration.\n\n/// Implementations must not have mutable state, such that an instance can freely\n\n/// be copied and shared between threads without affecting the behaviour.\n\n/// Given an Api instance, all members should return the same value when called with the same\n\n/// arguments. In particular this means the result must not depend in the state of the chain.\n\n/// If you need to access chaim state, you probably want to use the Querier.\n\n/// Side effects (such as logging) are allowed.\n\n///\n\n/// We can use feature flags to opt-in to non-essential methods\n\n/// for backwards compatibility in systems that don't have them all.\n\npub trait Api {\n\n fn canonical_address(&self, human: &HumanAddr) -> StdResult<CanonicalAddr>;\n\n fn human_address(&self, canonical: &CanonicalAddr) -> StdResult<HumanAddr>;\n\n /// Emits a debugging message that is handled depending on the environment (typically printed to console or ignored).\n\n /// Those messages are not persisted to chain.\n\n fn debug(&self, message: &str);\n\n}\n\n\n\n/// A short-hand alias for the two-level query result (1. accessing the contract, 2. executing query in the contract)\n\npub type QuerierResult = SystemResult<ContractResult<Binary>>;\n\n\n", "file_path": "packages/std/src/traits.rs", "rank": 49, "score": 164345.79658993642 }, { "content": "pub trait Querier {\n\n /// raw_query is all that must be implemented for the Querier.\n\n /// This allows us to pass through binary queries from one level to another without\n\n /// knowing the custom format, or we can decode it, with the knowledge of the allowed\n\n /// types. 
People using the querier probably want one of the simpler auto-generated\n\n /// helper methods\n\n fn raw_query(&self, bin_request: &[u8]) -> QuerierResult;\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct QuerierWrapper<'a>(&'a dyn Querier);\n\n\n\nimpl<'a> QuerierWrapper<'a> {\n\n pub fn new(querier: &'a dyn Querier) -> Self {\n\n QuerierWrapper(querier)\n\n }\n\n\n\n /// This allows us to pass through binary queries from one level to another without\n\n /// knowing the custom format, or we can decode it, with the knowledge of the allowed\n\n /// types. You probably want one of the simpler auto-generated helper methods\n", "file_path": "packages/std/src/traits.rs", "rank": 50, "score": 164332.95822700084 }, { "content": "/// Storage provides read and write access to a persistent storage.\n\n/// If you only want to provide read access, provide `&Storage`\n\npub trait Storage {\n\n /// Returns None when key does not exist.\n\n /// Returns Some(Vec<u8>) when key exists.\n\n ///\n\n /// Note: Support for differentiating between a non-existent key and a key with empty value\n\n /// is not great yet and might not be possible in all backends. But we're trying to get there.\n\n fn get(&self, key: &[u8]) -> Option<Vec<u8>>;\n\n\n\n #[cfg(feature = \"iterator\")]\n\n /// Allows iteration over a set of key/value pairs, either forwards or backwards.\n\n ///\n\n /// The bound `start` is inclusive and `end` is exclusive.\n\n ///\n\n /// If `start` is lexicographically greater than or equal to `end`, an empty range is described, mo matter of the order.\n\n fn range<'a>(\n\n &'a self,\n\n start: Option<&[u8]>,\n\n end: Option<&[u8]>,\n\n order: Order,\n\n ) -> Box<dyn Iterator<Item = KV> + 'a>;\n\n\n\n fn set(&mut self, key: &[u8], value: &[u8]);\n\n /// Removes a database entry at `key`.\n\n ///\n\n /// The current interface does not allow to differentiate between a key that existed\n\n /// before and one that didn't exist. 
See https://github.com/CosmWasm/cosmwasm/issues/290\n\n fn remove(&mut self, key: &[u8]);\n\n}\n\n\n", "file_path": "packages/std/src/traits.rs", "rank": 51, "score": 164332.95822700084 }, { "content": "/// Sequence creates a custom Singleton to hold an empty sequence\n\npub fn sequence<'a>(storage: &'a mut dyn Storage, key: &[u8]) -> Singleton<'a, u64> {\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/sequence.rs", "rank": 52, "score": 164016.39688821757 }, { "content": "/// An alias of PrefixedStorage::new for less verbose usage\n\npub fn prefixed<'a>(storage: &'a mut dyn Storage, namespace: &[u8]) -> PrefixedStorage<'a> {\n\n PrefixedStorage::new(storage, namespace)\n\n}\n\n\n", "file_path": "packages/storage/src/prefixed_storage.rs", "rank": 53, "score": 163988.35941111523 }, { "content": "// query mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn query<S, A, Q, M>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n msg: M,\n\n) -> ContractResult<QueryResponse>\n\nwhere\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_query(instance, &env, &serialized_msg).expect(\"VM error\")\n\n}\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 54, "score": 162957.11229101755 }, { "content": "pub fn remove_schemas(schemas_dir: &path::Path) -> Result<(), io::Error> {\n\n let file_paths = fs::read_dir(schemas_dir)?\n\n .filter_map(Result::ok) // skip read errors on entries\n\n .map(|entry| entry.path())\n\n .filter(|path| is_regular_file(path).unwrap_or(false)) // skip directories and symlinks\n\n .filter(|path| !is_hidden(path)) // skip hidden\n\n .filter(|path| is_json(path)) // skip non JSON\n\n ;\n\n\n\n for file_path in file_paths {\n\n println!(\"Removing {:?} …\", file_path);\n\n fs::remove_file(file_path)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::ffi::OsStr;\n", "file_path": "packages/schema/src/remove.rs", "rank": 55, "score": 161254.16609362164 }, { "content": "/// An alias of Singleton::new for less verbose usage\n\npub fn singleton<'a, T>(storage: &'a mut dyn Storage, key: &[u8]) -> Singleton<'a, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/singleton.rs", "rank": 56, "score": 159830.7982098252 }, { "content": "/// An alias of Bucket::new for less verbose usage\n\npub fn bucket<'a, T>(storage: &'a mut dyn Storage, namespace: &[u8]) -> Bucket<'a, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n Bucket::new(storage, namespace)\n\n}\n\n\n", "file_path": "packages/storage/src/bucket.rs", "rank": 57, "score": 159830.7982098252 }, { "content": "/// Creates a back reference from a contact to its partent instance\n\npub fn set_wasmer_instance<S: Storage, Q: Querier>(\n\n ctx: &mut Ctx,\n\n wasmer_instance: Option<NonNull<WasmerInstance>>,\n\n) {\n\n let context_data = ctx.data as *mut ContextData<S, Q>;\n\n unsafe {\n\n (*context_data).wasmer_instance = wasmer_instance;\n\n }\n\n}\n\n\n\n/// Returns the original storage and querier as owned instances, and closes any remaining\n\n/// iterators. 
This is meant to be called when recycling the instance.\n\npub(crate) fn move_out_of_context<S: Storage, Q: Querier>(\n\n source: &mut Ctx,\n\n) -> (Option<S>, Option<Q>) {\n\n let b = get_context_data_mut::<S, Q>(source);\n\n (b.storage.take(), b.querier.take())\n\n}\n\n\n\n/// Moves owned instances of storage and querier into the context.\n\n/// Should be followed by exactly one call to move_out_of_context when the instance is finished.\n\npub(crate) fn move_into_context<S: Storage, Q: Querier>(target: &mut Ctx, storage: S, querier: Q) {\n\n let b = get_context_data_mut::<S, Q>(target);\n\n b.storage = Some(storage);\n\n b.querier = Some(querier);\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 58, "score": 158452.2491628298 }, { "content": "// handle mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn handle<S, A, Q, M, U>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<HandleResponse<U>>\n\nwhere\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_handle(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 59, "score": 158199.07908066222 }, { "content": "// init mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn init<S, A, Q, M, U>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<InitResponse<U>>\n\nwhere\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_init(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 60, "score": 158199.07908066222 }, { "content": "// migrate mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn migrate<S, A, Q, M, U>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<MigrateResponse<U>>\n\nwhere\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_migrate(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 61, "score": 158199.07908066222 }, { "content": "#[test]\n\nfn push_and_pop() {\n\n let (mut deps, info) = create_contract();\n\n let _: HandleResponse = handle(\n\n &mut deps,\n\n mock_env(),\n\n info.clone(),\n\n HandleMsg::Enqueue { value: 25 },\n\n )\n\n .unwrap();\n\n let _: HandleResponse = handle(\n\n &mut deps,\n\n 
mock_env(),\n\n info.clone(),\n\n HandleMsg::Enqueue { value: 17 },\n\n )\n\n .unwrap();\n\n let res: HandleResponse =\n\n handle(&mut deps, mock_env(), info.clone(), HandleMsg::Dequeue {}).unwrap();\n\n // ensure we popped properly\n\n assert!(res.data.is_some());\n\n let data = res.data.unwrap();\n\n let item: Item = from_slice(data.as_slice()).unwrap();\n\n assert_eq!(item.value, 25);\n\n\n\n assert_eq!(get_count(&mut deps), 1);\n\n assert_eq!(get_sum(&mut deps), 17);\n\n}\n\n\n", "file_path": "contracts/queue/tests/integration.rs", "rank": 62, "score": 155520.9871420056 }, { "content": "pub fn process_gas_info<S: Storage, Q: Querier>(ctx: &mut Ctx, info: GasInfo) -> VmResult<()> {\n\n decrease_gas_left(ctx, info.cost)?;\n\n account_for_externally_used_gas::<S, Q>(ctx, info.externally_used)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 63, "score": 154331.18822994595 }, { "content": "/// Access to the VM's backend storage, i.e. the chain\n\npub trait Storage {\n\n /// Returns Err on error.\n\n /// Returns Ok(None) when key does not exist.\n\n /// Returns Ok(Some(Vec<u8>)) when key exists.\n\n ///\n\n /// Note: Support for differentiating between a non-existent key and a key with empty value\n\n /// is not great yet and might not be possible in all backends. But we're trying to get there.\n\n fn get(&self, key: &[u8]) -> BackendResult<Option<Vec<u8>>>;\n\n\n\n /// Allows iteration over a set of key/value pairs, either forwards or backwards.\n\n /// Returns an interator ID that is unique within the Storage instance.\n\n ///\n\n /// The bound `start` is inclusive and `end` is exclusive.\n\n ///\n\n /// If `start` is lexicographically greater than or equal to `end`, an empty range is described, mo matter of the order.\n\n ///\n\n /// This call must not change data in the storage, but creating and storing a new iterator can be a mutating operation on\n\n /// the Storage implementation.\n\n /// The implementation must ensure that iterator IDs are assigned in a deterministic manner as this is\n\n /// environment data that is injected into the contract.\n", "file_path": "packages/vm/src/backend.rs", "rank": 64, "score": 154019.0506366846 }, { "content": "pub trait Querier {\n\n /// This is all that must be implemented for the Querier.\n\n /// This allows us to pass through binary queries from one level to another without\n\n /// knowing the custom format, or we can decode it, with the knowledge of the allowed\n\n /// types.\n\n ///\n\n /// The gas limit describes how much VM gas this particular query is allowed\n\n /// to comsume when measured separately from the rest of the contract.\n\n /// The returned gas info (in BackendResult) can exceed the gas limit in cases\n\n /// where the query could not be aborted exactly at the limit.\n\n fn query_raw(\n\n &self,\n\n request: &[u8],\n\n gas_limit: u64,\n\n ) -> BackendResult<SystemResult<ContractResult<Binary>>>;\n\n}\n\n\n\n/// A result type for calling into the backend. 
Such a call can cause\n\n/// non-negligible computational cost in both success and faiure case and must always have gas information\n\n/// attached.\n", "file_path": "packages/vm/src/backend.rs", "rank": 65, "score": 154019.0506366846 }, { "content": "/// Reads a storage entry from the VM's storage into Wasm memory\n\npub fn do_read<S: Storage, Q: Querier>(ctx: &mut Ctx, key_ptr: u32) -> VmResult<u32> {\n\n let key = read_region(ctx, key_ptr, MAX_LENGTH_DB_KEY)?;\n\n\n\n let (result, gas_info) =\n\n with_storage_from_context::<S, Q, _, _>(ctx, |store| Ok(store.get(&key)))?;\n\n process_gas_info::<S, Q>(ctx, gas_info)?;\n\n let value = result?;\n\n\n\n let out_data = match value {\n\n Some(data) => data,\n\n None => return Ok(0),\n\n };\n\n write_to_contract::<S, Q>(ctx, &out_data)\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 66, "score": 153962.5993220527 }, { "content": "pub fn custom_query_execute(query: &SpecialQuery) -> ContractResult<Binary> {\n\n let msg = match query {\n\n SpecialQuery::Ping {} => \"pong\".to_string(),\n\n SpecialQuery::Capitalized { text } => text.to_uppercase(),\n\n };\n\n to_binary(&SpecialResponse { msg }).into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use cosmwasm_std::{from_binary, QuerierWrapper, QueryRequest};\n\n\n\n #[test]\n\n fn custom_query_execute_ping() {\n\n let res = custom_query_execute(&SpecialQuery::Ping {}).unwrap();\n\n let response: SpecialResponse = from_binary(&res).unwrap();\n\n assert_eq!(response.msg, \"pong\");\n\n }\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 67, "score": 153252.06204254262 }, { "content": "pub fn do_query_chain<S: Storage, Q: Querier>(ctx: &mut Ctx, request_ptr: u32) -> VmResult<u32> {\n\n let request = read_region(ctx, request_ptr, MAX_LENGTH_QUERY_CHAIN_REQUEST)?;\n\n\n\n let gas_remaining = get_gas_left(ctx);\n\n let (result, gas_info) = with_querier_from_context::<S, Q, _, _>(ctx, |querier| {\n\n Ok(querier.query_raw(&request, gas_remaining))\n\n })?;\n\n process_gas_info::<S, Q>(ctx, gas_info)?;\n\n let serialized = to_vec(&result?)?;\n\n write_to_contract::<S, Q>(ctx, &serialized)\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 68, "score": 152180.2616399616 }, { "content": "/// Creates a new Attribute.\n\npub fn attr<K: ToString, V: ToString>(key: K, value: V) -> Attribute {\n\n Attribute {\n\n key: key.to_string(),\n\n value: value.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::addresses::HumanAddr;\n\n use crate::Uint128;\n\n\n\n #[test]\n\n fn attr_works_for_different_types() {\n\n let expeceted = Attribute {\n\n key: \"foo\".to_string(),\n\n value: \"42\".to_string(),\n\n };\n\n\n\n assert_eq!(attr(\"foo\", \"42\"), expeceted);\n\n assert_eq!(attr(\"foo\".to_string(), \"42\"), expeceted);\n\n assert_eq!(attr(\"foo\", \"42\".to_string()), expeceted);\n\n assert_eq!(attr(\"foo\", HumanAddr::from(\"42\")), expeceted);\n\n assert_eq!(attr(\"foo\", Uint128(42)), expeceted);\n\n assert_eq!(attr(\"foo\", 42), expeceted);\n\n }\n\n}\n", "file_path": "packages/std/src/results/attribute.rs", "rank": 69, "score": 152046.6160151756 }, { "content": "pub fn get_gas_state_mut<'a, 'b, S: Storage + 'b, Q: Querier + 'b>(\n\n ctx: &'a mut Ctx,\n\n) -> &'b mut GasState {\n\n &mut get_context_data_mut::<S, Q>(ctx).gas_state\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 70, "score": 148259.9705979593 }, { "content": "fn get_count(deps: &mut Instance<MockStorage, MockApi, MockQuerier>) -> u32 
{\n\n let data = query(deps, mock_env(), QueryMsg::Count {}).unwrap();\n\n let res: CountResponse = from_binary(&data).unwrap();\n\n res.count\n\n}\n\n\n", "file_path": "contracts/queue/tests/integration.rs", "rank": 71, "score": 147936.06983766705 }, { "content": "fn get_sum(deps: &mut Instance<MockStorage, MockApi, MockQuerier>) -> i32 {\n\n let data = query(deps, mock_env(), QueryMsg::Sum {}).unwrap();\n\n let res: SumResponse = from_binary(&data).unwrap();\n\n res.sum\n\n}\n\n\n", "file_path": "contracts/queue/tests/integration.rs", "rank": 72, "score": 147936.06983766705 }, { "content": "/// A trait that is required to avoid conflicts with other query types like BankQuery and WasmQuery\n\n/// in generic implementations.\n\n/// You need to implement it in your custom query type.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use cosmwasm_std::CustomQuery;\n\n/// # use schemars::JsonSchema;\n\n/// # use serde::{Deserialize, Serialize};\n\n/// #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\n/// #[serde(rename_all = \"snake_case\")]\n\n/// pub enum MyCustomQuery {\n\n/// Ping {},\n\n/// Capitalized { text: String },\n\n/// }\n\n///\n\n/// impl CustomQuery for MyCustomQuery {}\n\n/// ```\n\npub trait CustomQuery: Serialize {}\n\n\n\nimpl CustomQuery for Empty {}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum WasmQuery {\n\n /// this queries the public API of another contract at a known address (with known ABI)\n\n /// return value is whatever the contract returns (caller should know)\n\n Smart {\n\n contract_addr: HumanAddr,\n\n /// msg is the json-encoded QueryMsg struct\n\n msg: Binary,\n\n },\n\n /// this queries the raw kv-store of the contract.\n\n /// returns the raw, unparsed data stored at that key, which may be an empty vector if not present\n\n Raw {\n\n contract_addr: HumanAddr,\n\n /// Key is the raw key used in the contracts Storage\n\n key: Binary,\n", "file_path": "packages/std/src/query.rs", "rank": 73, "score": 146202.16821517146 }, { "content": "fn create_unmanaged_context_data<S: Storage, Q: Querier>(gas_limit: u64) -> *mut c_void {\n\n let data = ContextData::<S, Q> {\n\n gas_state: GasState::with_limit(gas_limit),\n\n storage: None,\n\n storage_readonly: true,\n\n querier: None,\n\n wasmer_instance: None,\n\n };\n\n let heap_data = Box::new(data); // move from stack to heap\n\n Box::into_raw(heap_data) as *mut c_void // give up ownership\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 74, "score": 144487.5765133401 }, { "content": "pub fn init(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n msg: InitMsg,\n\n) -> Result<InitResponse, HackError> {\n\n deps.api.debug(\"here we go 🚀\");\n\n\n\n deps.storage.set(\n\n CONFIG_KEY,\n\n &to_vec(&State {\n\n verifier: deps.api.canonical_address(&msg.verifier)?,\n\n beneficiary: deps.api.canonical_address(&msg.beneficiary)?,\n\n funder: deps.api.canonical_address(&info.sender)?,\n\n })?,\n\n );\n\n\n\n // This adds some unrelated event attribute for testing purposes\n\n let mut ctx = Context::new();\n\n ctx.add_attribute(\"Let the\", \"hacking begin\");\n\n Ok(ctx.try_into()?)\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 75, "score": 143693.44843219136 }, { "content": "pub fn migrate(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: MigrateMsg,\n\n) -> Result<MigrateResponse, HackError> {\n\n let data = deps\n\n .storage\n\n 
.get(CONFIG_KEY)\n\n .ok_or_else(|| StdError::not_found(\"State\"))?;\n\n let mut config: State = from_slice(&data)?;\n\n config.verifier = deps.api.canonical_address(&msg.verifier)?;\n\n deps.storage.set(CONFIG_KEY, &to_vec(&config)?);\n\n\n\n Ok(MigrateResponse::default())\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 76, "score": 143693.44843219136 }, { "content": "pub fn handle(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: HandleMsg,\n\n) -> Result<HandleResponse<CustomMsg>, ReflectError> {\n\n match msg {\n\n HandleMsg::ReflectMsg { msgs } => try_reflect(deps, env, info, msgs),\n\n HandleMsg::ChangeOwner { owner } => try_change_owner(deps, env, info, owner),\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/src/contract.rs", "rank": 77, "score": 143693.44843219136 }, { "content": "pub fn init(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n _msg: InitMsg,\n\n) -> StdResult<InitResponse<CustomMsg>> {\n\n let state = State {\n\n owner: deps.api.canonical_address(&info.sender)?,\n\n };\n\n\n\n config(deps.storage).save(&state)?;\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/reflect/src/contract.rs", "rank": 78, "score": 143693.44843219136 }, { "content": "pub fn handle(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n _msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n Err(StdError::generic_err(\n\n \"You can only use this contract for migrations\",\n\n ))\n\n}\n\n\n", "file_path": "contracts/burner/src/contract.rs", "rank": 79, "score": 143693.44843219136 }, { "content": "pub fn handle(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Enqueue { value } => enqueue(deps, value),\n\n HandleMsg::Dequeue {} => dequeue(deps),\n\n }\n\n}\n\n\n\nconst FIRST_KEY: u8 = 0;\n\n\n", "file_path": "contracts/queue/src/contract.rs", "rank": 80, "score": 143693.44843219136 }, { "content": "pub fn migrate(\n\n deps: DepsMut,\n\n env: Env,\n\n _info: MessageInfo,\n\n msg: MigrateMsg,\n\n) -> StdResult<MigrateResponse> {\n\n // delete all state\n\n let keys: Vec<_> = deps\n\n .storage\n\n .range(None, None, Order::Ascending)\n\n .map(|(k, _)| k)\n\n .collect();\n\n let count = keys.len();\n\n for k in keys {\n\n deps.storage.remove(&k);\n\n }\n\n\n\n // get balance and send all to recipient\n\n let balance = deps.querier.query_all_balances(&env.contract.address)?;\n\n let send = BankMsg::Send {\n", "file_path": "contracts/burner/src/contract.rs", "rank": 81, "score": 143693.44843219136 }, { "content": "pub fn handle(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: HandleMsg,\n\n) -> Result<HandleResponse, StakingError> {\n\n match msg {\n\n HandleMsg::Transfer { recipient, amount } => {\n\n Ok(transfer(deps, env, info, recipient, amount)?)\n\n }\n\n HandleMsg::Bond {} => Ok(bond(deps, env, info)?),\n\n HandleMsg::Unbond { amount } => Ok(unbond(deps, env, info, amount)?),\n\n HandleMsg::Claim {} => Ok(claim(deps, env, info)?),\n\n HandleMsg::Reinvest {} => Ok(reinvest(deps, env, info)?),\n\n HandleMsg::_BondAllTokens {} => _bond_all_tokens(deps, env, info),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 82, "score": 143693.44843219136 }, { "content": "pub fn unbond(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n amount: Uint128,\n\n) -> StdResult<HandleResponse> {\n\n let sender_raw = deps.api.canonical_address(&info.sender)?;\n\n\n\n let invest = 
invest_info_read(deps.storage).load()?;\n\n // ensure it is big enough to care\n\n if amount < invest.min_withdrawal {\n\n return Err(StdError::generic_err(format!(\n\n \"Must unbond at least {} {}\",\n\n invest.min_withdrawal, invest.bond_denom\n\n )));\n\n }\n\n // calculate tax and remainer to unbond\n\n let tax = amount * invest.exit_tax;\n\n\n\n // deduct all from the account\n", "file_path": "contracts/staking/src/contract.rs", "rank": 83, "score": 143693.44843219136 }, { "content": "pub fn handle(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: HandleMsg,\n\n) -> Result<HandleResponse, HackError> {\n\n match msg {\n\n HandleMsg::Release {} => do_release(deps, env, info),\n\n HandleMsg::CpuLoop {} => do_cpu_loop(),\n\n HandleMsg::StorageLoop {} => do_storage_loop(deps),\n\n HandleMsg::MemoryLoop {} => do_memory_loop(),\n\n HandleMsg::AllocateLargeMemory {} => do_allocate_large_memory(),\n\n HandleMsg::Panic {} => do_panic(),\n\n HandleMsg::UserErrorsInApiCalls {} => do_user_errors_in_api_calls(deps.api),\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 84, "score": 143693.44843219136 }, { "content": "// init is a no-op, just empty data\n\npub fn init(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n _msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/queue/src/contract.rs", "rank": 85, "score": 143693.44843219136 }, { "content": "pub fn init(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n _msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n Err(StdError::generic_err(\n\n \"You can only use this contract for migrations\",\n\n ))\n\n}\n\n\n", "file_path": "contracts/burner/src/contract.rs", "rank": 86, "score": 143693.44843219136 }, { "content": "pub fn transfer(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n recipient: HumanAddr,\n\n send: Uint128,\n\n) -> StdResult<HandleResponse> {\n\n let rcpt_raw = deps.api.canonical_address(&recipient)?;\n\n let sender_raw = deps.api.canonical_address(&info.sender)?;\n\n\n\n let mut accounts = balances(deps.storage);\n\n accounts.update(&sender_raw, |balance: Option<Uint128>| {\n\n balance.unwrap_or_default() - send\n\n })?;\n\n accounts.update(&rcpt_raw, |balance: Option<Uint128>| -> StdResult<_> {\n\n Ok(balance.unwrap_or_default() + send)\n\n })?;\n\n\n\n let res = HandleResponse {\n\n messages: vec![],\n", "file_path": "contracts/staking/src/contract.rs", "rank": 87, "score": 143693.44843219136 }, { "content": "fn _do_query<M, E>(\n\n query_fn: &dyn Fn(Deps, Env, M) -> Result<QueryResponse, E>,\n\n env_ptr: *mut Region,\n\n msg_ptr: *mut Region,\n\n) -> ContractResult<QueryResponse>\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n E: ToString,\n\n{\n\n let env: Vec<u8> = unsafe { consume_region(env_ptr) };\n\n let msg: Vec<u8> = unsafe { consume_region(msg_ptr) };\n\n\n\n let env: Env = try_into_contract_result!(from_slice(&env));\n\n let msg: M = try_into_contract_result!(from_slice(&msg));\n\n\n\n let deps = make_dependencies();\n\n query_fn(deps.as_ref(), env, msg).into()\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 88, "score": 142902.45983675527 }, { "content": "/// All external requirements that can be injected for unit tests.\n\n/// It sets the given balance for the contract itself, nothing else\n\npub fn mock_dependencies(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = 
HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: MockQuerier::new(&[(&contract_addr, contract_balance)]),\n\n }\n\n}\n\n\n", "file_path": "packages/std/src/mock.rs", "rank": 89, "score": 141614.69819867713 }, { "content": "pub fn try_reflect(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n msgs: Vec<CosmosMsg<CustomMsg>>,\n\n) -> Result<HandleResponse<CustomMsg>, ReflectError> {\n\n let state = config(deps.storage).load()?;\n\n\n\n let sender = deps.api.canonical_address(&info.sender)?;\n\n if sender != state.owner {\n\n return Err(ReflectError::NotCurrentOwner {\n\n expected: state.owner,\n\n actual: sender,\n\n });\n\n }\n\n\n\n if msgs.is_empty() {\n\n return Err(ReflectError::MessagesEmpty);\n\n }\n\n let res = HandleResponse {\n\n messages: msgs,\n\n attributes: vec![attr(\"action\", \"reflect\")],\n\n data: None,\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "contracts/reflect/src/contract.rs", "rank": 90, "score": 141605.66255045464 }, { "content": "pub fn _bond_all_tokens(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n) -> Result<HandleResponse, StakingError> {\n\n // this is just meant as a call-back to ourself\n\n if info.sender != env.contract.address {\n\n return Err(Unauthorized {}.build());\n\n }\n\n\n\n // find how many tokens we have to bond\n\n let invest = invest_info_read(deps.storage).load()?;\n\n let mut balance = deps\n\n .querier\n\n .query_balance(&env.contract.address, &invest.bond_denom)?;\n\n\n\n // we deduct pending claims from our account balance before reinvesting.\n\n // if there is not enough funds, we just return a no-op\n\n match total_supply(deps.storage).update(|mut supply| {\n\n balance.amount = (balance.amount - supply.claims)?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 91, "score": 141605.66255045464 }, { "content": "/// All external requirements that can be injected for unit tests.\n\n/// It sets the given balance for the contract itself, nothing else\n\npub fn mock_backend(contract_balance: &[Coin]) -> Backend<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n Backend {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: MockQuerier::new(&[(&contract_addr, contract_balance)]),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 92, "score": 140937.29716229928 }, { "content": "/// Initializes the querier along with the mock_dependencies.\n\n/// Sets all balances provided (yoy must explicitly set contract balance if desired)\n\npub fn mock_dependencies_with_balances(\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier> {\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: MockQuerier::new(balances),\n\n }\n\n}\n\n\n\n// Use MemoryStorage implementation (which is valid in non-testcode)\n\n// We can later make simplifications here if needed\n\npub type MockStorage = MemoryStorage;\n\n\n\n// MockPrecompiles zero pads all human addresses to make them fit the canonical_length\n\n// it trims off zeros for the reverse operation.\n\n// not really smart, but allows us to see a difference (and consistent length for canonical adddresses)\n\n#[derive(Copy, Clone)]\n\npub struct MockApi {\n\n /// Length of canonical addresses created with this API. 
Contracts should not make any assumtions\n", "file_path": "packages/std/src/mock.rs", "rank": 93, "score": 139625.60035783015 }, { "content": "pub fn try_change_owner(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n owner: HumanAddr,\n\n) -> Result<HandleResponse<CustomMsg>, ReflectError> {\n\n let api = deps.api;\n\n config(deps.storage).update(|mut state| {\n\n let sender = api.canonical_address(&info.sender)?;\n\n if sender != state.owner {\n\n return Err(ReflectError::NotCurrentOwner {\n\n expected: state.owner,\n\n actual: sender,\n\n });\n\n }\n\n state.owner = api.canonical_address(&owner)?;\n\n Ok(state)\n\n })?;\n\n Ok(HandleResponse {\n\n attributes: vec![attr(\"action\", \"change_owner\"), attr(\"owner\", owner)],\n\n ..HandleResponse::default()\n\n })\n\n}\n\n\n", "file_path": "contracts/reflect/src/contract.rs", "rank": 94, "score": 139620.17647554618 }, { "content": "/// Just set sender and sent funds for the message. The essential for\n\n/// This is intended for use in test code only.\n\npub fn mock_info<U: Into<HumanAddr>>(sender: U, sent: &[Coin]) -> MessageInfo {\n\n MessageInfo {\n\n sender: sender.into(),\n\n sent_funds: sent.to_vec(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::BackendError;\n\n use cosmwasm_std::{coins, Binary};\n\n\n\n #[test]\n\n fn mock_info_arguments() {\n\n let name = HumanAddr(\"my name\".to_string());\n\n\n\n // make sure we can generate with &str, &HumanAddr, and HumanAddr\n\n let a = mock_info(\"my name\", &coins(100, \"atom\"));\n\n let b = mock_info(&name, &coins(100, \"atom\"));\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 95, "score": 139351.29575985973 }, { "content": "#[cfg(feature = \"iterator\")]\n\nfn range_bounds(start: Option<&[u8]>, end: Option<&[u8]>) -> impl RangeBounds<Vec<u8>> {\n\n (\n\n start.map_or(Bound::Unbounded, |x| Bound::Included(x.to_vec())),\n\n end.map_or(Bound::Unbounded, |x| Bound::Excluded(x.to_vec())),\n\n )\n\n}\n\n\n\n#[cfg(feature = \"iterator\")]\n", "file_path": "packages/vm/src/testing/storage.rs", "rank": 96, "score": 139219.31287162553 }, { "content": "/// Api are callbacks to system functions defined outside of the wasm modules.\n\n/// This is a trait to allow Mocks in the test code.\n\n///\n\n/// Currently it just supports address conversion, we could add eg. crypto functions here.\n\n/// These should all be pure (stateless) functions. 
If you need state, you probably want\n\n/// to use the Querier.\n\n///\n\n/// We can use feature flags to opt-in to non-essential methods\n\n/// for backwards compatibility in systems that don't have them all.\n\npub trait Api: Copy + Clone + Send {\n\n fn canonical_address(&self, human: &HumanAddr) -> BackendResult<CanonicalAddr>;\n\n fn human_address(&self, canonical: &CanonicalAddr) -> BackendResult<HumanAddr>;\n\n}\n\n\n", "file_path": "packages/vm/src/backend.rs", "rank": 97, "score": 137951.83043047108 }, { "content": "fn _do_handle<M, C, E>(\n\n handle_fn: &dyn Fn(DepsMut, Env, MessageInfo, M) -> Result<HandleResponse<C>, E>,\n\n env_ptr: *mut Region,\n\n info_ptr: *mut Region,\n\n msg_ptr: *mut Region,\n\n) -> ContractResult<HandleResponse<C>>\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n C: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n E: ToString,\n\n{\n\n let env: Vec<u8> = unsafe { consume_region(env_ptr) };\n\n let info: Vec<u8> = unsafe { consume_region(info_ptr) };\n\n let msg: Vec<u8> = unsafe { consume_region(msg_ptr) };\n\n\n\n let env: Env = try_into_contract_result!(from_slice(&env));\n\n let info: MessageInfo = try_into_contract_result!(from_slice(&info));\n\n let msg: M = try_into_contract_result!(from_slice(&msg));\n\n\n\n let mut deps = make_dependencies();\n\n handle_fn(deps.as_mut(), env, info, msg).into()\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 98, "score": 137816.22562836047 }, { "content": "fn _do_init<M, C, E>(\n\n init_fn: &dyn Fn(DepsMut, Env, MessageInfo, M) -> Result<InitResponse<C>, E>,\n\n env_ptr: *mut Region,\n\n info_ptr: *mut Region,\n\n msg_ptr: *mut Region,\n\n) -> ContractResult<InitResponse<C>>\n\nwhere\n\n M: DeserializeOwned + JsonSchema,\n\n C: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n E: ToString,\n\n{\n\n let env: Vec<u8> = unsafe { consume_region(env_ptr) };\n\n let info: Vec<u8> = unsafe { consume_region(info_ptr) };\n\n let msg: Vec<u8> = unsafe { consume_region(msg_ptr) };\n\n\n\n let env: Env = try_into_contract_result!(from_slice(&env));\n\n let info: MessageInfo = try_into_contract_result!(from_slice(&info));\n\n let msg: M = try_into_contract_result!(from_slice(&msg));\n\n\n\n let mut deps = make_dependencies();\n\n init_fn(deps.as_mut(), env, info, msg).into()\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 99, "score": 137816.22562836047 } ]
Rust
sync/src/relayer/tests/compact_block_verifier.rs
orangemio/ckb
5b3664e162c840f421469279e7e68fe9dcad75dd
use crate::relayer::compact_block::{CompactBlock, ShortTransactionID};
use crate::relayer::compact_block_verifier::{PrefilledVerifier, ShortIdsVerifier};
use crate::relayer::error::Error;
use ckb_core::transaction::{CellOutput, IndexTransaction, TransactionBuilder};
use ckb_core::Capacity;
use ckb_protocol::{short_transaction_id, short_transaction_id_keys};

fn new_index_transaction(index: usize) -> IndexTransaction {
    let transaction = TransactionBuilder::default()
        .output(CellOutput::new(
            Capacity::bytes(index).unwrap(),
            Default::default(),
            Default::default(),
            None,
        ))
        .build();
    IndexTransaction { index, transaction }
}

#[test]
fn test_unordered_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 4, 3]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::UnorderedPrefilledTransactions),
    );
}

#[test]
fn test_ordered_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (0..5).map(new_index_transaction).collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(PrefilledVerifier::new().verify(&block), Ok(()),);
}

#[test]
fn test_overflow_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 5]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::OverflowPrefilledTransactions),
    );
}

#[test]
fn test_cellbase_not_prefilled() {
    let block = CompactBlock::default();
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::CellbaseNotPrefilled)
    );
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (1..5).map(new_index_transaction).collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::CellbaseNotPrefilled),
    );
}

#[test]
fn test_duplicated_short_ids() {
    let mut block = CompactBlock::default();
    let mut short_ids: Vec<ShortTransactionID> = (1..5)
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    short_ids.push(short_ids[0]);
    block.short_ids = short_ids;
    assert_eq!(
        ShortIdsVerifier::new().verify(&block),
        Err(Error::DuplicatedShortIds),
    );
}

#[test]
fn test_intersected_short_ids() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (0..=5).map(new_index_transaction).collect();
    let short_ids: Vec<ShortTransactionID> = (5..9)
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    block.prefilled_transactions = prefilled;
    block.short_ids = short_ids;
    assert_eq!(
        ShortIdsVerifier::new().verify(&block),
        Err(Error::IntersectedPrefilledTransactions),
    );
}

#[test]
fn test_normal() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![1, 2, 5]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    let short_ids: Vec<ShortTransactionID> = vec![0, 3, 4]
        .into_iter()
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    block.prefilled_transactions = prefilled;
    block.short_ids = short_ids;
    assert_eq!(ShortIdsVerifier::new().verify(&block), Ok(()),);
}
use crate::relayer::compact_block::{CompactBlock, ShortTransactionID};
use crate::relayer::compact_block_verifier::{PrefilledVerifier, ShortIdsVerifier};
use crate::relayer::error::Error;
use ckb_core::transaction::{CellOutput, IndexTransaction, TransactionBuilder};
use ckb_core::Capacity;
use ckb_protocol::{short_transaction_id, short_transaction_id_keys};

fn new_index_transaction(index: usize) -> IndexTransaction {
    let transaction = TransactionBuilder::default()
        .output(CellOutput::new(
            Capacity::bytes(index).unwrap(),
            Default::default(),
            Default::default(),
            None,
        ))
        .build();
    IndexTransaction { index, transaction }
}

#[test]
fn test_unordered_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 4, 3]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::UnorderedPrefilledTransactions),
    );
}

#[test]
fn test_ordered_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (0..5).map(new_index_transaction).collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(PrefilledVerifier::new().verify(&block), Ok(()),);
}

#[test]
fn test_overflow_prefilled() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![0, 1, 2, 5]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::OverflowPrefilledTransactions),
    );
}

#[test]
fn test_cellbase_not_prefilled() {
    let block = CompactBlock::default();
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::CellbaseNotPre
#[test]
fn test_duplicated_short_ids() {
    let mut block = CompactBlock::default();
    let mut short_ids: Vec<ShortTransactionID> = (1..5)
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    short_ids.push(short_ids[0]);
    block.short_ids = short_ids;
    assert_eq!(
        ShortIdsVerifier::new().verify(&block),
        Err(Error::DuplicatedShortIds),
    );
}

#[test]
fn test_intersected_short_ids() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (0..=5).map(new_index_transaction).collect();
    let short_ids: Vec<ShortTransactionID> = (5..9)
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    block.prefilled_transactions = prefilled;
    block.short_ids = short_ids;
    assert_eq!(
        ShortIdsVerifier::new().verify(&block),
        Err(Error::IntersectedPrefilledTransactions),
    );
}

#[test]
fn test_normal() {
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = vec![1, 2, 5]
        .into_iter()
        .map(new_index_transaction)
        .collect();
    let short_ids: Vec<ShortTransactionID> = vec![0, 3, 4]
        .into_iter()
        .map(new_index_transaction)
        .map(|tx| {
            let (key0, key1) = short_transaction_id_keys(block.header.nonce(), block.nonce);
            short_transaction_id(key0, key1, &tx.transaction.witness_hash())
        })
        .collect();
    block.prefilled_transactions = prefilled;
    block.short_ids = short_ids;
    assert_eq!(ShortIdsVerifier::new().verify(&block), Ok(()),);
}
filled)
    );
    let mut block = CompactBlock::default();
    let prefilled: Vec<IndexTransaction> = (1..5).map(new_index_transaction).collect();
    block.prefilled_transactions = prefilled;
    assert_eq!(
        PrefilledVerifier::new().verify(&block),
        Err(Error::CellbaseNotPrefilled),
    );
}
function_block-function_prefixed
[ { "content": "// Build compact block based on core block, and specific prefilled indices\n\npub fn build_compact_block_with_prefilled(block: &Block, prefilled: Vec<usize>) -> Bytes {\n\n let prefilled = prefilled.into_iter().collect();\n\n let fbb = &mut FlatBufferBuilder::new();\n\n let message = RelayMessage::build_compact_block(fbb, &block, &prefilled);\n\n fbb.finish(message, None);\n\n fbb.finished_data().into()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 1, "score": 265735.3063038492 }, { "content": "// Build compact block based on core block\n\npub fn build_compact_block(block: &Block) -> Bytes {\n\n let fbb = &mut FlatBufferBuilder::new();\n\n let message = RelayMessage::build_compact_block(fbb, &block, &HashSet::new());\n\n fbb.finish(message, None);\n\n fbb.finished_data().into()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 2, "score": 213997.51418154323 }, { "content": "#[test]\n\nfn test_transaction_conflict_in_same_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let mut chain: Vec<Block> = Vec::new();\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n parent = new_block.header().to_owned();\n\n chain.push(new_block);\n\n }\n\n\n\n let last_cell_base = &chain.last().unwrap().transactions()[0];\n\n let tx1 = create_transaction(last_cell_base.hash(), 1);\n\n let tx1_hash = tx1.hash().to_owned();\n", "file_path": "chain/src/tests/basic.rs", "rank": 3, "score": 193729.53318574172 }, { "content": "#[test]\n\nfn test_transaction_spend_in_same_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let mut chain: Vec<Block> = Vec::new();\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n parent = new_block.header().to_owned();\n\n chain.push(new_block);\n\n }\n\n\n\n let last_cell_base = &chain.last().unwrap().transactions()[0];\n\n let last_cell_base_hash = last_cell_base.hash().to_owned();\n\n let tx1 = create_transaction(&last_cell_base_hash, 1);\n", "file_path": "chain/src/tests/basic.rs", "rank": 4, "score": 193729.53318574172 }, { "content": "#[test]\n\nfn test_invalid_out_point_index_in_same_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let mut chain: Vec<Block> = Vec::new();\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n parent = new_block.header().to_owned();\n\n chain.push(new_block);\n\n }\n\n\n\n let last_cell_base = &chain.last().unwrap().transactions()[0];\n\n let tx1 = create_transaction(last_cell_base.hash(), 1);\n\n let tx1_hash = tx1.hash().to_owned();\n", "file_path": "chain/src/tests/basic.rs", "rank": 5, "score": 188816.02985961683 }, { "content": "#[test]\n\nfn test_transaction_conflict_in_different_blocks() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let mut chain: Vec<Block> = Vec::new();\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n {\n\n let difficulty = 
parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n parent = new_block.header().to_owned();\n\n chain.push(new_block);\n\n }\n\n\n\n let last_cell_base = &chain.last().unwrap().transactions()[0];\n\n let tx1 = create_transaction(last_cell_base.hash(), 1);\n\n let tx1_hash = tx1.hash();\n", "file_path": "chain/src/tests/basic.rs", "rank": 6, "score": 188561.1463967067 }, { "content": "fn create_cellbase_transaction() -> Transaction {\n\n create_cellbase_transaction_with_capacity(capacity_bytes!(100))\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 7, "score": 185542.23359509313 }, { "content": "fn create_normal_transaction() -> Transaction {\n\n TransactionBuilder::default()\n\n .input(CellInput::new(\n\n OutPoint::new_cell(h256!(\"0x1\"), 0),\n\n 0,\n\n Default::default(),\n\n ))\n\n .output(CellOutput::new(\n\n capacity_bytes!(100),\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n ))\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 8, "score": 185542.2335950931 }, { "content": "#[test]\n\nfn test_invalid_out_point_index_in_different_blocks() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let mut chain: Vec<Block> = Vec::new();\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n parent = new_block.header().to_owned();\n\n chain.push(new_block);\n\n }\n\n\n\n let last_cell_base = &chain.last().unwrap().transactions()[0];\n\n let tx1 = create_transaction(last_cell_base.hash(), 1);\n\n let tx1_hash = tx1.hash();\n", "file_path": "chain/src/tests/basic.rs", "rank": 13, "score": 183960.96635451505 }, { "content": "#[test]\n\nfn test_invalid_out_point_index_in_same_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n let switch_fork_number = 10;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n", "file_path": "chain/src/tests/delay_verify.rs", "rank": 14, "score": 183960.96635451505 }, { "content": "#[test]\n\nfn test_invalid_out_point_index_in_different_blocks() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n let switch_fork_number = 10;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n", "file_path": "chain/src/tests/delay_verify.rs", "rank": 15, "score": 179398.61514878293 }, { "content": "fn new_transaction(\n\n relayer: &Relayer<ChainKVStore<MemoryKeyValueDB>>,\n\n 
index: usize,\n\n always_success_out_point: &OutPoint,\n\n) -> Transaction {\n\n let previous_output = {\n\n let chain_state = relayer.shared.shared().chain_state().lock();\n\n let tip_hash = chain_state.tip_hash();\n\n let block = relayer\n\n .shared\n\n .shared()\n\n .block(&tip_hash)\n\n .expect(\"getting tip block\");\n\n let cellbase = block\n\n .transactions()\n\n .first()\n\n .expect(\"getting cellbase from tip block\");\n\n cellbase.output_pts()[0].clone()\n\n };\n\n\n", "file_path": "sync/src/relayer/tests/compact_block_process.rs", "rank": 16, "score": 175587.89106517046 }, { "content": "fn create_cellbase_transaction_with_capacity(capacity: Capacity) -> Transaction {\n\n TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(0))\n\n .output(CellOutput::new(\n\n capacity,\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n ))\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 17, "score": 167219.8556498758 }, { "content": "fn bench(c: &mut Criterion) {\n\n let txs_sizes = vec![100usize, 200, 500, 1000];\n\n\n\n // benchmark processing 20 blocks on main branch\n\n c.bench_function_over_inputs(\n\n \"main_branch\",\n\n |b, txs_size| {\n\n b.iter_with_setup(\n\n || {\n\n let (chain, shared, dir, system_cell_hash, data_hash) = new_chain(*txs_size);\n\n let mut blocks = vec![shared.block(&shared.genesis_hash()).unwrap()];\n\n (0..20).for_each(|_| {\n\n let parent_index = blocks.len() - 1;\n\n gen_block(&mut blocks, parent_index, &system_cell_hash, &data_hash);\n\n });\n\n (chain, blocks, dir)\n\n },\n\n |(chain, blocks, _dir)| {\n\n blocks.into_iter().skip(1).for_each(|block| {\n\n chain\n", "file_path": "benches/benches/process_block.rs", "rank": 18, "score": 164362.59687485787 }, { "content": "fn create_cellbase(number: BlockNumber) -> Transaction {\n\n let (_, always_success_script) = create_always_success_cell();\n\n TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .output(CellOutput::new(\n\n capacity_bytes!(5000),\n\n Bytes::default(),\n\n always_success_script,\n\n None,\n\n ))\n\n .build()\n\n}\n\n\n\npub(crate) fn gen_block(\n\n parent_header: &Header,\n\n difficulty: U256,\n\n transactions: Vec<Transaction>,\n\n proposals: Vec<Transaction>,\n\n uncles: Vec<UncleBlock>,\n\n) -> Block {\n", "file_path": "chain/src/tests/util.rs", "rank": 19, "score": 162830.69864978534 }, { "content": "#[test]\n\nfn test_reconstruct_block() {\n\n let (relayer, always_success_out_point) = build_chain(5);\n\n let prepare: Vec<Transaction> = (0..20)\n\n .map(|i| new_transaction(&relayer, i, &always_success_out_point))\n\n .collect();\n\n\n\n // Case: miss tx.0\n\n {\n\n let mut compact = CompactBlock {\n\n nonce: 2,\n\n ..Default::default()\n\n };\n\n let (key0, key1) = short_transaction_id_keys(compact.header.nonce(), compact.nonce);\n\n let short_ids = prepare\n\n .iter()\n\n .map(|tx| short_transaction_id(key0, key1, &tx.witness_hash()))\n\n .collect();\n\n let transactions: Vec<Transaction> = prepare.iter().skip(1).cloned().collect();\n\n compact.short_ids = short_ids;\n\n let chain_state = relayer.shared.chain_state().lock();\n", "file_path": "sync/src/relayer/tests/compact_block_process.rs", "rank": 20, "score": 161164.840362876 }, { "content": "fn create_cellbase(number: BlockNumber) -> Transaction {\n\n TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .output(CellOutput::new(\n\n Capacity::zero(),\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n 
))\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/uncle_verifier.rs", "rank": 21, "score": 158217.83624622342 }, { "content": "fn create_cellbase(number: BlockNumber) -> Transaction {\n\n TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .outputs(vec![CellOutput::new(\n\n Capacity::zero(),\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n )])\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/commit_verifier.rs", "rank": 22, "score": 158217.83624622342 }, { "content": "#[test]\n\npub fn test_block_without_cellbase() {\n\n let block = BlockBuilder::default()\n\n .transaction(TransactionBuilder::default().build())\n\n .build();\n\n let verifier = CellbaseVerifier::new();\n\n assert_eq!(\n\n verifier.verify(&block),\n\n Err(VerifyError::Cellbase(CellbaseError::InvalidQuantity))\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 23, "score": 157976.67486266847 }, { "content": "#[test]\n\npub fn test_block_with_two_cellbases() {\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction())\n\n .transaction(create_cellbase_transaction())\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert_eq!(\n\n verifier.verify(&block),\n\n Err(VerifyError::Cellbase(CellbaseError::InvalidQuantity))\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 24, "score": 157976.67486266847 }, { "content": "#[test]\n\nfn test_genesis_transaction_fetch() {\n\n let tx = TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::null(), 0, Default::default()))\n\n .outputs(vec![\n\n CellOutput::new(\n\n capacity_bytes!(100_000_000),\n\n Bytes::default(),\n\n Script::default(),\n\n None\n\n );\n\n 100\n\n ])\n\n .build();\n\n\n\n let root_hash = tx.hash().to_owned();\n\n\n\n let genesis_block = BlockBuilder::default()\n\n .transaction(tx)\n\n .header_builder(HeaderBuilder::default().difficulty(U256::from(1000u64)))\n\n .build();\n\n\n\n let consensus = Consensus::default().set_genesis_block(genesis_block);\n\n let (_chain_controller, shared) = start_chain(Some(consensus));\n\n\n\n let out_point = OutPoint::new_cell(root_hash, 0);\n\n let state = shared.chain_state().lock().cell(&out_point);\n\n assert!(state.is_live());\n\n}\n\n\n", "file_path": "chain/src/tests/basic.rs", "rank": 25, "score": 155663.1897285527 }, { "content": "#[test]\n\nfn test_genesis_transaction_spend() {\n\n let tx = TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::null(), 0, Default::default()))\n\n .outputs(vec![\n\n CellOutput::new(\n\n capacity_bytes!(100_000_000),\n\n Bytes::default(),\n\n Script::default(),\n\n None\n\n );\n\n 100\n\n ])\n\n .build();\n\n\n\n let mut root_hash = tx.hash().to_owned();\n\n\n\n let genesis_tx_hash = root_hash.clone();\n\n\n\n let genesis_block = BlockBuilder::default()\n\n .transaction(tx)\n", "file_path": "chain/src/tests/basic.rs", "rank": 26, "score": 155663.1897285527 }, { "content": "#[test]\n\nfn test_block_median_time() {\n\n let shared = new_shared();\n\n let chain_state = shared.chain_state().lock();\n\n assert_eq!((&*chain_state).block_median_time(0), Some(0));\n\n let now = faketime::unix_time_as_millis();\n\n insert_block_timestamps(shared.store(), &[now]);\n\n assert_eq!(\n\n (&*chain_state).block_median_time(1).expect(\"median time\"),\n\n now\n\n );\n\n let timestamps = (1..=22).collect::<Vec<_>>();\n\n insert_block_timestamps(shared.store(), &timestamps);\n\n assert_eq!(\n\n (&*chain_state)\n\n 
.block_median_time(*timestamps.last().expect(\"last\"))\n\n .expect(\"median time\"),\n\n 17\n\n );\n\n}\n", "file_path": "shared/src/tests/shared.rs", "rank": 27, "score": 155451.75264517448 }, { "content": "#[test]\n\npub fn test_max_block_bytes_verifier() {\n\n let block = BlockBuilder::default().build();\n\n let proof_size = 0usize;\n\n\n\n {\n\n let verifier =\n\n BlockBytesVerifier::new(block.serialized_size(proof_size) as u64, proof_size);\n\n assert_eq!(verifier.verify(&block), Ok(()));\n\n }\n\n\n\n {\n\n let verifier =\n\n BlockBytesVerifier::new(block.serialized_size(proof_size) as u64 - 1, proof_size);\n\n assert_eq!(\n\n verifier.verify(&block),\n\n Err(VerifyError::ExceededMaximumBlockBytes)\n\n );\n\n }\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 28, "score": 154641.28948121416 }, { "content": "#[test]\n\npub fn test_block_with_one_cellbase_at_first() {\n\n let transaction = create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction())\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 29, "score": 154641.28948121416 }, { "content": "#[test]\n\npub fn test_block_with_one_cellbase_at_last() {\n\n let block = BlockBuilder::default()\n\n .transaction(create_normal_transaction())\n\n .transaction(create_cellbase_transaction())\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert_eq!(\n\n verifier.verify(&block),\n\n Err(VerifyError::Cellbase(CellbaseError::InvalidPosition))\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 30, "score": 154641.28948121416 }, { "content": "#[test]\n\npub fn test_since() {\n\n // use remain flags\n\n let transaction = TransactionBuilder::default()\n\n .inputs(vec![CellInput::new(\n\n OutPoint::new_cell(h256!(\"0x1\"), 0),\n\n 0x2000_0000_0000_0000,\n\n Default::default(),\n\n )])\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction: &transaction,\n\n resolved_deps: Vec::new(),\n\n resolved_inputs: vec![ResolvedOutPoint::cell_only(CellMeta {\n\n block_number: Some(1),\n\n ..CellMeta::from(&CellOutput::new(\n\n capacity_bytes!(50),\n\n Bytes::new(),\n\n Script::default(),\n\n None,\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 31, "score": 152475.02422834517 }, { "content": "#[test]\n\npub fn test_empty() {\n\n let transaction = TransactionBuilder::default().build();\n\n let verifier = EmptyVerifier::new(&transaction);\n\n\n\n assert_eq!(verifier.verify().err(), Some(TransactionError::Empty));\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 32, "score": 152475.02422834517 }, { "content": "fn create_transaction(\n\n parent_hash: &H256,\n\n system_cell_hash: &H256,\n\n data_hash: &H256,\n\n) -> Transaction {\n\n TransactionBuilder::default()\n\n .output(CellOutput::new(\n\n capacity_bytes!(50_000),\n\n (0..255).collect(),\n\n Script::new(vec![(0..255).collect()], data_hash.to_owned()),\n\n None,\n\n ))\n\n .input(CellInput::new(\n\n OutPoint::new_cell(parent_hash.to_owned(), 0),\n\n 0,\n\n vec![],\n\n ))\n\n .dep(OutPoint::new_cell(system_cell_hash.to_owned(), 0))\n\n .build()\n\n}\n", "file_path": "benches/benches/process_block.rs", "rank": 33, "score": 151986.5084927053 }, { "content": "#[test]\n\nfn transaction_filter() {\n\n let mut filter = 
TransactionFilter::new(&[0; 8], 3, 1);\n\n let tx = TransactionBuilder::default().build();\n\n assert!(!filter.contains(&tx));\n\n filter.insert(&tx.hash());\n\n assert!(filter.contains(&tx));\n\n}\n", "file_path": "sync/src/tests/filter.rs", "rank": 34, "score": 151670.79207376612 }, { "content": "#[test]\n\nfn test_dead_cell_in_same_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n let switch_fork_number = 10;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n", "file_path": "chain/src/tests/delay_verify.rs", "rank": 36, "score": 151613.26536746923 }, { "content": "#[test]\n\npub fn test_cellbase_maturity() {\n\n let transaction = TransactionBuilder::default()\n\n .output(CellOutput::new(\n\n capacity_bytes!(50),\n\n vec![1; 51].into(),\n\n Script::default(),\n\n None,\n\n ))\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction: &transaction,\n\n resolved_deps: Vec::new(),\n\n resolved_inputs: vec![ResolvedOutPoint::cell_only(CellMeta {\n\n block_number: Some(30),\n\n cellbase: true,\n\n ..CellMeta::from(&CellOutput::new(\n\n capacity_bytes!(50),\n\n Bytes::new(),\n\n Script::default(),\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 37, "score": 148629.19091150464 }, { "content": "#[test]\n\npub fn test_capacity_outofbound() {\n\n let transaction = TransactionBuilder::default()\n\n .output(CellOutput::new(\n\n capacity_bytes!(50),\n\n Bytes::from(vec![1; 51]),\n\n Script::default(),\n\n None,\n\n ))\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction: &transaction,\n\n resolved_deps: Vec::new(),\n\n resolved_inputs: vec![ResolvedOutPoint::cell_only(CellMeta::from(\n\n &CellOutput::new(capacity_bytes!(50), Bytes::new(), Script::default(), None),\n\n ))],\n\n };\n\n let verifier = CapacityVerifier::new(&rtx);\n\n\n\n assert_eq!(\n\n verifier.verify().err(),\n\n Some(TransactionError::CapacityOverflow)\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 38, "score": 148629.19091150464 }, { "content": "#[test]\n\npub fn test_capacity_invalid() {\n\n let transaction = TransactionBuilder::default()\n\n .outputs(vec![\n\n CellOutput::new(\n\n capacity_bytes!(50),\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n ),\n\n CellOutput::new(\n\n capacity_bytes!(100),\n\n Bytes::default(),\n\n Script::default(),\n\n None,\n\n ),\n\n ])\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction: &transaction,\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 39, "score": 148629.19091150464 }, { "content": "#[test]\n\npub fn test_duplicate_deps() {\n\n let transaction = TransactionBuilder::default()\n\n .deps(vec![\n\n OutPoint::new_cell(h256!(\"0x1\"), 0),\n\n OutPoint::new_cell(h256!(\"0x1\"), 0),\n\n ])\n\n .build();\n\n\n\n let verifier = DuplicateDepsVerifier::new(&transaction);\n\n\n\n assert_eq!(\n\n verifier.verify().err(),\n\n Some(TransactionError::DuplicateDeps)\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 40, "score": 148629.19091150464 }, { 
"content": "#[test]\n\npub fn test_cellbase_with_fee() {\n\n let transaction = create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction_with_capacity(capacity_bytes!(\n\n 110\n\n )))\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 41, "score": 148425.09986726166 }, { "content": "#[test]\n\nfn test_dead_cell_in_different_block() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n let switch_fork_number = 10;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n", "file_path": "chain/src/tests/delay_verify.rs", "rank": 42, "score": 148007.35198308635 }, { "content": "fn create_transaction(\n\n parent: &H256,\n\n always_success_script: &Script,\n\n always_success_out_point: &OutPoint,\n\n) -> Transaction {\n\n let capacity = 100_000_000 / 100 as usize;\n\n let output = CellOutput::new(\n\n Capacity::bytes(capacity).unwrap(),\n\n Bytes::default(),\n\n always_success_script.to_owned(),\n\n Some(always_success_script.to_owned()),\n\n );\n\n let inputs: Vec<CellInput> = (0..100)\n\n .map(|index| CellInput::new(OutPoint::new_cell(parent.clone(), index), 0, vec![]))\n\n .collect();\n\n\n\n TransactionBuilder::default()\n\n .inputs(inputs)\n\n .outputs(vec![output; 100])\n\n .dep(always_success_out_point.to_owned())\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/commit_verifier.rs", "rank": 43, "score": 147072.2368158555 }, { "content": "fn gen_block(\n\n parent_header: &Header,\n\n transactions: Vec<Transaction>,\n\n proposals: Vec<ProposalShortId>,\n\n uncles: Vec<UncleBlock>,\n\n) -> Block {\n\n let now = 1 + parent_header.timestamp();\n\n let number = parent_header.number() + 1;\n\n let nonce = parent_header.nonce() + 1;\n\n let difficulty = parent_header.difficulty() + U256::from(1u64);\n\n let cellbase = create_cellbase(number);\n\n let header_builder = HeaderBuilder::default()\n\n .parent_hash(parent_header.hash().to_owned())\n\n .timestamp(now)\n\n .number(number)\n\n .difficulty(difficulty)\n\n .nonce(nonce);\n\n\n\n BlockBuilder::default()\n\n .transaction(cellbase)\n\n .transactions(transactions)\n\n .proposals(proposals)\n\n .uncles(uncles)\n\n .header_builder(header_builder)\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/commit_verifier.rs", "rank": 44, "score": 146852.90512245503 }, { "content": "fn new_cellbase(number: BlockNumber, always_success_script: &Script) -> Transaction {\n\n let outputs = (0..1)\n\n .map(|_| {\n\n CellOutput::new(\n\n capacity_bytes!(500000),\n\n Bytes::default(),\n\n always_success_script.to_owned(),\n\n None,\n\n )\n\n })\n\n .collect::<Vec<_>>();\n\n TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .outputs(outputs)\n\n .build()\n\n}\n\n\n", "file_path": "rpc/src/test.rs", "rank": 45, "score": 146628.2754099444 }, { "content": "#[test]\n\npub fn test_cellbase_with_less_reward() {\n\n let transaction = 
create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction_with_capacity(capacity_bytes!(\n\n 50\n\n )))\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 46, "score": 144819.18648287878 }, { "content": "fn cal_transactions_root(vec: &[Transaction]) -> H256 {\n\n merkle_root(\n\n &vec.iter()\n\n .map(Transaction::hash)\n\n .map(ToOwned::to_owned)\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n\n\n", "file_path": "core/src/block.rs", "rank": 49, "score": 144450.80054011225 }, { "content": "fn serialized_transaction_size(\n\n tx: &Transaction,\n\n) -> Result<(TransactionHeader, Vec<CellOutputAddress>)> {\n\n let config = bincode::config();\n\n let mut header = [0; TRANSACTION_FIELDS_SIZE];\n\n let size_header = config.serialized_size(&header)? as usize;\n\n let size_version = config.serialized_size(&tx.version())? as usize;\n\n let size_deps = config.serialized_size(tx.deps())? as usize;\n\n let size_inputs = config.serialized_size(tx.inputs())? as usize;\n\n let (size_outputs, output_addresses) = tx\n\n .outputs()\n\n .iter()\n\n .map(|output| config.serialized_size(output).map(|len| len as usize))\n\n .collect::<Result<Vec<usize>>>()?\n\n .into_iter()\n\n .fold(\n\n (0, Vec::with_capacity(tx.outputs().len())),\n\n |(offset, mut addresses), size| {\n\n addresses.push(CellOutputAddress {\n\n offset,\n", "file_path": "store/src/flat_block_body.rs", "rank": 50, "score": 143108.44188065964 }, { "content": "#[test]\n\npub fn test_skip_dao_capacity_check() {\n\n let transaction = TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::new_issuing_dao(), 0, vec![]))\n\n .output(CellOutput::new(\n\n capacity_bytes!(500),\n\n Bytes::from(vec![1; 10]),\n\n Script::default(),\n\n None,\n\n ))\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction: &transaction,\n\n resolved_deps: Vec::new(),\n\n resolved_inputs: vec![ResolvedOutPoint::issuing_dao()],\n\n };\n\n let verifier = CapacityVerifier::new(&rtx);\n\n\n\n assert!(verifier.verify().is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 51, "score": 141615.65636472503 }, { "content": "#[test]\n\npub fn test_max_proposals_limit_verifier() {\n\n let block = BlockBuilder::default()\n\n .proposal(ProposalShortId::zero())\n\n .build();\n\n\n\n {\n\n let verifier = BlockProposalsLimitVerifier::new(1);\n\n assert_eq!(verifier.verify(&block), Ok(()));\n\n }\n\n\n\n {\n\n let verifier = BlockProposalsLimitVerifier::new(0);\n\n assert_eq!(\n\n verifier.verify(&block),\n\n Err(VerifyError::ExceededMaximumProposalsLimit)\n\n );\n\n }\n\n}\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 52, "score": 141424.82553678704 }, { "content": "fn create_always_success_tx() -> Transaction {\n\n let (always_success_cell, _) = create_always_success_cell();\n\n TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::null(), 0, Default::default()))\n\n .output(always_success_cell)\n\n .build()\n\n}\n\n\n", "file_path": "chain/src/tests/util.rs", "rank": 53, "score": 135655.9355799684 }, { "content": "fn gen_block(parent_header: &Header, nonce: u64, epoch: &EpochExt) -> Block {\n\n let now = 1 + parent_header.timestamp();\n\n let number = parent_header.number() + 1;\n\n let cellbase = create_cellbase(number);\n\n let header_builder = HeaderBuilder::default()\n\n 
.parent_hash(parent_header.hash().to_owned())\n\n .timestamp(now)\n\n .epoch(epoch.number())\n\n .number(number)\n\n .difficulty(epoch.difficulty().clone())\n\n .nonce(nonce);\n\n\n\n BlockBuilder::default()\n\n .transaction(cellbase)\n\n .proposal(ProposalShortId::from_slice(&[1; 10]).unwrap())\n\n .header_builder(header_builder)\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/uncle_verifier.rs", "rank": 54, "score": 135024.3249791431 }, { "content": "fn new_header_builder(\n\n shared: &Shared<ChainKVStore<MemoryKeyValueDB>>,\n\n parent: &Block,\n\n) -> HeaderBuilder {\n\n let parent_hash = parent.header().hash();\n\n let parent_epoch = shared.get_block_epoch(&parent_hash).unwrap();\n\n let epoch = shared\n\n .next_epoch_ext(&parent_epoch, parent.header())\n\n .unwrap_or(parent_epoch);\n\n HeaderBuilder::default()\n\n .parent_hash(parent_hash.to_owned())\n\n .number(parent.header().number() + 1)\n\n .timestamp(parent.header().timestamp() + 1)\n\n .epoch(epoch.number())\n\n .difficulty(epoch.difficulty().to_owned())\n\n}\n\n\n", "file_path": "sync/src/relayer/tests/compact_block_process.rs", "rank": 55, "score": 134947.20670818232 }, { "content": "pub fn wait_until<F>(secs: u64, mut f: F) -> bool\n\nwhere\n\n F: FnMut() -> bool,\n\n{\n\n let start = Instant::now();\n\n let timeout = Duration::new(secs, 0);\n\n while Instant::now().duration_since(start) <= timeout {\n\n if f() {\n\n return true;\n\n }\n\n sleep(Duration::new(1, 0));\n\n }\n\n false\n\n}\n", "file_path": "test/src/utils.rs", "rank": 56, "score": 134440.98043067261 }, { "content": "fn build_chain(tip: BlockNumber) -> (Relayer<ChainKVStore<MemoryKeyValueDB>>, OutPoint) {\n\n let (always_success_cell, always_success_script) = create_always_success_cell();\n\n let always_success_tx = TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::null(), 0, Default::default()))\n\n .output(always_success_cell)\n\n .build();\n\n let always_success_out_point = OutPoint::new_cell(always_success_tx.hash().to_owned(), 0);\n\n\n\n let shared = {\n\n let genesis = BlockBuilder::from_header_builder(\n\n HeaderBuilder::default()\n\n .timestamp(unix_time_as_millis())\n\n .difficulty(U256::from(1000u64)),\n\n )\n\n .transaction(always_success_tx)\n\n .build();\n\n let consensus = Consensus::default()\n\n .set_genesis_block(genesis)\n\n .set_cellbase_maturity(0);\n\n SharedBuilder::<MemoryKeyValueDB>::new()\n", "file_path": "sync/src/relayer/tests/compact_block_process.rs", "rank": 57, "score": 133135.07072879496 }, { "content": "fn cal_witnesses_root(vec: &[Transaction]) -> H256 {\n\n // The witness hash of cellbase transaction is assumed to be zero 0x0000....0000\n\n let mut witnesses = vec![H256::zero()];\n\n witnesses.extend(\n\n vec.iter()\n\n .skip(1)\n\n .map(Transaction::witness_hash)\n\n .map(ToOwned::to_owned),\n\n );\n\n merkle_root(&witnesses[..])\n\n}\n\n\n\npub(crate) fn cal_proposals_hash(vec: &[ProposalShortId]) -> H256 {\n\n if vec.is_empty() {\n\n H256::zero()\n\n } else {\n\n let mut ret = [0u8; 32];\n\n let mut blake2b = new_blake2b();\n\n for id in vec.iter() {\n\n blake2b.update(&(&id as &[u8; 10])[..]);\n", "file_path": "core/src/block.rs", "rank": 58, "score": 131308.29250998207 }, { "content": "fn bench(c: &mut Criterion) {\n\n c.bench_function(\"bench_solve\", |b| {\n\n let cuckoo = Cuckoo::new(6, 8);\n\n b.iter(|| {\n\n for _ in 0..100 {\n\n for (message, _) in TESTSET.iter() {\n\n cuckoo.solve(message).unwrap();\n\n }\n\n }\n\n })\n\n });\n\n\n\n c.bench_function(\"bench_verify\", |b| {\n\n let 
cuckoo = Cuckoo::new(6, 8);\n\n b.iter(|| {\n\n for _ in 0..100 {\n\n for (message, proof) in TESTSET.iter() {\n\n cuckoo.verify(message, proof);\n\n }\n\n }\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench);\n\ncriterion_main!(benches);\n", "file_path": "benches/benches/cuckoo.rs", "rank": 59, "score": 127958.13244879534 }, { "content": "fn main() {\n\n // forward git repo hashes we build at\n\n println!(\n\n \"cargo:rustc-env=COMMIT_DESCRIBE={}\",\n\n build_info::get_commit_describe().unwrap_or_default()\n\n );\n\n println!(\n\n \"cargo:rustc-env=COMMIT_DATE={}\",\n\n build_info::get_commit_date().unwrap_or_default()\n\n );\n\n}\n", "file_path": "build.rs", "rank": 60, "score": 126393.4386012933 }, { "content": "#[test]\n\nfn test_rpc() {\n\n // Set `print_mode = true` manually to print the result of rpc test cases.\n\n // It is useful when we just want get the actual results.\n\n let print_mode = false;\n\n\n\n // Setup node\n\n let height = 1024;\n\n let (_shared, _chain_controller, server) = setup_node(height);\n\n\n\n // Load cases in json format and run\n\n let mut cases: Value = {\n\n let mut file_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n file_path.push(\"json/rpc.json\");\n\n let file = File::open(file_path).expect(\"opening test data json\");\n\n from_reader(file).expect(\"reading test data json\")\n\n };\n\n let mut outputs: Vec<Value> = Vec::new();\n\n\n\n // Run cases one by one\n\n let client = reqwest::Client::new();\n", "file_path": "rpc/src/test.rs", "rank": 61, "score": 125946.17769415653 }, { "content": "fn gen_block(\n\n blocks: &mut Vec<Block>,\n\n parent_index: usize,\n\n system_cell_hash: &H256,\n\n data_hash: &H256,\n\n) {\n\n let p_block = &blocks[parent_index];\n\n\n\n let (number, timestamp, difficulty) = (\n\n p_block.header().number() + 1,\n\n p_block.header().timestamp() + 10000,\n\n p_block.header().difficulty() + U256::from(1u64),\n\n );\n\n\n\n let mut cell_output = CellOutput::default();\n\n cell_output.capacity = cell_output.occupied_capacity().unwrap();\n\n\n\n let cellbase = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .output(cell_output)\n", "file_path": "benches/benches/process_block.rs", "rank": 62, "score": 123320.94308745756 }, { "content": "fn main() {\n\n let mut bundled = includedir_codegen::start(\"BUNDLED\");\n\n\n\n for f in &[\"ckb.toml\", \"ckb-miner.toml\"] {\n\n bundled\n\n .add_file(f, Compression::Gzip)\n\n .expect(\"add files to resource bundle\");\n\n }\n\n for entry in WalkDir::new(\"specs\").follow_links(true).into_iter() {\n\n match entry {\n\n Ok(ref e)\n\n if !e.file_type().is_dir() && !e.file_name().to_string_lossy().starts_with(\".\") =>\n\n {\n\n bundled\n\n .add_file(e.path(), Compression::Gzip)\n\n .expect(\"add files to resource bundle\");\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n bundled.build(\"bundled.rs\").expect(\"build resource bundle\");\n\n}\n", "file_path": "resource/build.rs", "rank": 63, "score": 122103.55087671126 }, { "content": "fn bench(c: &mut Criterion) {\n\n let tmp_dir = tempfile::Builder::new().tempdir().unwrap();\n\n let config = DBConfig {\n\n path: tmp_dir.as_ref().to_path_buf(),\n\n ..Default::default()\n\n };\n\n\n\n let test_data = {\n\n let db = RocksDB::open(&config, COLUMNS);\n\n let store = ChainKVStore::new(db);\n\n\n\n let output = CellOutput::default();\n\n let tx1 = TransactionBuilder::default().output(output.clone()).build();\n\n let tx50 = TransactionBuilder::default()\n\n .outputs(vec![output.clone(); 50])\n\n .build();\n\n let 
tx100 = TransactionBuilder::default()\n\n .outputs(vec![output.clone(); 100])\n\n .build();\n\n let tx300 = TransactionBuilder::default()\n", "file_path": "benches/benches/fetch_cell_output.rs", "rank": 64, "score": 122013.69476423156 }, { "content": "#[test]\n\nfn test_proposal() {\n\n let (\n\n chain_controller,\n\n shared,\n\n mut prev_tx_hash,\n\n always_success_script,\n\n always_success_out_point,\n\n ) = setup_env();\n\n\n\n let mut txs20 = Vec::new();\n\n for _ in 0..20 {\n\n let tx = create_transaction(\n\n &prev_tx_hash,\n\n &always_success_script,\n\n &always_success_out_point,\n\n );\n\n txs20.push(tx.clone());\n\n prev_tx_hash = tx.hash().to_owned();\n\n }\n\n\n", "file_path": "verification/src/tests/commit_verifier.rs", "rank": 65, "score": 120059.12838693956 }, { "content": "fn sort_then_drop<T, F>(list: &mut Vec<T>, n: usize, compare: F)\n\nwhere\n\n F: FnMut(&T, &T) -> std::cmp::Ordering,\n\n{\n\n list.sort_by(compare);\n\n if list.len() > n {\n\n list.truncate(list.len() - n);\n\n }\n\n}\n\n\n\nimpl PeerRegistry {\n\n pub fn new(\n\n max_inbound: u32,\n\n max_outbound: u32,\n\n reserved_only: bool,\n\n reserved_peers: Vec<PeerId>,\n\n ) -> Self {\n\n let mut reserved_peers_set =\n\n FnvHashSet::with_capacity_and_hasher(reserved_peers.len(), Default::default());\n\n for reserved_peer in reserved_peers {\n", "file_path": "network/src/peer_registry.rs", "rank": 66, "score": 119717.55728182968 }, { "content": "#[test]\n\nfn test_bootnodes() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n assert!(peer_store.bootnodes(1).is_empty());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n peer_store.add_bootnode(peer_id.clone(), addr.clone());\n\n assert_eq!(peer_store.bootnodes(2).len(), 1);\n\n let peer_id2 = PeerId::random();\n\n peer_store.add_discovered_addr(&peer_id2, addr.clone());\n\n assert_eq!(\n\n peer_store.bootnodes(3),\n\n vec![(peer_id2, addr.clone()), (peer_id, addr)]\n\n );\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 67, "score": 117385.40918798544 }, { "content": "#[test]\n\nfn test_chain_fork_by_hash() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n", "file_path": "chain/src/tests/basic.rs", "rank": 68, "score": 117385.40918798544 }, { "content": "#[test]\n\nfn test_report() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n assert!(peer_store.report(&peer_id, Behaviour::TestGood).is_ok());\n\n assert!(\n\n peer_store.peer_score(&peer_id).expect(\"peer score\")\n\n > peer_store.peer_score_config().default_score\n\n );\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 69, "score": 117385.40918798544 }, { "content": "#[test]\n\nfn test_next_epoch_ext() {\n\n let genesis_block = BlockBuilder::default()\n\n .header_builder(HeaderBuilder::default().difficulty(U256::from(1000u64)))\n\n .build();\n\n let mut consensus = 
Consensus::default().set_genesis_block(genesis_block);\n\n consensus.genesis_epoch_ext.set_length(400);\n\n let epoch = consensus.genesis_epoch_ext.clone();\n\n\n\n let (chain_controller, shared) = start_chain(Some(consensus.clone()));\n\n let final_number = shared.consensus().genesis_epoch_ext().length();\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n let mut last_epoch = epoch.clone();\n\n\n\n for _ in 1..final_number - 1 {\n\n let epoch = shared\n\n .next_epoch_ext(&last_epoch, &parent)\n", "file_path": "chain/src/tests/basic.rs", "rank": 70, "score": 117385.40918798544 }, { "content": "#[test]\n\nfn test_uncle_proposal() {\n\n let (\n\n chain_controller,\n\n shared,\n\n mut prev_tx_hash,\n\n always_success_script,\n\n always_success_out_point,\n\n ) = setup_env();\n\n\n\n let mut txs20 = Vec::new();\n\n for _ in 0..20 {\n\n let tx = create_transaction(\n\n &prev_tx_hash,\n\n &always_success_script,\n\n &always_success_out_point,\n\n );\n\n txs20.push(tx.clone());\n\n prev_tx_hash = tx.hash().to_owned();\n\n }\n\n\n", "file_path": "verification/src/tests/commit_verifier.rs", "rank": 71, "score": 117385.40918798544 }, { "content": "#[test]\n\nfn test_chain_get_ancestor() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n", "file_path": "chain/src/tests/basic.rs", "rank": 72, "score": 117385.40918798544 }, { "content": "#[cfg(not(disable_faketime))]\n\n#[test]\n\nfn test_uncle_verifier() {\n\n let faketime_file = faketime::millis_tempfile(0).expect(\"create faketime file\");\n\n faketime::enable(&faketime_file);\n\n\n\n let mut consensus = Consensus::default();\n\n consensus.genesis_epoch_ext.set_length(10);\n\n\n\n let (chain_controller, shared) = start_chain(Some(consensus));\n\n\n\n let number = 20;\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n faketime::write_millis(&faketime_file, 10).expect(\"write millis\");\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for i in 1..number {\n\n let parent_epoch = shared.get_block_epoch(&parent.hash()).unwrap();\n\n let epoch = shared\n\n .next_epoch_ext(&parent_epoch, &parent)\n", "file_path": "verification/src/tests/uncle_verifier.rs", "rank": 73, "score": 117385.18657652895 }, { "content": "fn main() {\n\n let log_config = Config {\n\n filter: Some(\"info\".to_owned()),\n\n ..Default::default()\n\n };\n\n let _logger_guard = logger::init(log_config).expect(\"init Logger\");\n\n\n\n let binary = env::args()\n\n .nth(1)\n\n .unwrap_or_else(|| \"../target/release/ckb\".to_string());\n\n let start_port = env::args()\n\n .nth(2)\n\n .unwrap_or_else(|| \"9000\".to_string())\n\n .parse()\n\n .expect(\"invalid port number\");\n\n if let Some(spec_name) = env::args().nth(3) {\n\n let spec: Box<Spec> = match &spec_name[..] 
{\n\n \"block_relay_basic\" => Box::new(BlockRelayBasic),\n\n \"block_sync_basic\" => Box::new(BlockSyncBasic),\n\n \"mining_basic\" => Box::new(MiningBasic),\n", "file_path": "test/src/main.rs", "rank": 74, "score": 117293.4871748294 }, { "content": "#[test]\n\npub fn capacity() {\n\n let v = vec![0u8; 7];\n\n assert_eq!(v.occupied_capacity().unwrap(), capacity_bytes!(7));\n\n let u = 0u32;\n\n assert_eq!(u.occupied_capacity().unwrap(), capacity_bytes!(4));\n\n let a = StructA {\n\n f1: 1,\n\n f2: v.clone(),\n\n };\n\n assert_eq!(a.occupied_capacity().unwrap(), capacity_bytes!(11));\n\n let a1 = StructA1 {\n\n f1: 2,\n\n f2: v.clone(),\n\n _f3: v.clone(),\n\n };\n\n assert_eq!(a1.occupied_capacity().unwrap(), capacity_bytes!(11));\n\n let b = StructB(3, v.clone());\n\n assert_eq!(b.occupied_capacity().unwrap(), capacity_bytes!(11));\n\n let b1 = StructB1(4, v.clone(), v.clone());\n\n assert_eq!(b1.occupied_capacity().unwrap(), capacity_bytes!(11));\n\n}\n", "file_path": "util/occupied-capacity/tests/tests.rs", "rank": 75, "score": 116870.96288673201 }, { "content": "fn prepare_epoch_ext<P: ChainProvider>(provider: &P, block: &Block) -> Result<EpochExt, Error> {\n\n if block.is_genesis() {\n\n return Ok(provider.consensus().genesis_epoch_ext().to_owned());\n\n }\n\n let parent_hash = block.header().parent_hash();\n\n let parent_ext = provider\n\n .get_block_epoch(parent_hash)\n\n .ok_or_else(|| Error::UnknownParent(parent_hash.clone()))?;\n\n let parent = provider\n\n .block_header(parent_hash)\n\n .ok_or_else(|| Error::UnknownParent(parent_hash.clone()))?;\n\n Ok(provider\n\n .next_epoch_ext(&parent_ext, &parent)\n\n .unwrap_or(parent_ext))\n\n}\n\n\n\nimpl<P> BlockVerifier<P>\n\nwhere\n\n P: ChainProvider + Clone,\n\n{\n", "file_path": "verification/src/block_verifier.rs", "rank": 76, "score": 115293.83171295418 }, { "content": "type TransactionHeader = [usize; 9];\n\n\n\n/// Address of a CellOutput.\n\n#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Hash, Debug)]\n\npub(crate) struct CellOutputAddress {\n\n /// Offset in Block Body.\n\n pub offset: usize,\n\n /// Length.\n\n pub length: usize,\n\n}\n\n\n\n/// Address of a Transaction.\n\n#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Hash, Debug)]\n\npub(crate) struct TransactionAddressInner {\n\n /// Index in Block.\n\n pub index: usize,\n\n /// Offset in Block Body.\n\n pub offset: usize,\n\n /// Length.\n\n pub length: usize,\n", "file_path": "store/src/flat_block_body.rs", "rank": 77, "score": 114885.96225922438 }, { "content": "#[test]\n\nfn test_update_status() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n peer_store.update_status(&peer_id, Status::Connected);\n\n assert_eq!(peer_store.peer_status(&peer_id), Status::Unknown);\n\n let addr = \"/ip4/127.0.0.1\".parse().unwrap();\n\n peer_store.add_connected_peer(&peer_id, addr, SessionType::Inbound);\n\n peer_store.update_status(&peer_id, Status::Connected);\n\n assert_eq!(peer_store.peer_status(&peer_id), Status::Connected);\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 78, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_find_fork_case1() {\n\n let builder = SharedBuilder::<MemoryKeyValueDB>::new();\n\n let shared = builder.consensus(Consensus::default()).build().unwrap();\n\n let notify = NotifyService::default().start::<&str>(None);\n\n let mut chain_service = ChainService::new(shared.clone(), notify);\n\n let genesis = 
shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n\n\n let mut fork1: Vec<Block> = Vec::new();\n\n let mut fork2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..4 {\n\n let new_block = gen_block(&parent, U256::from(100u64), vec![], vec![], vec![]);\n\n fork1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..3 {\n\n let new_block = gen_block(&parent, U256::from(90u64), vec![], vec![], vec![]);\n", "file_path": "chain/src/tests/find_fork.rs", "rank": 79, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_find_fork_case4() {\n\n let builder = SharedBuilder::<MemoryKeyValueDB>::new();\n\n let shared = builder.consensus(Consensus::default()).build().unwrap();\n\n let notify = NotifyService::default().start::<&str>(None);\n\n let mut chain_service = ChainService::new(shared.clone(), notify);\n\n\n\n let genesis = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n\n\n let mut fork1: Vec<Block> = Vec::new();\n\n let mut fork2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..5 {\n\n let new_block = gen_block(&parent, U256::from(40u64), vec![], vec![], vec![]);\n\n fork1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..2 {\n", "file_path": "chain/src/tests/find_fork.rs", "rank": 80, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_ban_peer() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n peer_store.ban_peer(&peer_id, Duration::from_secs(10));\n\n assert!(!peer_store.is_banned(&peer_id));\n\n let addr = \"/ip4/127.0.0.1\".parse().unwrap();\n\n peer_store.add_connected_peer(&peer_id, addr, SessionType::Inbound);\n\n peer_store.ban_peer(&peer_id, Duration::from_secs(10));\n\n assert!(peer_store.is_banned(&peer_id));\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 81, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_chain_fork_by_total_difficulty() {\n\n let (chain_controller, shared) = start_chain(None);\n\n let final_number = 20;\n\n\n\n let mut chain1: Vec<Block> = Vec::new();\n\n let mut chain2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n for _ in 1..final_number {\n\n let difficulty = parent.difficulty().to_owned();\n\n let new_block = gen_block(\n\n &parent,\n\n difficulty + U256::from(100u64),\n\n vec![],\n\n vec![],\n\n vec![],\n\n );\n\n chain1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n", "file_path": "chain/src/tests/basic.rs", "rank": 82, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_find_fork_case2() {\n\n let builder = SharedBuilder::<MemoryKeyValueDB>::new();\n\n let shared = builder.consensus(Consensus::default()).build().unwrap();\n\n let notify = NotifyService::default().start::<&str>(None);\n\n let mut chain_service = ChainService::new(shared.clone(), notify);\n\n\n\n let genesis = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n\n\n let mut fork1: Vec<Block> = Vec::new();\n\n let mut fork2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..4 {\n\n let new_block = gen_block(&parent, U256::from(100u64), vec![], vec![], vec![]);\n\n fork1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n\n\n\n 
let mut parent = fork1[0].header().to_owned();\n\n for _ in 0..2 {\n", "file_path": "chain/src/tests/find_fork.rs", "rank": 83, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_peers_to_attempt() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n assert!(peer_store.peers_to_attempt(1).is_empty());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n peer_store.add_bootnode(peer_id.clone(), addr.clone());\n\n assert!(peer_store.peers_to_attempt(1).is_empty());\n\n let peer_id2 = PeerId::random();\n\n peer_store.add_discovered_addr(&peer_id2, addr.clone());\n\n assert_eq!(peer_store.peers_to_attempt(2).len(), 1);\n\n peer_store.update_status(&peer_id2, Status::Connected);\n\n assert!(peer_store.peers_to_attempt(1).is_empty());\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 84, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_find_fork_case3() {\n\n let builder = SharedBuilder::<MemoryKeyValueDB>::new();\n\n let shared = builder.consensus(Consensus::default()).build().unwrap();\n\n let notify = NotifyService::default().start::<&str>(None);\n\n let mut chain_service = ChainService::new(shared.clone(), notify);\n\n\n\n let genesis = shared.block_header(&shared.block_hash(0).unwrap()).unwrap();\n\n\n\n let mut fork1: Vec<Block> = Vec::new();\n\n let mut fork2: Vec<Block> = Vec::new();\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..3 {\n\n let new_block = gen_block(&parent, U256::from(80u64), vec![], vec![], vec![]);\n\n fork1.push(new_block.clone());\n\n parent = new_block.header().to_owned();\n\n }\n\n\n\n let mut parent = genesis.clone();\n\n for _ in 0..5 {\n", "file_path": "chain/src/tests/find_fork.rs", "rank": 85, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_peers_to_feeler() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n assert!(peer_store.peers_to_feeler(1).is_empty());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n peer_store.add_bootnode(peer_id.clone(), addr.clone());\n\n assert!(peer_store.peers_to_feeler(1).is_empty());\n\n let peer_id2 = PeerId::random();\n\n peer_store.add_discovered_addr(&peer_id2, addr.clone());\n\n assert_eq!(peer_store.peers_to_feeler(2).len(), 1);\n\n peer_store.update_status(&peer_id2, Status::Connected);\n\n assert!(peer_store.peers_to_feeler(1).is_empty());\n\n peer_store.update_status(&peer_id2, Status::Unknown);\n\n assert_eq!(peer_store.peers_to_feeler(2).len(), 1);\n\n // peer does not need feeler if it connected to us recently\n\n peer_store.add_connected_peer(&peer_id2, addr.clone(), SessionType::Inbound);\n\n peer_store.update_status(&peer_id2, Status::Unknown);\n\n assert!(peer_store.peers_to_feeler(1).is_empty());\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 86, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_random_peers() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n assert!(peer_store.random_peers(1).is_empty());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n peer_store.add_bootnode(peer_id.clone(), addr.clone());\n\n assert!(peer_store.random_peers(1).is_empty());\n\n let peer_id2 = PeerId::random();\n\n peer_store.add_discovered_addr(&peer_id2, addr.clone());\n\n // random should not return peer that we have never connected to\n\n 
assert!(peer_store.random_peers(1).is_empty());\n\n peer_store.add_connected_peer(&peer_id2, addr.clone(), SessionType::Inbound);\n\n assert_eq!(peer_store.random_peers(2).len(), 1);\n\n peer_store.update_status(&peer_id2, Status::Connected);\n\n assert_eq!(peer_store.random_peers(1).len(), 1);\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 87, "score": 114869.4753824747 }, { "content": "#[test]\n\nfn test_attepmt_ban() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n peer_store.add_connected_peer(&peer_id, addr.clone(), SessionType::Inbound);\n\n peer_store.add_discovered_addr(&peer_id, addr.clone());\n\n assert_eq!(peer_store.peers_to_attempt(2).len(), 1);\n\n peer_store.ban_peer(&peer_id, Duration::from_secs(10));\n\n assert_eq!(peer_store.peers_to_attempt(2).len(), 0);\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 88, "score": 114869.4753824747 }, { "content": "fn random_order_benchmark(c: &mut Criterion) {\n\n {\n\n let mut peer_store = SqlitePeerStore::memory().expect(\"temp\");\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n {\n\n for _ in 0..8000 {\n\n let peer_id = PeerId::random();\n\n peer_store.add_connected_peer(&peer_id, addr.clone(), SessionType::Outbound);\n\n let _ = peer_store.add_discovered_addr(&peer_id, addr.clone());\n\n }\n\n }\n\n c.bench_function(\"random order 1000 / 8000 peer_info\", {\n\n move |b| {\n\n b.iter(|| {\n\n let count = 1000;\n\n assert_eq!(peer_store.peers_to_attempt(count).len() as u32, count);\n\n })\n\n }\n\n });\n\n\n", "file_path": "network/src/benches/sqlite_peer_store.rs", "rank": 89, "score": 114545.3721745979 }, { "content": "fn setup_node(\n\n height: u64,\n\n) -> (\n\n Shared<ChainKVStore<MemoryKeyValueDB>>,\n\n ChainController,\n\n RpcServer,\n\n) {\n\n let (always_success_cell, always_success_script) = create_always_success_cell();\n\n let always_success_tx = TransactionBuilder::default()\n\n .input(CellInput::new(OutPoint::null(), 0, Default::default()))\n\n .output(always_success_cell)\n\n .build();\n\n\n\n let consensus = {\n\n let genesis = BlockBuilder::default()\n\n .header_builder(\n\n HeaderBuilder::default()\n\n .timestamp(GENESIS_TIMESTAMP)\n\n .difficulty(U256::from(1000u64)),\n\n )\n", "file_path": "rpc/src/test.rs", "rank": 90, "score": 113716.99619582739 }, { "content": "#[test]\n\nfn test_delete_peer_info() {\n\n let mut peer_store = new_peer_store();\n\n let addr1 = \"/ip4/127.0.0.1\".parse().unwrap();\n\n let addr2 = \"/ip4/192.163.1.1\".parse().unwrap();\n\n let now = faketime::unix_time();\n\n // prepare peer_info records\n\n for _ in 0..(PEER_STORE_LIMIT - 2) {\n\n db::PeerInfo::insert(\n\n &peer_store.conn,\n\n &PeerId::random(),\n\n &addr1,\n\n SessionType::Inbound,\n\n peer_store.peer_score_config().default_score,\n\n now,\n\n )\n\n .expect(\"insert peer infos\");\n\n }\n\n let evict_target = PeerId::random();\n\n let fake_target = PeerId::random();\n\n {\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 91, "score": 112497.30121941002 }, { "content": "#[test]\n\nfn test_add_connected_peer() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse().unwrap();\n\n peer_store.add_connected_peer(&peer_id, addr, SessionType::Outbound);\n\n assert_eq!(\n\n peer_store.peer_score(&peer_id),\n\n 
Some(peer_store.peer_score_config().default_score)\n\n );\n\n assert_eq!(peer_store.peer_addrs(&peer_id, 1).unwrap().len(), 0);\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 92, "score": 112497.30121941002 }, { "content": "#[test]\n\nfn test_accept_inbound_peer_until_full() {\n\n let mut peer_store = new_peer_store();\n\n let reserved_peer = PeerId::random();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n // accept node until inbound connections is full\n\n let mut peers = PeerRegistry::new(3, 3, false, vec![reserved_peer.clone()]);\n\n for session_id in 1..=3 {\n\n peers\n\n .accept_peer(\n\n PeerId::random(),\n\n addr.clone(),\n\n session_id.into(),\n\n SessionType::Inbound,\n\n peer_store.as_mut(),\n\n )\n\n .expect(\"accept\");\n\n }\n\n\n\n assert_eq!(\n\n peers\n", "file_path": "network/src/tests/peer_registry.rs", "rank": 93, "score": 112497.30121941002 }, { "content": "#[test]\n\nfn test_add_discovered_addr() {\n\n let mut peer_store: Box<dyn PeerStore> = Box::new(new_peer_store());\n\n let peer_id = PeerId::random();\n\n peer_store.add_discovered_addr(&peer_id, \"/ip4/127.0.0.1\".parse().unwrap());\n\n assert_eq!(peer_store.peer_addrs(&peer_id, 2).unwrap().len(), 1);\n\n}\n\n\n", "file_path": "network/src/tests/sqlite_peer_store.rs", "rank": 94, "score": 112497.30121941002 }, { "content": "#[test]\n\nfn test_accept_inbound_peer_eviction() {\n\n // eviction inbound peer\n\n // 1. should evict from largest network groups\n\n // 2. should never evict reserved peer\n\n // 3. should evict lowest scored peer\n\n let mut peer_store = new_peer_store();\n\n let reserved_peer = PeerId::random();\n\n let evict_target = PeerId::random();\n\n let lowest_score_peer = PeerId::random();\n\n let addr1 = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n let addr2 = \"/ip4/192.168.0.1\".parse::<Multiaddr>().unwrap();\n\n // prepare protected peers\n\n let longest_connection_time_peers_count = 5;\n\n let protected_peers_count = 3 * EVICTION_PROTECT_PEERS + longest_connection_time_peers_count;\n\n let mut peers_registry = PeerRegistry::new(\n\n (protected_peers_count + longest_connection_time_peers_count) as u32,\n\n 3,\n\n false,\n\n vec![reserved_peer.clone()],\n\n );\n", "file_path": "network/src/tests/peer_registry.rs", "rank": 95, "score": 112497.30121941002 }, { "content": "fn insert_peer_info_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"insert 100 peer_info\", |b| {\n\n b.iter({\n\n let mut peer_store = SqlitePeerStore::memory().expect(\"memory\");\n\n let peer_ids = (0..100).map(|_| PeerId::random()).collect::<Vec<_>>();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n move || {\n\n for peer_id in peer_ids.clone() {\n\n peer_store.add_connected_peer(&peer_id, addr.clone(), SessionType::Outbound);\n\n }\n\n }\n\n })\n\n });\n\n c.bench_function(\"insert 1000 peer_info\", |b| {\n\n b.iter({\n\n let mut peer_store = SqlitePeerStore::memory().expect(\"memory\");\n\n let peer_ids = (0..1000).map(|_| PeerId::random()).collect::<Vec<_>>();\n\n let addr = \"/ip4/127.0.0.1\".parse::<Multiaddr>().unwrap();\n\n move || {\n\n for peer_id in peer_ids.clone() {\n", "file_path": "network/src/benches/sqlite_peer_store.rs", "rank": 96, "score": 112369.24133223074 }, { "content": "fn parent_dir(mut path: PathBuf) -> PathBuf {\n\n path.pop();\n\n path\n\n}\n\n\n", "file_path": "resource/src/lib.rs", "rank": 97, "score": 110760.10189512401 }, { "content": "fn new_chain(\n\n txs_size: usize,\n\n) -> (\n\n ChainController,\n\n 
Shared<ChainKVStore<CacheDB<RocksDB>>>,\n\n TempDir,\n\n H256,\n\n H256,\n\n) {\n\n let always_success = include_bytes!(\"../../resource/specs/cells/always_success\");\n\n let mut cell_output = CellOutput::default();\n\n cell_output.data = Bytes::from(always_success.to_vec());\n\n cell_output.capacity = cell_output.occupied_capacity().unwrap();\n\n\n\n let data_hash = cell_output.data_hash();\n\n\n\n let cellbase = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(0))\n\n .output(cell_output)\n\n .build();\n", "file_path": "benches/benches/process_block.rs", "rank": 98, "score": 110739.4395780655 }, { "content": "#[test]\n\nfn basic_sync() {\n\n let faketime_file = faketime::millis_tempfile(0).expect(\"create faketime file\");\n\n faketime::enable(&faketime_file);\n\n let thread_name = format!(\"FAKETIME={}\", faketime_file.display());\n\n\n\n let (mut node1, shared1) = setup_node(&thread_name, 1);\n\n let (mut node2, shared2) = setup_node(&thread_name, 3);\n\n\n\n node1.connect(&mut node2, NetworkProtocol::SYNC.into());\n\n\n\n let (signal_tx1, signal_rx1) = sync_channel(DEFAULT_CHANNEL);\n\n thread::Builder::new()\n\n .name(thread_name.clone())\n\n .spawn(move || {\n\n node1.start(&signal_tx1, |data| {\n\n let msg = get_root::<SyncMessage>(data);\n\n // terminate thread after 3 blocks\n\n msg.payload_as_block()\n\n .map(|block| block.header().unwrap().number() == 3)\n\n .unwrap_or(false)\n", "file_path": "sync/src/tests/synchronizer.rs", "rank": 99, "score": 110423.72315912631 } ]
Rust
crates/brine_voxel_v1/src/chunk_builder/plugin.rs
BGR360/brine
048656dfb3dc5c608536f14d687c6f7a9075df0a
use std::collections::hash_map::Entry;
use std::{any::Any, marker::PhantomData};

use bevy::tasks::Task;
use bevy::utils::{HashMap, HashSet};
use bevy::{ecs::event::Events, prelude::*, tasks::AsyncComputeTaskPool};
use futures_lite::future;

use brine_asset::{api::BlockFace, MinecraftAssets};
use brine_chunk::ChunkSection;
use brine_data::BlockStateId;
use brine_proto::event;

use crate::chunk_builder::component::PendingChunk;
use crate::mesh::VoxelMesh;
use crate::texture::BlockTextures;

use super::component::{ChunkSection as ChunkSectionComponent, PendingMeshAtlas};
use super::{
    component::{BuiltChunkBundle, BuiltChunkSectionBundle},
    ChunkBuilder,
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)]
pub enum System {
    BuilderTaskSpawn,
    BuilderResultAddToWorld,
}

pub struct ChunkBuilderPlugin<T: ChunkBuilder> {
    shared: bool,
    _phantom: PhantomData<T>,
}

impl<T: ChunkBuilder> ChunkBuilderPlugin<T> {
    pub fn shared() -> Self {
        Self {
            shared: true,
            ..Default::default()
        }
    }
}

impl<T: ChunkBuilder> Default for ChunkBuilderPlugin<T> {
    fn default() -> Self {
        Self {
            shared: false,
            _phantom: PhantomData,
        }
    }
}

impl<T> Plugin for ChunkBuilderPlugin<T>
where
    T: ChunkBuilder + Default + Send + Sync + 'static,
{
    fn build(&self, app: &mut App) {
        let mut systems = SystemSet::new();

        systems = if self.shared {
            systems.with_system(Self::builder_task_spawn_shared.label(System::BuilderTaskSpawn))
        } else {
            systems.with_system(Self::builder_task_spawn_unique.label(System::BuilderTaskSpawn))
        };

        systems = systems
            .with_system(Self::receive_built_meshes)
            .with_system(Self::add_built_chunks_to_world.label(System::BuilderResultAddToWorld));

        app.add_system_set(systems);
    }
}

type MesherTask = Task<(brine_chunk::Chunk, Vec<VoxelMesh>)>;

impl<T> ChunkBuilderPlugin<T>
where
    T: ChunkBuilder + Default + Any + Send + Sync + 'static,
{
    fn builder_task_spawn(
        chunk_event: event::clientbound::ChunkData,
        commands: &mut Commands,
        task_pool: &AsyncComputeTaskPool,
    ) {
        let chunk = chunk_event.chunk_data;
        if !chunk.is_full() {
            return;
        }

        let chunk_x = chunk.chunk_x;
        let chunk_z = chunk.chunk_z;
        debug!("Received chunk ({}, {}), spawning task", chunk_x, chunk_z);

        let task: MesherTask = task_pool.spawn(async move {
            let built = T::default().build_chunk(&chunk);
            (chunk, built)
        });

        commands.spawn().insert_bundle((
            task,
            PendingChunk::new(T::TYPE),
            Name::new(format!("Pending Chunk ({}, {})", chunk_x, chunk_z)),
        ));
    }

    fn build_texture_atlas_for_mesh(
        mesh: &VoxelMesh,
        chunk_section: &ChunkSection,
        asset_server: &AssetServer,
        mc_assets: &MinecraftAssets,
        texture_builder: &mut BlockTextures,
    ) -> PendingMeshAtlas {
        let mut texture_handles: HashSet<Handle<Image>> = Default::default();
        let mut face_textures: Vec<Handle<Image>> = Vec::with_capacity(mesh.faces.len());
        let mut handle_cache: HashMap<(BlockStateId, BlockFace), Handle<Image>> =
            Default::default();

        for face in mesh.faces.iter() {
            let [x, y, z] = face.voxel;
            let face = face.axis.into();

            let block_state_id = chunk_section.get_block((x, y, z)).unwrap();
            let block_state_id = BlockStateId(block_state_id.0 as u16);

            let key = (block_state_id, face);
            let weak_handle = match handle_cache.entry(key) {
                Entry::Vacant(entry) => {
                    let strong_handle = match mc_assets
                        .get_texture_path_for_block_state_and_face(block_state_id, face)
                    {
                        Some(path) => asset_server.load(path),
                        None => {
                            debug!("No texture for {:?}:{:?}", block_state_id, face);
                            texture_builder.placeholder_texture.clone()
                        }
                    };

                    if !texture_handles.contains(&strong_handle) {
                        texture_handles.insert(strong_handle.clone());
                    }

                    entry.insert(strong_handle.as_weak()).clone_weak()
                }
                Entry::Occupied(entry) => entry.get().clone_weak(),
            };

            face_textures.push(weak_handle);
        }

        let atlas = texture_builder
            .create_texture_atlas_with_textures(texture_handles.into_iter(), asset_server);

        PendingMeshAtlas {
            atlas,
            face_textures,
        }
    }

    fn add_built_chunk_to_world(
        chunk_data: brine_chunk::Chunk,
        voxel_meshes: Vec<VoxelMesh>,
        atlases: Vec<&TextureAtlas>,
        face_textures: Vec<Vec<Handle<Image>>>,
        meshes: &mut Assets<Mesh>,
        materials: &mut Assets<StandardMaterial>,
        commands: &mut Commands,
    ) -> Entity {
        debug!(
            "Adding chunk ({}, {}) to world",
            chunk_data.chunk_x, chunk_data.chunk_z
        );

        commands
            .spawn()
            .insert_bundle(BuiltChunkBundle::new(
                T::TYPE,
                chunk_data.chunk_x,
                chunk_data.chunk_z,
            ))
            .with_children(move |parent| {
                for (((section, mut mesh), atlas), face_textures) in chunk_data
                    .sections
                    .into_iter()
                    .zip(voxel_meshes.into_iter())
                    .zip(atlases.into_iter())
                    .zip(face_textures.into_iter())
                {
                    mesh.adjust_tex_coords(atlas, &face_textures);

                    parent
                        .spawn()
                        .insert_bundle(BuiltChunkSectionBundle::new(T::TYPE, section.chunk_y))
                        .insert_bundle(PbrBundle {
                            mesh: meshes.add(mesh.to_render_mesh()),
                            material: materials.add(StandardMaterial {
                                base_color_texture: Some(atlas.texture.clone()),
                                unlit: true,
                                ..Default::default()
                            }),
                            ..Default::default()
                        })
                        .insert(ChunkSectionComponent(section));
                }
            })
            .id()
    }

    /*
         ____            _
        / ___| _   _ ___| |_ ___ _ __ ___  ___
        \___ \| | | / __| __/ _ \ '_ ` _ \/ __|
         ___) | |_| \__ \ ||  __/ | | | | \__ \
        |____/ \__, |___/\__\___|_| |_| |_|___/
               |___/
    */

    fn builder_task_spawn_unique(
        mut chunk_events: ResMut<Events<event::clientbound::ChunkData>>,
        mut commands: Commands,
        task_pool: Res<AsyncComputeTaskPool>,
    ) {
        for chunk_event in chunk_events.drain() {
            Self::builder_task_spawn(chunk_event, &mut commands, &task_pool);
        }
    }

    fn builder_task_spawn_shared(
        mut chunk_events: EventReader<event::clientbound::ChunkData>,
        mut commands: Commands,
        task_pool: Res<AsyncComputeTaskPool>,
    ) {
        for chunk_event in chunk_events.iter() {
            Self::builder_task_spawn(chunk_event.clone(), &mut commands, &task_pool);
        }
    }

    fn receive_built_meshes(
        asset_server: Res<AssetServer>,
        mc_assets: Res<MinecraftAssets>,
        mut chunks_with_pending_meshes: Query<(Entity, &mut PendingChunk, &mut MesherTask)>,
        mut texture_builder: ResMut<BlockTextures>,
        mut commands: Commands,
    ) {
        const MAX_PER_FRAME: usize = 1;

        for (i, (entity, mut pending_chunk, mut mesher_task)) in
            chunks_with_pending_meshes.iter_mut().enumerate()
        {
            if i >= MAX_PER_FRAME {
                break;
            }

            if pending_chunk.builder != T::TYPE {
                continue;
            }

            if let Some((chunk, voxel_meshes)) =
                future::block_on(future::poll_once(&mut *mesher_task))
            {
                debug!(
                    "Received meshes for Chunk ({}, {})",
                    chunk.chunk_x, chunk.chunk_z
                );

                let texture_atlases = voxel_meshes
                    .iter()
                    .zip(chunk.sections.iter())
                    .map(|(mesh, chunk_section)| {
                        Self::build_texture_atlas_for_mesh(
                            mesh,
                            chunk_section,
                            &*asset_server,
                            &*mc_assets,
                            &mut *texture_builder,
                        )
                    })
                    .collect();

                pending_chunk.chunk_data = Some(chunk);
                pending_chunk.voxel_meshes = Some(voxel_meshes);
                pending_chunk.texture_atlases = Some(texture_atlases);

                commands.entity(entity).remove::<MesherTask>();
            }
        }
    }

    fn add_built_chunks_to_world(
        atlases: Res<Assets<TextureAtlas>>,
        mut chunks_with_pending_atlases: Query<(Entity, &mut PendingChunk), Without<MesherTask>>,
        mut meshes: ResMut<Assets<Mesh>>,
        mut materials: ResMut<Assets<StandardMaterial>>,
        mut commands: Commands,
    ) {
        for (entity, mut pending_chunk) in chunks_with_pending_atlases.iter_mut() {
            if pending_chunk.builder != T::TYPE {
                continue;
            }

            let built_atlases: Vec<Option<&TextureAtlas>> = pending_chunk
                .texture_atlases
                .as_ref()
                .unwrap()
                .iter()
                .map(|pending_atlas| atlases.get(&pending_atlas.atlas))
                .collect();

            if built_atlases.iter().any(|atlas| atlas.is_none()) {
                continue;
            }

            let atlases: Vec<&TextureAtlas> =
                built_atlases.iter().map(|atlas| atlas.unwrap()).collect();

            let face_textures: Vec<Vec<Handle<Image>>> = pending_chunk
                .texture_atlases
                .take()
                .unwrap()
                .into_iter()
                .map(|atlas| atlas.face_textures)
                .collect();

            let chunk = pending_chunk.chunk_data.take().unwrap();
            let voxel_meshes = pending_chunk.voxel_meshes.take().unwrap();

            debug!(
                "Received all texture atlases for Chunk ({}, {})",
                chunk.chunk_x, chunk.chunk_z
            );

            Self::add_built_chunk_to_world(
                chunk,
                voxel_meshes,
                atlases,
                face_textures,
                &mut *meshes,
                &mut *materials,
                &mut commands,
            );

            commands.entity(entity).despawn();
        }
    }
}
use std::collections::hash_map::Entry;
use std::{any::Any, marker::PhantomData};

use bevy::tasks::Task;
use bevy::utils::{HashMap, HashSet};
use bevy::{ecs::event::Events, prelude::*, tasks::AsyncComputeTaskPool};
use futures_lite::future;

use brine_asset::{api::BlockFace, MinecraftAssets};
use brine_chunk::ChunkSection;
use brine_data::BlockStateId;
use brine_proto::event;

use crate::chunk_builder::component::PendingChunk;
use crate::mesh::VoxelMesh;
use crate::texture::BlockTextures;

use super::component::{ChunkSection as ChunkSectionComponent, PendingMeshAtlas};
use super::{
    component::{BuiltChunkBundle, BuiltChunkSectionBundle},
    ChunkBuilder,
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)]
pub enum System {
    BuilderTaskSpawn,
    BuilderResultAddToWorld,
}

pub struct ChunkBuilderPlugin<T: ChunkBuilder> {
    shared: bool,
    _phantom: PhantomData<T>,
}

impl<T: ChunkBuilder> ChunkBuilderPlugin<T> {
    pub fn shared() -> Self {
        Self {
            shared: true,
            ..Default::default()
        }
    }
}

impl<T: ChunkBuilder> Default for ChunkBuilderPlugin<T> {
    fn default() -> Self {
        Self {
            shared: false,
            _phantom: PhantomData,
        }
    }
}

impl<T> Plugin for ChunkBuilderPlugin<T>
where
    T: ChunkBuilder + Default + Send + Sync + 'static,
{
    fn build(&self, app: &mut App) {
        let mut systems = SystemSet::new();

        systems = if self.shared {
            systems.with_system(Self::builder_task_spawn_shared.label(System::BuilderTaskSpawn))
        } else {
            systems.with_system(Self::builder_task_spawn_unique.label(System::BuilderTaskSpawn))
        };

        systems = systems
            .with_system(Self::receive_built_meshes)
            .with_system(Self::add_built_chunks_to_world.label(System::BuilderResultAddToWorld));

        app.add_system_set(systems);
    }
}

type MesherTask = Task<(brine_chunk::Chunk, Vec<VoxelMesh>)>;

impl<T> ChunkBuilderPlugin<T>
where
    T: ChunkBuilder + Default + Any + Send + Sync + 'static,
{
    fn builder_task_spawn(
        chunk_event: event::clientbound::ChunkData,
        commands: &mut Commands,
        task_pool: &AsyncComputeTaskPool,
    ) {
        let chunk = chunk_event.chunk_data;
        if !chunk.is_full() {
            return;
        }

        let chunk_x = chunk.chunk_x;
        let chunk_z = chunk.chunk_z;
        debug!("Received chunk ({}, {}), spawning task", chunk_x, chunk_z);

        let task: MesherTask = task_pool.spawn(async move {
            let built = T::default().build_chunk(&chunk);
            (chunk, built)
        });

        commands.spawn().insert_bundle((
            task,
            PendingChunk::new(T::TYPE),
            Name::new(format!("Pending Chunk ({}, {})", chunk_x, chunk_z)),
        ));
    }

    fn build_texture_atlas_for_mesh(
        mesh: &VoxelMesh,
        chunk_section: &ChunkSection,
        asset_server: &AssetServer,
        mc_assets: &MinecraftAssets,
        texture_builder: &mut BlockTextures,
    ) -> PendingMeshAtlas {
        let mut texture_handles: HashSet<Handle<Image>> = Default::default();
        let mut face_textures: Vec<Handle<Image>> = Vec::with_capacity(mesh.faces.len());
        let mut handle_cache: HashMap<(BlockStateId, BlockFace), Handle<Image>> =
            Default::default();

        for face in mesh.faces.iter() {
            let [x, y, z] = face.voxel;
            let face = face.axis.into();

            let block_state_id = chunk_section.get_block((x, y, z)).unwrap();
            let block_state_id = BlockStateId(block_state_id.0 as u16);

            let key = (block_state_id, face);
            let weak_handle = match handle_cache.entry(key) {
                Entry::Vacant(entry) => {
if !texture_handles.contains(&strong_handle) { texture_handles.insert(strong_handle.clone()); } entry.insert(strong_handle.as_weak()).clone_weak() } Entry::Occupied(entry) => entry.get().clone_weak(), }; face_textures.push(weak_handle); } let atlas = texture_builder .create_texture_atlas_with_textures(texture_handles.into_iter(), asset_server); PendingMeshAtlas { atlas, face_textures, } } fn add_built_chunk_to_world( chunk_data: brine_chunk::Chunk, voxel_meshes: Vec<VoxelMesh>, atlases: Vec<&TextureAtlas>, face_textures: Vec<Vec<Handle<Image>>>, meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, commands: &mut Commands, ) -> Entity { debug!( "Adding chunk ({}, {}) to world", chunk_data.chunk_x, chunk_data.chunk_z ); commands .spawn() .insert_bundle(BuiltChunkBundle::new( T::TYPE, chunk_data.chunk_x, chunk_data.chunk_z, )) .with_children(move |parent| { for (((section, mut mesh), atlas), face_textures) in chunk_data .sections .into_iter() .zip(voxel_meshes.into_iter()) .zip(atlases.into_iter()) .zip(face_textures.into_iter()) { mesh.adjust_tex_coords(atlas, &face_textures); parent .spawn() .insert_bundle(BuiltChunkSectionBundle::new(T::TYPE, section.chunk_y)) .insert_bundle(PbrBundle { mesh: meshes.add(mesh.to_render_mesh()), material: materials.add(StandardMaterial { base_color_texture: Some(atlas.texture.clone()), unlit: true, ..Default::default() }), ..Default::default() }) .insert(ChunkSectionComponent(section)); } }) .id() } /* ____ _ / ___| _ _ ___| |_ ___ _ __ ___ ___ \___ \| | | / __| __/ _ \ '_ ` _ \/ __| ___) | |_| \__ \ || __/ | | | | \__ \ |____/ \__, |___/\__\___|_| |_| |_|___/ |___/ */ fn builder_task_spawn_unique( mut chunk_events: ResMut<Events<event::clientbound::ChunkData>>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.drain() { Self::builder_task_spawn(chunk_event, &mut commands, &task_pool); } } fn builder_task_spawn_shared( mut chunk_events: EventReader<event::clientbound::ChunkData>, mut commands: Commands, task_pool: Res<AsyncComputeTaskPool>, ) { for chunk_event in chunk_events.iter() { Self::builder_task_spawn(chunk_event.clone(), &mut commands, &task_pool); } } fn receive_built_meshes( asset_server: Res<AssetServer>, mc_assets: Res<MinecraftAssets>, mut chunks_with_pending_meshes: Query<(Entity, &mut PendingChunk, &mut MesherTask)>, mut texture_builder: ResMut<BlockTextures>, mut commands: Commands, ) { const MAX_PER_FRAME: usize = 1; for (i, (entity, mut pending_chunk, mut mesher_task)) in chunks_with_pending_meshes.iter_mut().enumerate() { if i >= MAX_PER_FRAME { break; } if pending_chunk.builder != T::TYPE { continue; } if let Some((chunk, voxel_meshes)) = future::block_on(future::poll_once(&mut *mesher_task)) { debug!( "Received meshes for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); let texture_atlases = voxel_meshes .iter() .zip(chunk.sections.iter()) .map(|(mesh, chunk_section)| { Self::build_texture_atlas_for_mesh( mesh, chunk_section, &*asset_server, &*mc_assets, &mut *texture_builder, ) }) .collect(); pending_chunk.chunk_data = Some(chunk); pending_chunk.voxel_meshes = Some(voxel_meshes); pending_chunk.texture_atlases = Some(texture_atlases); commands.entity(entity).remove::<MesherTask>(); } } } fn add_built_chunks_to_world( atlases: Res<Assets<TextureAtlas>>, mut chunks_with_pending_atlases: Query<(Entity, &mut PendingChunk), Without<MesherTask>>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut commands: Commands, ) { for (entity, mut 
pending_chunk) in chunks_with_pending_atlases.iter_mut() { if pending_chunk.builder != T::TYPE { continue; } let built_atlases: Vec<Option<&TextureAtlas>> = pending_chunk .texture_atlases .as_ref() .unwrap() .iter() .map(|pending_atlas| atlases.get(&pending_atlas.atlas)) .collect(); if built_atlases.iter().any(|atlas| atlas.is_none()) { continue; } let atlases: Vec<&TextureAtlas> = built_atlases.iter().map(|atlas| atlas.unwrap()).collect(); let face_textures: Vec<Vec<Handle<Image>>> = pending_chunk .texture_atlases .take() .unwrap() .into_iter() .map(|atlas| atlas.face_textures) .collect(); let chunk = pending_chunk.chunk_data.take().unwrap(); let voxel_meshes = pending_chunk.voxel_meshes.take().unwrap(); debug!( "Received all texture atlases for Chunk ({}, {})", chunk.chunk_x, chunk.chunk_z ); Self::add_built_chunk_to_world( chunk, voxel_meshes, atlases, face_textures, &mut *meshes, &mut *materials, &mut commands, ); commands.entity(entity).despawn(); } } }
let strong_handle = match mc_assets
    .get_texture_path_for_block_state_and_face(block_state_id, face)
{
    Some(path) => asset_server.load(path),
    None => {
        debug!("No texture for {:?}:{:?}", block_state_id, face);
        texture_builder.placeholder_texture.clone()
    }
};
assignment_statement
[ { "content": "pub fn build_bevy_mesh(voxel_mesh: &VoxelMesh) -> Mesh {\n\n let num_vertices = voxel_mesh.quads.len() * 4;\n\n let num_indices = voxel_mesh.quads.len() * 6;\n\n let mut positions = Vec::with_capacity(num_vertices);\n\n let mut normals = Vec::with_capacity(num_vertices);\n\n let mut tex_coords = Vec::with_capacity(num_vertices);\n\n let mut indices = Vec::with_capacity(num_indices);\n\n\n\n for quad in voxel_mesh.quads.iter() {\n\n indices.extend_from_slice(\n\n &quad\n\n .get_indices()\n\n .map(|i| positions.len() as u32 + i as u32),\n\n );\n\n\n\n positions.extend_from_slice(&quad.positions);\n\n normals.extend_from_slice(&quad.get_normals());\n\n tex_coords.extend_from_slice(&quad.get_tex_coords());\n\n }\n\n\n\n let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_POSITION, positions);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_NORMAL, normals);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_UV_0, tex_coords);\n\n mesh.set_indices(Some(Indices::U32(indices)));\n\n\n\n mesh\n\n}\n", "file_path": "crates/brine_render/src/chunk/chunk_bakery.rs", "rank": 1, "score": 225321.85555323237 }, { "content": "fn spawn_component(mut commands: Commands) {\n\n commands.spawn().insert_bundle((\n\n Name::new(\"Debug Wireframe\"),\n\n EnableWireframe { enable: true },\n\n ));\n\n}\n\n\n", "file_path": "src/debug/wireframe.rs", "rank": 2, "score": 194515.84267676267 }, { "content": "type LoadChunkTask = Task<Result<Chunk>>;\n\n\n", "file_path": "src/server.rs", "rank": 3, "score": 193279.8357585012 }, { "content": "pub fn build_bevy_mesh(voxel_mesh: &VoxelMesh) -> Mesh {\n\n let num_vertices = voxel_mesh.quads.len() * 4;\n\n let num_indices = voxel_mesh.quads.len() * 6;\n\n let mut positions = Vec::with_capacity(num_vertices);\n\n let mut normals = Vec::with_capacity(num_vertices);\n\n let mut tex_coords = Vec::with_capacity(num_vertices);\n\n let mut indices = Vec::with_capacity(num_indices);\n\n\n\n for quad in voxel_mesh.quads.iter() {\n\n indices.extend_from_slice(\n\n &quad\n\n .get_indices()\n\n .map(|i| positions.len() as u32 + i as u32),\n\n );\n\n\n\n positions.extend_from_slice(&quad.positions);\n\n normals.extend_from_slice(&quad.get_normals());\n\n tex_coords.extend_from_slice(&quad.get_tex_coords());\n\n }\n\n\n\n let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_POSITION, positions);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_NORMAL, normals);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_UV_0, tex_coords);\n\n mesh.set_indices(Some(Indices::U32(indices)));\n\n\n\n mesh\n\n}\n", "file_path": "crates/brine_voxel/examples/common/mesh_viewer.rs", "rank": 4, "score": 187819.8441333166 }, { "content": "fn give_chunk_sections_correct_y_height(mut query: Query<(&mut Transform, &BuiltChunkSection)>) {\n\n for (mut transform, chunk_section) in query.iter_mut() {\n\n let height = (chunk_section.section_y as f32) * 16.0;\n\n if transform.translation.y != height {\n\n transform.translation.y = height;\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 183170.52752103162 }, { "content": "/// Saves a chunk packet to a pair of `chunk_{X}_{Z}.dump` and\n\n/// `chunk_{X}_{Z}.meta` files in the directory pointed to by `path`.\n\npub fn save_packet_if_has_chunk_data(\n\n packet: &Packet,\n\n path: impl AsRef<Path>,\n\n) -> Result<Option<PathBuf>> {\n\n if let Some(ChunkData {\n\n chunk_x,\n\n chunk_z,\n\n bitmask,\n\n full_chunk: true,\n\n data,\n\n }) = ChunkData::from_packet(packet)\n\n {\n\n let mut path 
= PathBuf::from(path.as_ref());\n\n path.push(format!(\"chunk_{}_{}.dump\", chunk_x, chunk_z));\n\n\n\n let dump_path = path.with_extension(\"dump\");\n\n let meta_path = path.with_extension(\"meta\");\n\n\n\n let meta = ChunkMeta {\n\n chunk_x,\n", "file_path": "src/chunk.rs", "rank": 6, "score": 164857.90610713593 }, { "content": "/// Loads a chunk from a pair of `.dump` and `.meta` files.\n\npub fn load_chunk(path: impl AsRef<Path>) -> Result<Chunk> {\n\n let chunk = load_chunk_data(path)?.decode()?;\n\n\n\n Ok(chunk)\n\n}\n\n\n", "file_path": "src/chunk.rs", "rank": 7, "score": 160701.5160873254 }, { "content": "fn set_up_camera(mut commands: Commands) {\n\n // Screenshot coords.\n\n let camera_start = Transform::from_translation(Vec3::new(-200.0, 87.8, 157.3))\n\n .with_rotation(Quat::from_euler(EulerRot::XYZ, 0.1338, 0.183, -0.025));\n\n\n\n // let camera_start = Transform::from_translation(Vec3::new(-260.0, 115.0, 200.0))\n\n // .looking_at(Vec3::new(-40.0, 100.0, 0.0), Vec3::Y);\n\n\n\n commands\n\n .spawn_bundle(PerspectiveCameraBundle {\n\n transform: camera_start,\n\n ..Default::default()\n\n })\n\n .insert(FlyCamera::default());\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 160409.9625389541 }, { "content": "fn rotate(input: Res<Input<KeyCode>>, mut query: Query<&mut Transform, With<Root>>) {\n\n if let Ok(mut transform) = query.get_single_mut() {\n\n if input.just_pressed(KeyCode::Right) {\n\n transform.rotate(Quat::from_rotation_y(90.0_f32.to_radians()));\n\n }\n\n if input.just_pressed(KeyCode::Left) {\n\n transform.rotate(Quat::from_rotation_y(-90.0_f32.to_radians()));\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/brine_voxel/examples/common/mesh_viewer.rs", "rank": 9, "score": 156189.3136161547 }, { "content": "fn setup(mut commands: Commands) {\n\n commands.spawn_bundle(OrthographicCameraBundle::new_2d());\n\n}\n\n\n", "file_path": "crates/brine_render/examples/minecraft_textures.rs", "rank": 10, "score": 152953.73496950284 }, { "content": "pub fn get_chunk_from_packet(packet: &Packet) -> Result<Option<Chunk>> {\n\n if let Some(chunk_data) = ChunkData::from_packet(packet) {\n\n Ok(Some(chunk_data.decode()?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\npub(crate) fn build(app: &mut App) {\n\n app.add_system(handle_chunk_data);\n\n}\n\n\n", "file_path": "crates/brine_proto_backend/src/backend_stevenarella/chunks.rs", "rank": 11, "score": 152361.90537125303 }, { "content": "fn bake_chunk(chunk: &ChunkSection, mc_data: &MinecraftData, mc_assets: &MinecraftAssets) -> Mesh {\n\n let chunk_bakery = ChunkBakery::new(mc_data, mc_assets);\n\n\n\n let baked_chunk = chunk_bakery.bake_chunk(chunk);\n\n\n\n baked_chunk.mesh\n\n}\n\n\n", "file_path": "crates/brine_render/examples/bake_chunk.rs", "rank": 12, "score": 148510.12059344506 }, { "content": "fn send_chunks(\n\n mut tasks: Query<(Entity, &mut LoadChunkTask)>,\n\n mut chunk_events: EventWriter<ChunkData>,\n\n mut commands: Commands,\n\n) -> Result<()> {\n\n for (task_entity, mut task) in tasks.iter_mut() {\n\n if let Some(chunk_data) = future::block_on(future::poll_once(&mut *task)) {\n\n let chunk_data = chunk_data?;\n\n chunk_events.send(ChunkData { chunk_data });\n\n\n\n commands.entity(task_entity).despawn();\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/server.rs", "rank": 13, "score": 142304.2249230609 }, { "content": "enum BlockMeshOutput {\n\n VisibleFaces(UnitQuadBuffer),\n\n GreedyQuads(GreedyQuadsBuffer),\n\n}\n\n\n\nimpl BlockMeshOutput {\n\n #[inline]\n\n fn num_quads(&self) -> usize {\n\n 
match self {\n\n Self::VisibleFaces(buffer) => buffer.num_quads(),\n\n Self::GreedyQuads(buffer) => buffer.quads.num_quads(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn for_each_quad_and_face(\n\n self,\n\n faces: &[OrientedBlockFace; 6],\n\n mut func: impl FnMut(UnorientedQuad, &OrientedBlockFace),\n\n ) {\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 14, "score": 136683.9827419408 }, { "content": "struct BlockMeshBuilder {\n\n voxels: [BlockState; Self::BUFFER_SIZE],\n\n shape: ChunkShape,\n\n min: [u32; 3],\n\n max: [u32; 3],\n\n faces: [OrientedBlockFace; 6],\n\n}\n\n\n\nimpl BlockMeshBuilder {\n\n const BUFFER_SIZE: usize = (SHAPE_SIDE * SHAPE_SIDE * SHAPE_SIDE) as usize;\n\n\n\n fn new() -> Self {\n\n Self {\n\n voxels: [BlockState::EMPTY; Self::BUFFER_SIZE],\n\n shape: ChunkShape {},\n\n min: [0; 3],\n\n max: [SHAPE_SIDE - 1; 3],\n\n faces: RIGHT_HANDED_Y_UP_CONFIG.faces,\n\n }\n\n }\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 15, "score": 136087.59178287137 }, { "content": "/// Loads **undecoded** chunk data from a pair of `.dump` and `.meta` files.\n\npub fn load_chunk_data(path: impl AsRef<Path>) -> Result<ChunkData<Vec<u8>>> {\n\n let path = path.as_ref();\n\n let dump_path = path.with_extension(\"dump\");\n\n let meta_path = path.with_extension(\"meta\");\n\n\n\n let ChunkMeta {\n\n chunk_x,\n\n chunk_z,\n\n bitmask,\n\n } = serde_json::from_reader(fs::File::open(meta_path)?)?;\n\n\n\n let data = fs::read(dump_path)?;\n\n\n\n Ok(ChunkData {\n\n chunk_x,\n\n chunk_z,\n\n bitmask,\n\n full_chunk: true,\n\n data,\n\n })\n\n}\n\n\n", "file_path": "src/chunk.rs", "rank": 16, "score": 133261.68067879736 }, { "content": "#[derive(Clone, Copy, Eq, PartialEq)]\n\nstruct BlockState(brine_chunk::BlockState);\n\n\n\nimpl BlockState {\n\n const EMPTY: Self = Self(brine_chunk::BlockState::AIR);\n\n}\n\n\n\nimpl Voxel for BlockState {\n\n #[inline]\n\n fn is_empty(&self) -> bool {\n\n *self == Self::EMPTY\n\n }\n\n\n\n #[inline]\n\n fn is_opaque(&self) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl MergeVoxel for BlockState {\n\n type MergeValue = Self;\n\n\n\n fn merge_value(&self) -> Self::MergeValue {\n\n *self\n\n }\n\n}\n\n\n\nconst SHAPE_SIDE: u32 = (SECTION_WIDTH as u32) + 2;\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 17, "score": 125260.7676737169 }, { "content": "fn log_network_errors(mut event_reader: EventReader<NetworkEvent<ProtocolCodec>>) {\n\n for event in event_reader.iter() {\n\n if let NetworkEvent::Error(network_error) = event {\n\n warn!(\"Network error: {}\", network_error);\n\n }\n\n }\n\n}\n", "file_path": "crates/brine_proto_backend/src/plugin.rs", "rank": 18, "score": 120794.1733583665 }, { "content": "fn setup(atlas: Res<Atlas>, texture_atlases: Res<Assets<TextureAtlas>>, mut commands: Commands) {\n\n let texture_atlas = texture_atlases.get(atlas.handle.as_ref().unwrap()).unwrap();\n\n\n\n let texture_atlas_texture = texture_atlas.texture.clone();\n\n\n\n commands.spawn_bundle(OrthographicCameraBundle::new_2d());\n\n\n\n commands.spawn_bundle(SpriteBundle {\n\n texture: texture_atlas_texture,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::ONE * 2.0),\n\n ..Default::default()\n\n });\n\n}\n", "file_path": "crates/brine_voxel_v1/examples/texture_builder.rs", "rank": 19, "score": 120194.64659342423 }, { "content": "type ChunkShape = ConstShape3u32<SHAPE_SIDE, SHAPE_SIDE, SHAPE_SIDE>;\n\n\n", "file_path": 
"crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 20, "score": 115386.65555868555 }, { "content": "fn connect(mut net_resource: ResMut<NetworkResource<StringCodec>>) {\n\n net_resource.connect(SERVER.to_string());\n\n}\n\n\n", "file_path": "crates/brine_net/examples/strings.rs", "rank": 21, "score": 105997.78233191665 }, { "content": "fn connect(mut net_resource: ResMut<NetworkResource<DummyCodec>>) {\n\n net_resource.connect(SERVER.to_string());\n\n}\n\n\n", "file_path": "crates/brine_net/examples/dummy.rs", "rank": 22, "score": 105997.78233191665 }, { "content": "/// A [`VoxelView`] that can be used with a [`Mesher`] to generate a [`Mesh`] for a\n\n/// cuboid chunk of voxels.\n\n///\n\n/// [`Mesher`]: super::Mesher\n\n/// [`Mesh`]: super::Mesh\n\npub trait MeshingView: VoxelView {\n\n type Quads: IntoIterator<Item = [[f32; 3]; 4]>;\n\n\n\n /// Returns true if the voxel at index `[x, y, z]` has no geometry to\n\n /// provide to the mesh.\n\n ///\n\n /// **Note:** [`is_empty`] and [`is_face_occluded`] are independent of each\n\n /// other. In other words, non-empty voxels can still be occluded by empty\n\n /// ones. An \"empty\" voxel simply means that no geometry should be produced\n\n /// for that voxel in this view.\n\n ///\n\n /// This makes it possible to render a chunk in multiple separate layers\n\n /// without generating any redundant geometry. To do this, use a different\n\n /// [`MeshingView`] for each layer, and use [`is_empty`] to signify which\n\n /// voxels should be included in each layer's mesh.\n\n ///\n\n /// [`is_empty`]: MeshingView::is_empty\n\n /// [`is_face_occluded`]: MeshingView::is_face_occluded\n\n fn is_empty(&self, x: IndexTy, y: IndexTy, z: IndexTy) -> bool;\n\n\n", "file_path": "crates/brine_voxel/src/meshing/meshing_view.rs", "rank": 23, "score": 103834.03838589668 }, { "content": "pub fn exit_on_error<T, E: fmt::Display>(\n\n In(result): In<Result<T, E>>,\n\n mut app_exit: EventWriter<AppExit>,\n\n) {\n\n if let Err(e) = result {\n\n error!(\"{}\", e);\n\n app_exit.send(AppExit);\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 24, "score": 103823.67365914799 }, { "content": "fn random_chunk() -> ChunkSection {\n\n let mut block_states = [BlockState::AIR; BLOCKS_PER_SECTION];\n\n\n\n let mut block_count = 0;\n\n for block_state in block_states.iter_mut() {\n\n if fastrand::f32() >= 0.9 {\n\n *block_state = random_block_state();\n\n block_count += 1;\n\n }\n\n }\n\n\n\n ChunkSection {\n\n block_count,\n\n chunk_y: 0,\n\n block_states: BlockStates(block_states),\n\n }\n\n}\n\n\n", "file_path": "crates/brine_render/examples/bake_chunk.rs", "rank": 25, "score": 102607.4410694851 }, { "content": "/// A trait that makes it possible to implement composable meshing views that\n\n/// delegate by default to some inner [`MeshingView`].\n\npub trait DelegatingMeshingView: VoxelView {\n\n type Delegate: MeshingView;\n\n\n\n fn delegate(&self) -> &Self::Delegate;\n\n\n\n #[inline(always)]\n\n fn is_empty(&self, x: u8, y: u8, z: u8) -> bool {\n\n self.delegate().is_empty(x, y, z)\n\n }\n\n\n\n #[inline(always)]\n\n fn is_full_cube(&self, x: u8, y: u8, z: u8) -> bool {\n\n self.delegate().is_full_cube(x, y, z)\n\n }\n\n\n\n #[inline(always)]\n\n fn is_face_occluded(&self, x: u8, y: u8, z: u8, face: Direction) -> bool {\n\n self.delegate().is_face_occluded(x, y, z, face)\n\n }\n\n\n", "file_path": "crates/brine_voxel/src/meshing/meshing_view.rs", "rank": 26, "score": 102568.92620451553 }, { "content": "fn load_chunks(\n\n chunk_directory: 
Res<ChunkDirectory>,\n\n task_pool: Res<IoTaskPool>,\n\n mut commands: Commands,\n\n) -> Result<()> {\n\n for entry in fs::read_dir(&chunk_directory.path)? {\n\n let entry = entry?;\n\n\n\n let path_string = entry.file_name().to_string_lossy().to_string();\n\n\n\n if path_string.starts_with(\"chunk_light_\") || !path_string.ends_with(\".dump\") {\n\n continue;\n\n }\n\n\n\n let path = entry.path();\n\n let task: LoadChunkTask = task_pool.spawn(async move { load_chunk(path) });\n\n\n\n commands.spawn().insert_bundle((\n\n task,\n\n Name::new(format!(\"Loading Chunk {}\", entry.path().to_string_lossy())),\n\n ));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 27, "score": 101526.2145192806 }, { "content": "struct BoolView<'a> {\n\n chunk: &'a IntChunk,\n\n}\n\n\n\nimpl<'a> BoolView<'a> {\n\n fn is_empty(&self, x: u8, y: u8, z: u8) -> Option<bool> {\n\n self.chunk.get(x, y, z).map(|i| i == 0)\n\n }\n\n}\n\n\n\nimpl<'a> VoxelView for BoolView<'a> {\n\n #[inline(always)]\n\n fn size_x(&self) -> u8 {\n\n CHUNK_SIDE\n\n }\n\n\n\n #[inline(always)]\n\n fn size_y(&self) -> u8 {\n\n CHUNK_SIDE\n\n }\n", "file_path": "crates/brine_voxel/examples/mesher.rs", "rank": 28, "score": 100423.68074647356 }, { "content": "#[derive(Component)]\n\nstruct Root;\n\n\n\npub struct MeshViewerPlugin {\n\n mesh: VoxelMesh,\n\n}\n\n\n\nimpl MeshViewerPlugin {\n\n pub fn new(mesh: VoxelMesh) -> Self {\n\n Self { mesh }\n\n }\n\n}\n\n\n\nimpl Plugin for MeshViewerPlugin {\n\n fn build(&self, app: &mut App) {\n\n let mesh = build_bevy_mesh(&self.mesh);\n\n\n\n let mut meshes = app.world.get_resource_mut::<Assets<Mesh>>().unwrap();\n\n let handle = meshes.add(mesh);\n\n\n\n app.world.insert_resource(handle);\n", "file_path": "crates/brine_voxel/examples/common/mesh_viewer.rs", "rank": 29, "score": 100404.3386911395 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nenum AppState {\n\n Loading,\n\n Finished,\n\n}\n\n\n", "file_path": "crates/brine_voxel_v1/examples/texture_builder.rs", "rank": 30, "score": 99916.51208036201 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]\n\nenum ServerState {\n\n Login,\n\n Play,\n\n}\n\n\n", "file_path": "crates/brine_proto/src/plugin/successful_login.rs", "rank": 31, "score": 99541.55935124686 }, { "content": "fn read_packets(mut codec_reader: CodecReader<StringCodec>) {\n\n for packet in codec_reader.iter() {\n\n println!(\"Packet received by client: {}\", packet);\n\n }\n\n}\n", "file_path": "crates/brine_net/examples/strings.rs", "rank": 32, "score": 99120.7278483436 }, { "content": "/// System that listens for ChunkData packets and sends ChunkData events to the\n\n/// client application.\n\nfn handle_chunk_data(\n\n mut packet_reader: CodecReader<ProtocolCodec>,\n\n mut chunk_events: EventWriter<event::clientbound::ChunkData>,\n\n) {\n\n for packet in packet_reader.iter() {\n\n match get_chunk_from_packet(packet) {\n\n Ok(Some(chunk_data)) => {\n\n trace!(\"Chunk: {:?}\", chunk_data);\n\n chunk_events.send(event::clientbound::ChunkData { chunk_data });\n\n }\n\n Err(e) => error!(\"{}\", e),\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "crates/brine_proto_backend/src/backend_stevenarella/chunks.rs", "rank": 33, "score": 98580.24885844527 }, { "content": "/// Trait representing a block state palette.\n\npub trait Palette {\n\n fn id_to_block_state(&self, id: u32) -> Option<BlockState>;\n\n}\n\n\n\n/// The palette of block states for a given [`ChunkSection`][crate::ChunkSection].\n\n///\n\n/// See 
<https://wiki.vg/index.php?title=Chunk_Format&oldid=14901#Palettes>.\n\n#[derive(Default)]\n\npub struct SectionPalette {\n\n id_to_block_state: Vec<BlockState>,\n\n}\n\n\n\nimpl SectionPalette {\n\n /// The maximum value of the `bits_per_block` field for which a section\n\n /// palette is used rather than directly using the global palette.\n\n pub const MAX_BITS_PER_BLOCK: u8 = 8;\n\n\n\n /// Decodes a chunk section's palette from a data blob.\n\n ///\n\n /// See <https://wiki.vg/index.php?title=Chunk_Format&oldid=14901#Palettes>\n", "file_path": "crates/brine_chunk/src/palette.rs", "rank": 34, "score": 98310.85283531995 }, { "content": "/// A trait for types that can turn a [`Chunk`] into [`VoxelMesh`]es.\n\npub trait ChunkBuilder: Sized {\n\n const TYPE: ChunkBuilderType;\n\n\n\n fn build_chunk(&self, chunk: &Chunk) -> Vec<VoxelMesh>;\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct ChunkBuilderType(pub &'static str);\n\n\n\nimpl ChunkBuilderType {\n\n pub const UNKNOWN: Self = Self(\"UNKNOWN_CHUNK_BUILDER\");\n\n pub const GREEDY_QUADS: Self = Self(\"GreedyQuadsChunkBuilder\");\n\n pub const VISIBLE_FACES: Self = Self(\"VisibleFacesChunkBuilder\");\n\n pub const NAIVE_BLOCKS: Self = Self(\"NaiveBlocksChunkBuilder\");\n\n}\n\n\n\nimpl Default for ChunkBuilderType {\n\n fn default() -> Self {\n\n Self::UNKNOWN\n\n }\n\n}\n\n\n\nimpl fmt::Debug for ChunkBuilderType {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(self.0).finish()\n\n }\n\n}\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/mod.rs", "rank": 35, "score": 97908.72677483302 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq)]\n\nstruct BitIndex {\n\n word_index: usize,\n\n bit_offset: u8,\n\n}\n\n\n\n/// [`PackedIntVec`] iterator.\n\npub struct Iter<'a> {\n\n vec: &'a PackedIntVec,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Iterator for Iter<'a> {\n\n type Item = u32;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let next = self.vec.get(self.index);\n\n\n\n if next.is_some() {\n\n self.index += 1;\n", "file_path": "crates/brine_chunk/src/decode/packed_vec.rs", "rank": 36, "score": 97875.89081222513 }, { "content": "pub trait Mesher {\n\n fn generate_mesh<V>(&mut self, view: V) -> Mesh\n\n where\n\n V: MeshingView;\n\n}\n", "file_path": "crates/brine_voxel/src/meshing/mesher.rs", "rank": 37, "score": 97281.07772461808 }, { "content": "fn setup(\n\n mc_data: Res<MinecraftData>,\n\n mc_assets: Res<MinecraftAssets>,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut commands: Commands,\n\n) {\n\n let chunk = random_chunk();\n\n\n\n let mesh = bake_chunk(&chunk, &*mc_data, &*mc_assets);\n\n\n\n commands.spawn_bundle(PbrBundle {\n\n mesh: meshes.add(mesh),\n\n ..Default::default()\n\n });\n\n\n\n commands.spawn_bundle(PerspectiveCameraBundle {\n\n transform: Transform::from_translation(Vec3::new(30.0, 24.0, 30.0))\n\n .looking_at(Vec3::ONE * 8.0, Vec3::Y),\n\n ..Default::default()\n\n });\n\n}\n", "file_path": "crates/brine_render/examples/bake_chunk.rs", "rank": 38, "score": 95474.36036048675 }, { "content": "fn main() {\n\n let mc_data = MinecraftData::for_version(\"1.14.4\");\n\n let mc_assets = MinecraftAssets::new(\"assets/1.14.4\", &mc_data).unwrap();\n\n\n\n App::new()\n\n .add_plugins(DefaultPlugins)\n\n .insert_resource(Msaa { samples: 4 })\n\n .insert_resource(WgpuOptions {\n\n features: WgpuFeatures::POLYGON_MODE_LINE,\n\n ..Default::default()\n\n })\n\n .insert_resource(WireframeConfig { global: true })\n\n .add_plugin(WireframePlugin)\n\n 
.add_plugin(WorldInspectorPlugin::new())\n\n .insert_resource(mc_data)\n\n .insert_resource(mc_assets)\n\n .add_startup_system(setup)\n\n .run();\n\n}\n\n\n", "file_path": "crates/brine_render/examples/bake_chunk.rs", "rank": 39, "score": 95474.36036048675 }, { "content": "fn spawn_sprite(\n\n atlases: Res<Assets<TextureAtlas>>,\n\n the_atlas: Res<TheAtlas>,\n\n mut commands: Commands,\n\n mut state: ResMut<State<AtlasState>>,\n\n) {\n\n if let Some(atlas) = atlases.get(&the_atlas.handle) {\n\n println!(\"Atlas stitched. Spawning sprite.\");\n\n\n\n commands.spawn().insert_bundle(SpriteBundle {\n\n texture: atlas.texture.clone(),\n\n // transform: Transform::from_scale(Vec3::ONE * 0.5),\n\n ..Default::default()\n\n });\n\n\n\n state.set(AtlasState::Stitched).unwrap();\n\n }\n\n}\n", "file_path": "crates/brine_render/examples/texture_atlas.rs", "rank": 40, "score": 95110.27365505527 }, { "content": "fn spawn_sprite(\n\n texture_manager: Res<TextureManager>,\n\n atlases: Res<Assets<TextureAtlas>>,\n\n mut commands: Commands,\n\n) {\n\n println!(\"Atlas stitched. Spawning sprite.\");\n\n\n\n let atlas_handle = texture_manager.atlases().next().unwrap();\n\n\n\n let atlas = atlases.get(atlas_handle).unwrap();\n\n\n\n commands.spawn().insert_bundle(SpriteBundle {\n\n texture: atlas.texture.clone(),\n\n transform: Transform::from_scale(Vec3::ONE * 0.5),\n\n ..Default::default()\n\n });\n\n}\n", "file_path": "crates/brine_render/examples/minecraft_textures.rs", "rank": 41, "score": 95110.27365505527 }, { "content": "fn setup(\n\n mesh: Res<Handle<Mesh>>,\n\n asset_server: Res<AssetServer>,\n\n mut materials: ResMut<Assets<StandardMaterial>>,\n\n mut commands: Commands,\n\n) {\n\n let offset = CHUNK_SIDE as f32 / 2.0;\n\n\n\n commands\n\n .spawn_bundle((Transform::default(), GlobalTransform::default(), Root))\n\n .with_children(|parent| {\n\n parent.spawn().insert_bundle(PbrBundle {\n\n transform: Transform::from_translation(Vec3::new(-offset, -offset, -offset)),\n\n mesh: mesh.clone(),\n\n material: materials.add(StandardMaterial {\n\n base_color_texture: Some(asset_server.load(\"placeholder.png\")),\n\n unlit: true,\n\n ..Default::default()\n\n }),\n\n ..Default::default()\n", "file_path": "crates/brine_voxel/examples/common/mesh_viewer.rs", "rank": 42, "score": 94457.07100207428 }, { "content": "fn read_net_events(mut reader: EventReader<NetworkEvent<DummyCodec>>) {\n\n for event in reader.iter() {\n\n println!(\"NetworkEvent: {:?}\", event);\n\n }\n\n}\n", "file_path": "crates/brine_net/examples/dummy.rs", "rank": 43, "score": 94104.78676526653 }, { "content": "pub fn load_texture_table(assets: &AssetPack) -> Result<TextureTable> {\n\n let mut table = TextureTable::default();\n\n\n\n for texture_id in assets\n\n .enumerate_resources(\"minecraft\", ResourceKind::Texture)?\n\n .into_iter()\n\n {\n\n table.insert(texture_id);\n\n }\n\n\n\n Ok(table)\n\n}\n", "file_path": "crates/brine_asset/src/bakery/textures.rs", "rank": 44, "score": 93551.06628022241 }, { "content": "/// The [`minecraft_varint`] uses the wrong encoding for signed VarInts, so this\n\n/// is a workaround.\n\npub trait VarIntRead {\n\n fn read_var_i32(&mut self) -> io::Result<i32>;\n\n fn read_var_i64(&mut self) -> io::Result<i64>;\n\n}\n\n\n\nimpl<R: io::Read> VarIntRead for R {\n\n fn read_var_i32(&mut self) -> io::Result<i32> {\n\n minecraft_varint::VarIntRead::read_var_u32(self).map(|v| v.try_into().unwrap())\n\n }\n\n\n\n fn read_var_i64(&mut self) -> io::Result<i64> {\n\n 
minecraft_varint::VarIntRead::read_var_u64(self).map(|v| v.try_into().unwrap())\n\n }\n\n}\n", "file_path": "crates/brine_chunk/src/decode/varint.rs", "rank": 45, "score": 93087.0088530221 }, { "content": "fn handle_login(\n\n mut state: ResMut<State<ServerState>>,\n\n mut rx: EventReader<Login>,\n\n mut tx: EventWriter<LoginSuccess>,\n\n) {\n\n if let Some(login) = rx.iter().last() {\n\n debug!(\"Dummy server advancing to state Play\");\n\n state.set(ServerState::Play).unwrap();\n\n\n\n tx.send(LoginSuccess {\n\n uuid: Uuid::new_v4(),\n\n username: login.username.clone(),\n\n });\n\n }\n\n}\n", "file_path": "crates/brine_proto/src/plugin/successful_login.rs", "rank": 46, "score": 92989.04403669338 }, { "content": "pub fn load_unbaked_block_models(mc_assets: &AssetPack) -> Result<UnbakedModels> {\n\n let model_ids = mc_assets.enumerate_resources(\"minecraft\", ResourceKind::BlockModel)?;\n\n\n\n let unbaked_models = model_ids\n\n .into_iter()\n\n .map(|model_id| {\n\n let model = mc_assets.load_block_model(model_id.as_str())?;\n\n Ok((model_id, model))\n\n })\n\n .collect::<Result<_>>()?;\n\n\n\n Ok(unbaked_models)\n\n}\n", "file_path": "crates/brine_asset/src/bakery/models/unbaked.rs", "rank": 47, "score": 90007.84412064863 }, { "content": "/// System that can be chained onto the end of another system to log any errors.\n\npub fn log_error<T, E: fmt::Display>(In(result): In<Result<T, E>>) {\n\n if let Err(e) = result {\n\n error!(\"{}\", e);\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 48, "score": 88125.8719693737 }, { "content": "pub fn load_unbaked_block_states(mc_assets: &AssetPack) -> Result<UnbakedBlockStatesTable> {\n\n let block_ids = mc_assets.enumerate_resources(\"minecraft\", ResourceKind::BlockStates)?;\n\n\n\n let unbaked_block_states = block_ids\n\n .into_iter()\n\n .map(|block_id| {\n\n let model = mc_assets.load_blockstates(block_id.as_str())?;\n\n Ok((block_id.as_str().to_string(), model))\n\n })\n\n .collect::<Result<_>>()?;\n\n\n\n Ok(unbaked_block_states)\n\n}\n", "file_path": "crates/brine_asset/src/bakery/block_states/unbaked.rs", "rank": 49, "score": 86856.62425278098 }, { "content": "fn random_block_state() -> BlockState {\n\n let id = fastrand::u32(1..10000);\n\n BlockState(id)\n\n}\n\n\n", "file_path": "crates/brine_render/examples/bake_chunk.rs", "rank": 50, "score": 86675.74024008759 }, { "content": "pub fn bake_all(mc_data: &MinecraftData, asset_pack: &AssetPack) -> Result<BakedAssets> {\n\n let texture_table = bakery::textures::load_texture_table(asset_pack)?;\n\n\n\n let unbaked_models = bakery::models::load_unbaked_block_models(asset_pack)?;\n\n let model_bakery = ModelBakery::new(&unbaked_models, &texture_table);\n\n\n\n let unbaked_block_states = bakery::block_states::load_unbaked_block_states(asset_pack)?;\n\n let block_states_bakery = BlockStatesBakery::new(mc_data, &unbaked_block_states, model_bakery);\n\n\n\n // (Half-)Bake block states in parallel.\n\n let half_baked_block_states: Vec<(BlockStateId, HalfBakedBlockState)> = unbaked_block_states\n\n .par_iter()\n\n .map(|(key, _)| key)\n\n .flat_map(|block_name| block_states_bakery.bake_block_states_for_block(block_name))\n\n .collect();\n\n\n\n debug!(\"Finished half-baking block states\");\n\n // trace!(\n\n // \"Half-baked block states: {:#?}\",\n\n // &half_baked_block_states[0..100]\n", "file_path": "crates/brine_asset/src/bakery/bake.rs", "rank": 51, "score": 86119.13026424448 }, { "content": " trait PartialEqEps<T = Self, E = T> {\n\n fn eq_eps(&self, rhs: &T, eps: &E) -> bool;\n\n 
}\n\n\n\n impl PartialEqEps for f32 {\n\n fn eq_eps(&self, rhs: &Self, eps: &Self) -> bool {\n\n (self - rhs).abs() <= *eps\n\n }\n\n }\n\n\n\n impl PartialEqEps for Vec3A {\n\n fn eq_eps(&self, rhs: &Self, eps: &Self) -> bool {\n\n (*self - *rhs).abs().cmple(*eps).all()\n\n }\n\n }\n\n\n\n impl<const N: usize> PartialEqEps<Self, Vec3A> for [Vec3A; N] {\n\n fn eq_eps(&self, rhs: &Self, eps: &Vec3A) -> bool {\n\n self.iter()\n\n .zip(rhs.iter())\n", "file_path": "crates/brine_voxel/src/cuboid/transform.rs", "rank": 52, "score": 85708.28612432306 }, { "content": "type CodecWriteEvent<Codec> = Write<<Codec as Encode>::Item, Codec>;\n\n\n\nimpl<Codec> Plugin for NetworkPlugin<Codec>\n\nwhere\n\n Codec: Decode + Encode + Default + Clone + Unpin + Any + Send + Sync,\n\n <Codec as Decode>::Item: Debug + Send + Sync,\n\n <Codec as Encode>::Item: Debug + Send + Sync,\n\n <Codec as Decode>::Error: Debug + Send + Sync,\n\n <Codec as Encode>::Error: Debug + Send + Sync,\n\n{\n\n fn build(&self, app: &mut App) {\n\n app.add_event::<NetworkEvent<Codec>>();\n\n app.add_event::<CodecReadEvent<Codec>>();\n\n app.add_event::<CodecWriteEvent<Codec>>();\n\n\n\n let task_pool = app.world.get_resource::<IoTaskPool>().unwrap().clone();\n\n let net_resource = NetworkResource::<Codec>::new(task_pool.0);\n\n app.insert_resource(net_resource);\n\n\n\n app.add_system_to_stage(CoreStage::PreUpdate, Self::send_network_events);\n", "file_path": "crates/brine_net/src/plugin.rs", "rank": 53, "score": 80170.52937350106 }, { "content": "type CodecReadEvent<Codec> = Read<<Codec as Decode>::Item, Codec>;\n", "file_path": "crates/brine_net/src/plugin.rs", "rank": 54, "score": 80170.52937350106 }, { "content": " fn is_face_occluded(&self, x: u8, y: u8, z: u8, face: Direction) -> bool {\n\n match (face, x, y, z) {\n\n // Faces on the edge of the chunk are always visible.\n\n (Direction::XNeg, 0, _, _)\n\n | (Direction::YNeg, _, 0, _)\n\n | (Direction::ZNeg, _, _, 0)\n\n | (Direction::XPos, Self::MAX_X.., _, _)\n\n | (Direction::YPos, _, Self::MAX_Y.., _)\n\n | (Direction::ZPos, _, _, Self::MAX_Z..) 
=> false,\n\n\n\n _ => {\n\n let [x, y, z] = face.translate_pos([x, y, z], 1).unwrap();\n\n !self.is_empty(x, y, z) && self.is_full_cube(x, y, z)\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn face_quads(&self, x: u8, y: u8, z: u8, face: Direction) -> Self::Quads {\n\n self.get_quads_for_block_face(x, y, z, Some(face))\n\n }\n\n\n\n #[inline]\n\n fn non_face_quads(&self, x: u8, y: u8, z: u8) -> Self::Quads {\n\n self.get_quads_for_block_face(x, y, z, None)\n\n }\n\n}\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 55, "score": 79178.79673697538 }, { "content": "use smallvec::SmallVec;\n\n\n\nuse brine_asset::{BakedModel, BlockFace, MinecraftAssets};\n\nuse brine_chunk::{ChunkSection, SECTION_HEIGHT, SECTION_WIDTH};\n\nuse brine_data::{blocks::Block, BlockStateId, MinecraftData};\n\nuse brine_voxel::{meshing::QuadPositions, Direction, MeshingView, VoxelView};\n\n\n\npub struct ChunkView<'a> {\n\n mc_data: &'a MinecraftData,\n\n mc_assets: &'a MinecraftAssets,\n\n chunk: &'a ChunkSection,\n\n}\n\n\n\nimpl<'a> ChunkView<'a> {\n\n const MAX_X: u8 = (SECTION_WIDTH as u8) - 1;\n\n const MAX_Y: u8 = (SECTION_HEIGHT as u8) - 1;\n\n const MAX_Z: u8 = (SECTION_WIDTH as u8) - 1;\n\n\n\n pub fn new(\n\n mc_data: &'a MinecraftData,\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 56, "score": 79176.10605995139 }, { "content": " y: u8,\n\n z: u8,\n\n face: Option<Direction>,\n\n ) -> SmallVec<[QuadPositions; 6]> {\n\n self.get_block_model(x, y, z)\n\n .map_or(Default::default(), |model| {\n\n let face = face.map(|direction| match direction {\n\n Direction::XNeg => BlockFace::West,\n\n Direction::XPos => BlockFace::East,\n\n Direction::YNeg => BlockFace::Down,\n\n Direction::YPos => BlockFace::Up,\n\n Direction::ZNeg => BlockFace::North,\n\n Direction::ZPos => BlockFace::South,\n\n });\n\n\n\n model\n\n .quads\n\n .iter()\n\n .filter(|quad| quad.cull_face == face)\n\n .map(|quad| {\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 57, "score": 79172.26088540623 }, { "content": " mc_assets: &'a MinecraftAssets,\n\n chunk: &'a ChunkSection,\n\n ) -> Self {\n\n Self {\n\n mc_data,\n\n mc_assets,\n\n chunk,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn get_block_state_id(&self, x: u8, y: u8, z: u8) -> BlockStateId {\n\n let block_state = self.chunk.get_block((x, y, z)).unwrap();\n\n BlockStateId(block_state.0 as u16)\n\n }\n\n\n\n #[inline]\n\n pub fn get_block(&self, x: u8, y: u8, z: u8) -> Option<Block<'a>> {\n\n let block_state_id = self.get_block_state_id(x, y, z);\n\n self.mc_data.blocks().get_by_state_id(block_state_id)\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 58, "score": 79168.89088482667 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn get_block_model(&self, x: u8, y: u8, z: u8) -> Option<&'a BakedModel> {\n\n let block_state_id = self.get_block_state_id(x, y, z);\n\n let baked_block_state = self.mc_assets.block_states().get_by_key(block_state_id)?;\n\n let model_key = baked_block_state.get_first_model()?;\n\n self.mc_assets.models().get_by_key(model_key)\n\n }\n\n\n\n #[inline]\n\n pub fn is_air(&self, x: u8, y: u8, z: u8) -> bool {\n\n self.get_block(x, y, z)\n\n .map_or(false, |block| block.is_air())\n\n }\n\n\n\n #[inline]\n\n fn get_quads_for_block_face(\n\n &self,\n\n x: u8,\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 59, "score": 79167.89404617216 }, { "content": " fn size_z(&self) -> u8 {\n\n SECTION_WIDTH as 
u8\n\n }\n\n}\n\n\n\nimpl<'a> MeshingView for ChunkView<'a> {\n\n type Quads = SmallVec<[QuadPositions; 6]>;\n\n\n\n #[inline]\n\n fn is_empty(&self, x: u8, y: u8, z: u8) -> bool {\n\n self.is_air(x, y, z)\n\n }\n\n\n\n #[inline]\n\n fn is_full_cube(&self, x: u8, y: u8, z: u8) -> bool {\n\n self.get_block_model(x, y, z)\n\n .map_or(false, |model| model.is_full_cube)\n\n }\n\n\n\n #[inline]\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 60, "score": 79165.61448548053 }, { "content": " quad.positions\n\n .map(|[x0, y0, z0]| [x0 + x as f32, y0 + y as f32, z0 + z as f32])\n\n })\n\n .collect()\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> VoxelView for ChunkView<'a> {\n\n #[inline(always)]\n\n fn size_x(&self) -> u8 {\n\n SECTION_WIDTH as u8\n\n }\n\n\n\n #[inline(always)]\n\n fn size_y(&self) -> u8 {\n\n SECTION_HEIGHT as u8\n\n }\n\n\n\n #[inline(always)]\n", "file_path": "crates/brine_render/src/chunk/meshing_view/chunk_view.rs", "rank": 61, "score": 79159.0382157174 }, { "content": "mod chunk_view;\n\n\n\npub use chunk_view::ChunkView;\n", "file_path": "crates/brine_render/src/chunk/meshing_view/mod.rs", "rank": 80, "score": 71310.34132403495 }, { "content": " builder.min,\n\n builder.max,\n\n &builder.faces,\n\n &mut buffer,\n\n );\n\n BlockMeshOutput::GreedyQuads(buffer)\n\n })\n\n }\n\n}\n\n\n\nimpl ChunkBuilder for GreedyQuadsChunkBuilder {\n\n const TYPE: ChunkBuilderType = ChunkBuilderType::GREEDY_QUADS;\n\n\n\n fn build_chunk(&self, chunk: &Chunk) -> Vec<VoxelMesh> {\n\n Self::build_chunk(chunk)\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq)]\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 81, "score": 69598.11948091883 }, { "content": " builder.min,\n\n builder.max,\n\n &builder.faces,\n\n &mut buffer,\n\n );\n\n BlockMeshOutput::VisibleFaces(buffer)\n\n })\n\n }\n\n}\n\n\n\nimpl ChunkBuilder for VisibleFacesChunkBuilder {\n\n const TYPE: ChunkBuilderType = ChunkBuilderType::VISIBLE_FACES;\n\n\n\n fn build_chunk(&self, chunk: &Chunk) -> Vec<VoxelMesh> {\n\n Self::build_chunk(chunk)\n\n }\n\n}\n\n\n\n/// A [`ChunkBuilder`] that uses the [`greedy_quads`] algorithm from the\n\n/// [`block_mesh`] crate to build chunks.\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 82, "score": 69593.71027746964 }, { "content": "//! 
Two implementations of chunk builders using algorithms from the `block-mesh` crate.\n\n\n\nuse bevy::prelude::*;\n\nuse block_mesh::{\n\n ndshape::{ConstShape3u32, Shape},\n\n GreedyQuadsBuffer, MergeVoxel, OrientedBlockFace, UnitQuadBuffer, UnorientedQuad, Voxel,\n\n RIGHT_HANDED_Y_UP_CONFIG,\n\n};\n\n\n\nuse brine_chunk::{Chunk, ChunkSection, SECTION_WIDTH};\n\n\n\nuse crate::{\n\n chunk_builder::ChunkBuilderType,\n\n mesh::{Axis, VoxelFace, VoxelMesh},\n\n};\n\n\n\nuse super::ChunkBuilder;\n\n\n\n/// A [`ChunkBuilder`] that uses the [`visible_block_faces`] algorithm from the\n\n/// [`block_mesh`] crate to build chunks.\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 83, "score": 69592.37724169552 }, { "content": "///\n\n/// [`visible_block_faces`]: block_mesh::visible_block_faces\n\n#[derive(Default)]\n\npub struct VisibleFacesChunkBuilder;\n\n\n\nimpl VisibleFacesChunkBuilder {\n\n pub fn build_chunk(chunk: &Chunk) -> Vec<VoxelMesh> {\n\n chunk\n\n .sections\n\n .iter()\n\n .map(Self::build_chunk_section)\n\n .collect()\n\n }\n\n\n\n pub fn build_chunk_section(chunk_section: &ChunkSection) -> VoxelMesh {\n\n BlockMeshBuilder::new().build_with(chunk_section, |builder| {\n\n let mut buffer = UnitQuadBuffer::new();\n\n block_mesh::visible_block_faces(\n\n &builder.voxels[..],\n\n &builder.shape,\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 84, "score": 69592.23334418621 }, { "content": "///\n\n/// [`greedy_quads`]: block_mesh::greedy_quads\n\n#[derive(Default)]\n\npub struct GreedyQuadsChunkBuilder;\n\n\n\nimpl GreedyQuadsChunkBuilder {\n\n pub fn build_chunk(chunk: &Chunk) -> Vec<VoxelMesh> {\n\n chunk\n\n .sections\n\n .iter()\n\n .map(Self::build_chunk_section)\n\n .collect()\n\n }\n\n\n\n pub fn build_chunk_section(chunk_section: &ChunkSection) -> VoxelMesh {\n\n BlockMeshBuilder::new().build_with(chunk_section, |builder| {\n\n let mut buffer = GreedyQuadsBuffer::new(builder.voxels.len());\n\n block_mesh::greedy_quads(\n\n &builder.voxels[..],\n\n &builder.shape,\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 85, "score": 69587.79270019248 }, { "content": " positions,\n\n tex_coords,\n\n indices,\n\n });\n\n });\n\n\n\n VoxelMesh { faces }\n\n }\n\n\n\n fn get_axis(face: &OrientedBlockFace) -> Axis {\n\n match face.signed_normal().to_array() {\n\n [1, 0, 0] => Axis::XPos,\n\n [-1, 0, 0] => Axis::XNeg,\n\n [0, 1, 0] => Axis::YPos,\n\n [0, -1, 0] => Axis::YNeg,\n\n [0, 0, 1] => Axis::ZPos,\n\n [0, 0, -1] => Axis::ZNeg,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 86, "score": 69586.71733235971 }, { "content": "\n\n fn build_with<F>(&mut self, chunk_section: &ChunkSection, func: F) -> VoxelMesh\n\n where\n\n F: FnOnce(&BlockMeshBuilder) -> BlockMeshOutput,\n\n {\n\n for (x, y, z, block_state) in chunk_section.block_states.iter() {\n\n let index = self\n\n .shape\n\n .linearize([x as u32 + 1, y as u32 + 1, z as u32 + 1]);\n\n self.voxels[index as usize] = BlockState(block_state);\n\n }\n\n\n\n let output = func(self);\n\n\n\n let voxel_mesh = self.generate_voxel_mesh(output);\n\n\n\n debug!(\"built chunk\");\n\n\n\n voxel_mesh\n\n }\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 87, "score": 69585.45837632839 }, { "content": "\n\n fn generate_voxel_mesh(&self, output: BlockMeshOutput) -> VoxelMesh {\n\n let num_faces = output.num_quads();\n\n let mut faces = 
Vec::with_capacity(num_faces);\n\n\n\n output.for_each_quad_and_face(&self.faces, |quad, face| {\n\n let [x, y, z] = quad.minimum.map(|elt| elt as u8);\n\n let axis = Self::get_axis(face);\n\n let tex_coords = face.tex_coords(RIGHT_HANDED_Y_UP_CONFIG.u_flip_face, true, &quad);\n\n let indices = face.quad_mesh_indices(0).map(|i| i as u8);\n\n\n\n // Mesh needs to be offset by [-1, -1, -1] to be properly aligned.\n\n let voxel = [x - 1, y - 1, z - 1];\n\n let positions = face\n\n .quad_mesh_positions(&quad, 1.0)\n\n .map(|[x, y, z]| [x - 1.0, y - 1.0, z - 1.0]);\n\n\n\n faces.push(VoxelFace {\n\n voxel,\n\n axis,\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 88, "score": 69584.36000596515 }, { "content": " match self {\n\n Self::VisibleFaces(buffer) => {\n\n for (group, face) in buffer.groups.into_iter().zip(faces.iter()) {\n\n for quad in group.into_iter() {\n\n func(quad.into(), face);\n\n }\n\n }\n\n }\n\n Self::GreedyQuads(buffer) => {\n\n for (group, face) in buffer.quads.groups.into_iter().zip(faces.iter()) {\n\n for quad in group.into_iter() {\n\n func(quad, face)\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/brine_voxel_v1/src/chunk_builder/block_mesh.rs", "rank": 89, "score": 69580.43199018536 }, { "content": "#[derive(Parser)]\n\nstruct Args {\n\n /// Run with additional debug utilities (e.g., egui inspector).\n\n #[clap(short, long)]\n\n debug: bool,\n\n\n\n /// Run with a fake server that serves chunks from a directory of chunk files.\n\n #[clap(name = \"chunks\", long, value_name = \"CHUNK_DIR\")]\n\n chunk_dir: Option<PathBuf>,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 90, "score": 69549.69381248213 }, { "content": "#[derive(Debug, Clone)]\n\nstruct LoginInfo {\n\n server: String,\n\n username: String,\n\n exit_on_disconnect: bool,\n\n}\n\n\n\n/// Simple plugin that initiates login to a Minecraft server on app startup.\n\npub struct LoginPlugin {\n\n info: LoginInfo,\n\n}\n\n\n\nimpl LoginPlugin {\n\n pub fn new(server: String, username: String) -> Self {\n\n Self {\n\n info: LoginInfo {\n\n server,\n\n username,\n\n exit_on_disconnect: false,\n\n },\n\n }\n", "file_path": "src/login.rs", "rank": 91, "score": 68400.65377366953 }, { "content": "#[derive(Default)]\n\nstruct TheAtlas {\n\n handle: Handle<TextureAtlas>,\n\n}\n\n\n", "file_path": "crates/brine_render/examples/texture_atlas.rs", "rank": 92, "score": 65293.20545119303 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\nenum AtlasState {\n\n Idle,\n\n LoadingTextures,\n\n Stitched,\n\n}\n\n\n", "file_path": "crates/brine_render/examples/texture_atlas.rs", "rank": 93, "score": 64976.972811535874 }, { "content": "#[derive(Default)]\n\nstruct TheAtlas {\n\n handle: Handle<TextureAtlas>,\n\n}\n\n\n", "file_path": "crates/brine_render/src/texture/mc_textures.rs", "rank": 94, "score": 64360.342918617105 }, { "content": "#[derive(Default)]\n\nstruct Atlas {\n\n handle: Option<Handle<TextureAtlas>>,\n\n}\n\n\n", "file_path": "crates/brine_voxel_v1/examples/texture_builder.rs", "rank": 95, "score": 64360.342918617105 }, { "content": "struct MinecraftDataInner {\n\n pub blocks: Blocks,\n\n pub version: Version,\n\n}\n", "file_path": "crates/brine_data/src/data.rs", "rank": 96, "score": 64355.83718071174 }, { "content": "struct PendingAtlas {\n\n /// Strong handle to each texture that will eventually be added to the atlas.\n\n textures: Vec<Handle<Image>>,\n\n\n\n /// Strong handle that we will eventually populate with a built atlas.\n\n handle: 
Handle<TextureAtlas>,\n\n}\n\n\n\nimpl PendingAtlas {\n\n fn all_textures_loaded(&self, asset_server: &AssetServer) -> bool {\n\n self.textures.iter().all(|handle| {\n\n let load_state = asset_server.get_load_state(handle);\n\n load_state != LoadState::Loading\n\n })\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct BlockTextures {\n\n /// Strong handle to a placeholder texture.\n", "file_path": "crates/brine_voxel_v1/src/texture.rs", "rank": 97, "score": 64355.83718071174 }, { "content": "fn main() {\n\n let args = Args::parse();\n\n\n\n let mut app = App::new();\n\n\n\n // Default plugins.\n\n\n\n app.insert_resource(LogSettings {\n\n level: Level::DEBUG,\n\n filter: String::from(DEFAULT_LOG_FILTER),\n\n });\n\n app.add_plugins(DefaultPlugins);\n\n\n\n // Brine-specific plugins.\n\n\n\n app.add_plugin(ProtocolPlugin);\n\n\n\n if let Some(chunk_dir) = args.chunk_dir {\n\n app.add_plugin(AlwaysSuccessfulLoginPlugin);\n\n app.add_plugin(ServeChunksFromDirectoryPlugin::new(chunk_dir));\n", "file_path": "src/main.rs", "rank": 98, "score": 63521.12920244793 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\nenum LoginState {\n\n Idle,\n\n\n\n // Phase 1\n\n StatusAwaitingConnect,\n\n StatusAwaitingResponse,\n\n StatusAwaitingDisconnect,\n\n\n\n // Phase 2\n\n LoginAwaitingConnect,\n\n LoginAwaitingSuccess,\n\n\n\n Play,\n\n}\n\n\n", "file_path": "crates/brine_proto_backend/src/backend_stevenarella/login.rs", "rank": 99, "score": 63239.88483719373 } ]
Rust
src/generator/painter/stroke.rs
zeh/art-generator
916ea37631dc9a0030187af06afad0914b0a39ff
use std::collections::HashMap; use image::{Pixel, Rgb, RgbImage}; use crate::generator::painter::Painter; use crate::generator::utils::color::BlendingMode; use crate::generator::utils::geom::find_target_draw_rect; use crate::generator::utils::pixel::{blend, blend_linear}; use crate::generator::utils::random::{ get_noise_value, get_random_entry_weighted, get_random_noise_sequence, get_random_range, get_random_ranges_bias_weighted, get_random_size_ranges_bias_weighted, get_rng, }; use crate::generator::utils::units::{Margins, SizeUnit, WeightedValue}; use crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color}; pub struct StrokePainter { pub options: Options, } pub struct Options { pub blending_mode: Vec<WeightedValue<BlendingMode>>, pub alpha: Vec<WeightedValue<(f64, f64)>>, pub alpha_bias: f64, pub width: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub width_bias: f64, pub height_bias: f64, pub wave_height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_height_bias: f64, pub wave_length: Vec<WeightedValue<(SizeUnit, SizeUnit)>>, pub wave_length_bias: f64, pub anti_alias: bool, pub color_seed: f64, pub rng_seed: u32, pub margins: Margins<SizeUnit>, } impl StrokePainter { pub fn new() -> Self { let options = Options { blending_mode: vec![WeightedValue { value: BlendingMode::default(), weight: 1.0, }], alpha: vec![WeightedValue { value: (1.0, 1.0), weight: 1.0, }], alpha_bias: 0.0, width: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], width_bias: 0.0, height: vec![WeightedValue { value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)), weight: 1.0, }], height_bias: 0.0, wave_height: vec![WeightedValue { value: (SizeUnit::Fraction(0.01), SizeUnit::Fraction(0.01)), weight: 1.0, }], wave_height_bias: 0.0, wave_length: vec![WeightedValue { value: (SizeUnit::Fraction(0.5), SizeUnit::Fraction(0.5)), weight: 1.0, }], wave_length_bias: 0.0, anti_alias: true, color_seed: 0.0, rng_seed: 0, margins: Margins::<SizeUnit> { top: SizeUnit::Pixels(0), right: SizeUnit::Pixels(0), bottom: SizeUnit::Pixels(0), left: SizeUnit::Pixels(0), }, }; StrokePainter { options, } } } impl Painter for StrokePainter { fn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> { let mut rng = get_rng(self.options.rng_seed, iteration); let image_area = canvas.dimensions(); let target_area = match find_target_draw_rect(image_area, &self.options.margins) { Ok(rect) => rect, Err(err) => return Err(err), }; let target_visible_area = (image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32)); let rect_w = get_random_size_ranges_bias_weighted( &mut rng, &self.options.width, self.options.width_bias, target_visible_area.0, ); let rect_h = get_random_size_ranges_bias_weighted( &mut rng, &self.options.height, self.options.height_bias, target_visible_area.1, ); let rect_x = get_random_range( &mut rng, target_area.x as f64, (target_area.x + target_area.width) as f64 - rect_w, ); let rect_y = get_random_range( &mut rng, target_area.y as f64, (target_area.y + target_area.height) as f64 - rect_h, ); let x1 = rect_x.round().max(0.0).min(image_area.0 as f64) as u32; let x2 = (rect_x + rect_w).round().max(0.0).min(image_area.0 as f64) as u32; let y1 = rect_y.round().max(0.0).min(image_area.1 as f64) as u32; let y2 = (rect_y + rect_h).round().max(0.0).min(image_area.1 as f64) as u32; let random_color = get_random_color(&mut rng); let 
seed_color = get_pixel_interpolated(seed_map, (x1 + x2) as f64 / 2.0, (y1 + y2) as f64 / 2.0); let color = blend_linear(&random_color, &seed_color, self.options.color_seed); let alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias); let wave_height = get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_height, self.options.wave_height_bias, target_visible_area.0 as u32, ); let wave_length = get_random_size_ranges_bias_weighted( &mut rng, &self.options.wave_length, self.options.wave_length_bias, target_visible_area.1 as u32, ); let blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode); let mut painted_canvas = canvas.clone(); if wave_height == 0.0 || wave_length == 0.0 { for x in x1..x2 { for y in y1..y2 { let new_pixel = Rgb(blend(painted_canvas.get_pixel(x, y).channels(), &color, alpha, &blending_mode)); painted_canvas.put_pixel(x, y, new_pixel); } } } else { let margins: f64 = wave_height / 2.0; let margin_ceil: u32 = margins.ceil() as u32; let noise = get_random_noise_sequence(&mut rng, -margins, margins); let noise_freq = wave_length; let x1_safe = (x1 as i64 - margin_ceil as i64).max(0) as u32; let x2_safe = (x2 + margin_ceil).min(image_area.0 as u32); let y1_safe = (y1 as i64 - margin_ceil as i64).max(0) as u32; let y2_safe = (y2 + margin_ceil).min(image_area.1 as u32); for x in x1_safe..x2_safe { for y in y1_safe..y2_safe { let alpha_x = if x >= x1 + margin_ceil && x < x2 - margin_ceil { 1.0 } else { let noise_x = get_noise_value(noise, y as f64 / noise_freq); let offset_x1 = x as f64 - (x1 as f64 + noise_x); let alpha_x1 = if offset_x1 > 0.5 { 1.0 } else if offset_x1 < -0.5 { 0.0 } else { offset_x1 + 0.5 }; let offset_x2 = (x2 as f64 + noise_x) - x as f64; let alpha_x2 = if offset_x2 > 0.5 { 1.0 } else if offset_x2 < -0.5 { 0.0 } else { offset_x2 + 0.5 }; alpha_x1 * alpha_x2 }; let alpha_y = if y >= y1 + margin_ceil && y < y2 - margin_ceil { 1.0 } else { let noise_y = get_noise_value(noise, x as f64 / noise_freq); let offset_y1 = y as f64 - (y1 as f64 + noise_y); let alpha_y1 = if offset_y1 > 0.5 { 1.0 } else if offset_y1 < -0.5 { 0.0 } else { offset_y1 + 0.5 }; let offset_y2 = (y2 as f64 + noise_y) - y as f64; let alpha_y2 = if offset_y2 > 0.5 { 1.0 } else if offset_y2 < -0.5 { 0.0 } else { offset_y2 + 0.5 }; alpha_y1 * alpha_y2 }; let new_pixel = Rgb(blend( painted_canvas.get_pixel(x, y).channels(), &color, if self.options.anti_alias { alpha_x * alpha_y * alpha } else { if alpha_x * alpha_y >= 0.5 { 1.0 } else { 0.0 } }, &blending_mode, )); painted_canvas.put_pixel(x, y, new_pixel); } } } Ok(painted_canvas) } fn get_metadata(&self) -> HashMap<String, String> { let mut data = HashMap::new(); data.insert(String::from("RNG seed"), format!("{}", &self.options.rng_seed)); data } }
use std::collections::HashMap;

use image::{Pixel, Rgb, RgbImage};

use crate::generator::painter::Painter;
use crate::generator::utils::color::BlendingMode;
use crate::generator::utils::geom::find_target_draw_rect;
use crate::generator::utils::pixel::{blend, blend_linear};
use crate::generator::utils::random::{
	get_noise_value, get_random_entry_weighted, get_random_noise_sequence, get_random_range,
	get_random_ranges_bias_weighted, get_random_size_ranges_bias_weighted, get_rng,
};
use crate::generator::utils::units::{Margins, SizeUnit, WeightedValue};
use crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color};

pub struct StrokePainter {
	pub options: Options,
}

pub struct Options {
	pub blending_mode: Vec<WeightedValue<BlendingMode>>,
	pub alpha: Vec<WeightedValue<(f64, f64)>>,
	pub alpha_bias: f64,
	pub width: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,
	pub height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,
	pub width_bias: f64,
	pub height_bias: f64,
	pub wave_height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,
	pub wave_height_bias: f64,
	pub wave_length: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,
	pub wave_length_bias: f64,
	pub anti_alias: bool,
	pub color_seed: f64,
	pub rng_seed: u32,
	pub margins: Margins<SizeUnit>,
}

impl StrokePainter {
	pub fn new() -> Self {
		let options = Options {
			blending_mode: vec![WeightedValue {
				value: BlendingMode::default(),
				weight: 1.0,
			}],
			alpha: vec![WeightedValue {
				value: (1.0, 1.0),
				weight: 1.0,
			}],
			alpha_bias: 0.0,
			width: vec![WeightedValue {
				value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)),
				weight: 1.0,
			}],
			width_bias: 0.0,
			height: vec![WeightedValue {
				value: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)),
				weight: 1.0,
			}],
			height_bias: 0.0,
			wave_height: vec![WeightedValue {
				value: (SizeUnit::Fraction(0.01), SizeUnit::Fraction(0.01)),
				weight: 1.0,
			}],
			wave_height_bias: 0.0,
			wave_length: vec![WeightedValue {
				value: (SizeUnit::Fraction(0.5), SizeUnit::Fraction(0.5)),
				weight: 1.0,
			}],
			wave_length_bias: 0.0,
			anti_alias: true,
			color_seed: 0.0,
			rng_seed: 0,
			margins: Margins::<SizeUnit> {
				top: SizeUnit::Pixels(0),
				right: SizeUnit::Pixels(0),
				bottom: SizeUnit::Pixels(0),
				left: SizeUnit::Pixels(0),
			},
		};

		StrokePainter {
			options,
		}
	}
}

impl Painter for StrokePainter {
	fn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> {
		let mut rng = get_rng(self.options.rng_seed, iteration);

		let image_area = canvas.dimensions();
		let target_area = match find_target_draw_rect(image_area, &self.options.margins) {
			Ok(rect) => rect,
			Err(err) => return Err(err),
		};
		let target_visible_area =
			(image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32));

		let rect_w = get_random_size_ranges_bias_weighted(
			&mut rng,
			&self.options.width,
			self.options.width_bias,
			target_visible_area.0,
		);
		let rect_h = get_random_size_ranges_bias_weighted(
			&mut rng,
			&self.options.height,
			self.options.height_bias,
			target_visible_area.1,
		);

		let rect_x = get_random_range(
			&mut rng,
			target_area.x as f64,
			(target_area.x + target_area.width) as f64 - rect_w,
		);
		let rect_y = get_random_range(
			&mut rng,
			target_area.y as f64,
			(target_area.y + target_area.height) as f64 - rect_h,
		);

		let x1 = rect_x.round().max(0.0).min(image_area.0 as f64) as u32;
		let x2 = (rect_x + rect_w).round().max(0.0).min(image_area.0 as f64) as u32;
		let y1 = rect_y.round().max(0.0).min(image_area.1 as f64) as u32;
		let y2 = (rect_y + rect_h).round().max(0.0).min(image_area.1 as f64) as u32;

		let random_color = get_random_color(&mut rng);
		let seed_color = get_pixel_interpolated(seed_map, (x1 + x2) as f64 / 2.0, (y1 + y2) as f64 / 2.0);
		let color = blend_linear(&random_color, &seed_color, self.options.color_seed);
		let alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias);
		let wave_height = get_random_size_ranges_bias_weighted(
			&mut rng,
			&self.options.wave_height,
			self.options.wave_height_bias,
			target_visible_area.0 as u32,
		);
		let wave_length = get_random_size_ranges_bias_weighted(
			&mut rng,
			&self.options.wave_length,
			self.options.wave_length_bias,
			target_visible_area.1 as u32,
		);
		let blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode);

		let mut painted_canvas = canvas.clone();
		if wave_height == 0.0 || wave_length == 0.0 {
			for x in x1..x2 {
				for y in y1..y2 {
					let new_pixel =
						Rgb(blend(painted_canvas.get_pixel(x, y).channels(), &color, alpha, &blending_mode));
					painted_canvas.put_pixel(x, y, new_pixel);
				}
			}
		} else {
			let margins: f64 = wave_height / 2.0;
			let margin_ceil: u32 = margins.ceil() as u32;
			let noise = get_random_noise_sequence(&mut rng, -margins, margins);
			let noise_freq = wave_length;
			let x1_safe = (x1 as i64 - margin_ceil as i64).max(0) as u32;
			let x2_safe = (x2 + margin_ceil).min(image_area.0 as u32);
			let y1_safe = (y1 as i64 - margin_ceil as i64).max(0) as u32;
			let y2_safe = (y2 + margin_ceil).min(image_area.1 as u32);
			for x in x1_safe..x2_safe {
				for y in y1_safe..y2_safe {
					let alpha_x = if x >= x1 + margin_ceil && x < x2 - margin_ceil {
						1.0
					} else {
						let noise_x = get_noise_value(noise, y as f64 / noise_freq);
						let offset_x1 = x as f64 - (x1 as f64 + noise_x);
						let alpha_x1 =
;
						let offset_x2 = (x2 as f64 + noise_x) - x as f64;
						let alpha_x2 = if offset_x2 > 0.5 { 1.0 } else if offset_x2 < -0.5 { 0.0 } else { offset_x2 + 0.5 };
						alpha_x1 * alpha_x2
					};
					let alpha_y = if y >= y1 + margin_ceil && y < y2 - margin_ceil {
						1.0
					} else {
						let noise_y = get_noise_value(noise, x as f64 / noise_freq);
						let offset_y1 = y as f64 - (y1 as f64 + noise_y);
						let alpha_y1 = if offset_y1 > 0.5 { 1.0 } else if offset_y1 < -0.5 { 0.0 } else { offset_y1 + 0.5 };
						let offset_y2 = (y2 as f64 + noise_y) - y as f64;
						let alpha_y2 = if offset_y2 > 0.5 { 1.0 } else if offset_y2 < -0.5 { 0.0 } else { offset_y2 + 0.5 };
						alpha_y1 * alpha_y2
					};
					let new_pixel = Rgb(blend(
						painted_canvas.get_pixel(x, y).channels(),
						&color,
						if self.options.anti_alias {
							alpha_x * alpha_y * alpha
						} else {
							if alpha_x * alpha_y >= 0.5 {
								1.0
							} else {
								0.0
							}
						},
						&blending_mode,
					));
					painted_canvas.put_pixel(x, y, new_pixel);
				}
			}
		}

		Ok(painted_canvas)
	}

	fn get_metadata(&self) -> HashMap<String, String> {
		let mut data = HashMap::new();
		data.insert(String::from("RNG seed"), format!("{}", &self.options.rng_seed));
		data
	}
}
if offset_x1 > 0.5 { 1.0 } else if offset_x1 < -0.5 { 0.0 } else { offset_x1 + 0.5 }
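// Hedged illustration of the expression above (the helper name `edge_ramp` and this test
// module are invented for the sketch; they do not exist in the repository): the `if` chain
// maps a signed distance from the noise-displaced edge to a coverage value, saturating at
// 0.0 and 1.0 and ramping linearly across a one-pixel band centred on the edge.
fn edge_ramp(offset: f64) -> f64 {
	if offset > 0.5 {
		1.0
	} else if offset < -0.5 {
		0.0
	} else {
		offset + 0.5
	}
}

#[cfg(test)]
mod edge_ramp_sketch_tests {
	use super::edge_ramp;

	#[test]
	fn ramps_across_one_pixel() {
		assert_eq!(edge_ramp(-1.0), 0.0); // fully outside the stroke
		assert_eq!(edge_ramp(0.0), 0.5); // exactly on the wavy edge
		assert_eq!(edge_ramp(1.0), 1.0); // fully inside the stroke
	}
}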
if_condition
[ { "content": "#[inline(always)]\n\npub fn distance(x1: f64, y1: f64, x2: f64, y2: f64) -> f64 {\n\n\tlet x = x1 - x2;\n\n\tlet y = y1 - y2;\n\n\t(x * x + y * y).sqrt()\n\n}\n\n\n", "file_path": "src/generator/utils/geom.rs", "rank": 0, "score": 301141.6146164706 }, { "content": "/// Parses a float pair with a weight (e.f. \"1-2@1\", \"10.2\", \"5.2-10@2\") into a WeightedValue<>\n\npub fn parse_weighted_float_pair(src: &str) -> Result<WeightedValue<(f64, f64)>, &str> {\n\n\tmatch parse_weight(src) {\n\n\t\tOk((src_value, weight)) => match parse_float_pair(src_value) {\n\n\t\t\tOk(value) => Ok(WeightedValue {\n\n\t\t\t\tvalue,\n\n\t\t\t\tweight,\n\n\t\t\t}),\n\n\t\t\tErr(err) => Err(err),\n\n\t\t},\n\n\t\tErr(err) => Err(err),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 1, "score": 234002.1259021284 }, { "content": "pub fn get_rng(seed: u32, iteration: u32) -> Rng {\n\n\t// Seeds close to each other produce very similar results, so we multiply them a bit\n\n\tRng::from_seed(seed.wrapping_add(Rng::from_seed(iteration).next()))\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 2, "score": 231607.4254587189 }, { "content": "#[inline(always)]\n\npub fn get_random_range(rng: &mut Rng, min: f64, pseudo_max: f64) -> f64 {\n\n\trng.next_f64_range(min, pseudo_max)\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 3, "score": 219467.49558220644 }, { "content": "pub fn get_random_noise_sequence(rng: &mut Rng, min: f64, max: f64) -> [f64; 256] {\n\n\tlet mut sequence = [0f64; 256];\n\n\tfor i in 0..256 {\n\n\t\tsequence[i] = get_random_range(rng, min, max);\n\n\t}\n\n\treturn sequence;\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 4, "score": 216948.95952721837 }, { "content": "#[inline(always)]\n\npub fn get_random_range_bias(rng: &mut Rng, min: f64, max: f64, bias: f64) -> f64 {\n\n\tif min == max {\n\n\t\treturn min;\n\n\t};\n\n\tlet mut r = rng.next_f64();\n\n\tif bias < 0.0f64 {\n\n\t\tr = r.powf(-bias + 1.0f64);\n\n\t} else if bias > 0.0f64 {\n\n\t\tr = 1.0f64 - (1.0f64 - r).powf(bias + 1.0f64);\n\n\t}\n\n\tmin + r * (max - min)\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 5, "score": 215259.40710548378 }, { "content": "#[inline(always)]\n\npub fn blend_linear(bottom: &[u8], top: &[u8], opacity: f64) -> [u8; 3] {\n\n\tif opacity == 1.0 {\n\n\t\t[top[0], top[1], top[2]]\n\n\t} else {\n\n\t\tblend(bottom, top, opacity, &BlendingMode::Normal)\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/pixel.rs", "rank": 6, "score": 207550.44019734004 }, { "content": "/// Parses \"*@n\" into a string \"*\" with n weight. 
This is used so we can have pairs with weights.\n\npub fn parse_weight(src: &str) -> Result<(&str, f64), &str> {\n\n\tlet values = src.split('@').collect::<Vec<&str>>();\n\n\tmatch values.len() {\n\n\t\t1 => Ok((src, 1.0)),\n\n\t\t2 => match parse_float(values[1]) {\n\n\t\t\tOk(val) => Ok((values[0].clone(), val)),\n\n\t\t\tErr(err) => Err(err),\n\n\t\t},\n\n\t\t_ => Err(\"Value cannot contain more than one weight value\"),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 7, "score": 205885.53504892043 }, { "content": "pub fn get_random_entry_weighted<'a, T>(rng: &mut Rng, entries: &'a Vec<WeightedValue<T>>) -> &'a T {\n\n\tlet total_weight = entries.iter().map(|r| r.weight).sum();\n\n\tlet desired_position = get_random_range(rng, 0.0, total_weight);\n\n\tlet mut acc = 0.0f64;\n\n\t&entries\n\n\t\t.iter()\n\n\t\t.find(|&r| {\n\n\t\t\tacc += r.weight;\n\n\t\t\tacc >= desired_position\n\n\t\t})\n\n\t\t.expect(\"finding weighted random value\")\n\n\t\t.value\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 8, "score": 203597.35449815862 }, { "content": "pub fn get_random_color(rng: &mut Rng) -> [u8; 3] {\n\n\t[rng.next_u32_range(0, 256) as u8, rng.next_u32_range(0, 256) as u8, rng.next_u32_range(0, 256) as u8]\n\n}\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 9, "score": 203228.12711579926 }, { "content": "pub fn parse_color_matrix(src: &str) -> Result<[f64; 12], &str> {\n\n\tlet values = parse_float_list(&src, ',')?;\n\n\tmatch values.len() {\n\n\t\t12 => values.try_into().or(Err(\"Could not convert float list\")) as Result<[f64; 12], &str>,\n\n\t\t_ => Err(\"Matrix length must be 12\"),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 10, "score": 197408.28109223073 }, { "content": "/// Parses a size pair with a weight (e.f. \"1-2@1\", \"10%\", \"5-10%@2\") into a WeightedValue<>\n\npub fn parse_weighted_size_pair(src: &str) -> Result<WeightedValue<(SizeUnit, SizeUnit)>, &str> {\n\n\tmatch parse_weight(src) {\n\n\t\tOk((src_value, weight)) => match parse_size_pair(src_value) {\n\n\t\t\tOk(value) => Ok(WeightedValue {\n\n\t\t\t\tvalue,\n\n\t\t\t\tweight,\n\n\t\t\t}),\n\n\t\t\tErr(err) => Err(err),\n\n\t\t},\n\n\t\tErr(err) => Err(err),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 11, "score": 197191.2288997067 }, { "content": "/// Parses \"1.0\", \"0.9-1.0\" into (1.0, 1.0), (0.9, 1.0)\n\npub fn parse_float_pair(src: &str) -> Result<(f64, f64), &str> {\n\n\tlet values = parse_float_list(&src, '-')?;\n\n\tmatch values.len() {\n\n\t\t1 => Ok((values[0], values[0])),\n\n\t\t2 => Ok((values[0], values[1])),\n\n\t\t_ => Err(\"Float range must be 1-2\"),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 12, "score": 183134.90416681574 }, { "content": "/// Parses a blending mode with a weight (e.f. 
\"normal\", \"screen@2\") into a WeightedValue<>\n\npub fn parse_weighted_blending_mode(src: &str) -> Result<WeightedValue<BlendingMode>, &str> {\n\n\tmatch parse_weight(src) {\n\n\t\tOk((src_value, weight)) => match BlendingMode::from_str(src_value) {\n\n\t\t\tOk(value) => Ok(WeightedValue {\n\n\t\t\t\tvalue,\n\n\t\t\t\tweight,\n\n\t\t\t}),\n\n\t\t\tErr(_) => Err(\"Cannot parse value variant for blending mode\"),\n\n\t\t},\n\n\t\tErr(err) => Err(err),\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_parse_color() {\n\n\t\tassert_eq!(parse_color(\"white\"), Ok((255, 255, 255)));\n", "file_path": "src/generator/utils/parsing.rs", "rank": 13, "score": 181109.35510737984 }, { "content": "#[inline(always)]\n\npub fn blend(bottom: &[u8], top: &[u8], opacity: f64, blending_mode: &BlendingMode) -> [u8; 3] {\n\n\tif opacity == 0.0 {\n\n\t\t[bottom[0], bottom[1], bottom[2]]\n\n\t} else {\n\n\t\t[\n\n\t\t\tchannel_f64_to_u8(blending_mode.blend_with_opacity(\n\n\t\t\t\tchannel_u8_to_f64(bottom[0]),\n\n\t\t\t\tchannel_u8_to_f64(top[0]),\n\n\t\t\t\topacity,\n\n\t\t\t)),\n\n\t\t\tchannel_f64_to_u8(blending_mode.blend_with_opacity(\n\n\t\t\t\tchannel_u8_to_f64(bottom[1]),\n\n\t\t\t\tchannel_u8_to_f64(top[1]),\n\n\t\t\t\topacity,\n\n\t\t\t)),\n\n\t\t\tchannel_f64_to_u8(blending_mode.blend_with_opacity(\n\n\t\t\t\tchannel_u8_to_f64(bottom[2]),\n\n\t\t\t\tchannel_u8_to_f64(top[2]),\n\n\t\t\t\topacity,\n\n\t\t\t)),\n\n\t\t]\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/pixel.rs", "rank": 14, "score": 178421.75306183132 }, { "content": "pub fn parse_size_margins(src: &str) -> Result<Margins<SizeUnit>, &str> {\n\n\tlet values = parse_size_list(src, ',')?;\n\n\tmatch values.len() {\n\n\t\t1 => Ok(Margins::<SizeUnit> {\n\n\t\t\ttop: values[0].clone(),\n\n\t\t\tright: values[0].clone(),\n\n\t\t\tbottom: values[0].clone(),\n\n\t\t\tleft: values[0].clone(),\n\n\t\t}),\n\n\t\t2 => Ok(Margins::<SizeUnit> {\n\n\t\t\ttop: values[0].clone(),\n\n\t\t\tright: values[1].clone(),\n\n\t\t\tbottom: values[0].clone(),\n\n\t\t\tleft: values[1].clone(),\n\n\t\t}),\n\n\t\t3 => Ok(Margins::<SizeUnit> {\n\n\t\t\ttop: values[0].clone(),\n\n\t\t\tright: values[1].clone(),\n\n\t\t\tbottom: values[2].clone(),\n\n\t\t\tleft: values[1].clone(),\n", "file_path": "src/generator/utils/parsing.rs", "rank": 15, "score": 177266.18775727513 }, { "content": "pub fn parse_float(src: &str) -> Result<f64, &str> {\n\n\tsrc.parse::<f64>().or(Err(\"Could not parse float value\"))\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 16, "score": 175324.00966009608 }, { "content": "/// Parses \"10%\", \"20.3\" into 0.1, 20.3\n\npub fn parse_scale(src: &str) -> Result<f64, &str> {\n\n\tif src.ends_with(\"%\") {\n\n\t\tmatch src[..src.len() - 1].parse::<f64>() {\n\n\t\t\tOk(value) => Ok(value / 100.0),\n\n\t\t\t_ => Err(\"Could not parse scale percent value\"),\n\n\t\t}\n\n\t} else {\n\n\t\tmatch src.parse::<f64>() {\n\n\t\t\tOk(value) => Ok(value),\n\n\t\t\t_ => Err(\"Could not parse scale float value\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 17, "score": 175324.00966009608 }, { "content": "pub fn color_transform(image: &RgbImage, matrix: [f64; 12]) -> RgbImage {\n\n\tlet mut transformed_image = image.clone();\n\n\tfor (_x, _y, pixel) in transformed_image.enumerate_pixels_mut() {\n\n\t\t*pixel = image::Rgb(pixel::color_matrix(pixel.channels(), matrix));\n\n\t}\n\n\ttransformed_image\n\n}\n\n\n", "file_path": "src/generator/utils/image.rs", 
"rank": 18, "score": 172467.34931981598 }, { "content": "#[inline(always)]\n\nfn color_matrix_channel(rgb: [f64; 3], rgb_mul: [f64; 3], offset: f64) -> u8 {\n\n\tlet result = rgb[0] * rgb_mul[0] + rgb[1] * rgb_mul[1] + rgb[2] * rgb_mul[2] + offset;\n\n\tresult.round().max(0.0).min(255.0) as u8\n\n}\n\n\n", "file_path": "src/generator/utils/pixel.rs", "rank": 19, "score": 170785.84944788326 }, { "content": "pub fn get_pixel_interpolated(image: &RgbImage, x: f64, y: f64) -> [u8; 3] {\n\n\t// Quick path if in a round pixel\n\n\tlet width: f64 = image.width() as f64;\n\n\tlet height: f64 = image.height() as f64;\n\n\tlet xx = f64::max(0.0f64, f64::min(width - 1.0, x));\n\n\tlet yy = f64::max(0.0f64, f64::min(height - 1.0, y));\n\n\tlet xf = xx.fract();\n\n\tlet yf = yy.fract();\n\n\tif xf == 0f64 && yf == 0f64 {\n\n\t\treturn image\n\n\t\t\t.get_pixel(xx as u32, yy as u32)\n\n\t\t\t.channels()\n\n\t\t\t.to_owned()\n\n\t\t\t.try_into()\n\n\t\t\t.expect(\"converting pixels to array\");\n\n\t}\n\n\n\n\t// Otherwise, do bilinear interpolation\n\n\tlet x1 = xx.floor();\n\n\tlet x2 = xx.ceil();\n", "file_path": "src/generator/utils/image.rs", "rank": 20, "score": 164713.35453724576 }, { "content": "#[inline(always)]\n\npub fn get_noise_value(noise: [f64; 256], position: f64) -> f64 {\n\n\tlet pp = if position < 0.0 {\n\n\t\t1.0 - position.abs()\n\n\t} else {\n\n\t\tposition\n\n\t};\n\n\tlet pp = pp.fract() * 256.0f64;\n\n\tlet p1 = pp.floor() as usize;\n\n\tlet p2 = (p1 + 1) % 256;\n\n\n\n\tlet v1 = noise[p1];\n\n\tlet v2 = noise[p2];\n\n\n\n\t// Phase\n\n\tlet f = pp.fract();\n\n\n\n\t// Remap phase for smoothstep\n\n\tlet f = (1.0 - (f * PI).cos()) * 0.5;\n\n\n\n\tv1 + (v2 - v1) * f\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 21, "score": 159854.09962086394 }, { "content": "pub fn parse_float_list(src: &str, divider: char) -> Result<Vec<f64>, &str> {\n\n\tsrc.split(divider).collect::<Vec<&str>>().iter().map(|&e| parse_float(e)).collect()\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 22, "score": 159121.56821130263 }, { "content": "pub fn diff(a: &RgbImage, b: &RgbImage) -> f64 {\n\n\tlet w = a.dimensions().0;\n\n\tlet h = a.dimensions().1;\n\n\tlet num_pixels = w * h;\n\n\n\n\tlet mut diff_sum_r: i32 = 0;\n\n\tlet mut diff_sum_g: i32 = 0;\n\n\tlet mut diff_sum_b: i32 = 0;\n\n\n\n\tlet samples_a = a.as_flat_samples().samples;\n\n\tlet samples_b = b.as_flat_samples().samples;\n\n\n\n\tlet skip_step = 1;\n\n\n\n\tfor (p_a, p_b) in samples_a.chunks_exact(3).zip(samples_b.chunks_exact(3)).step_by(skip_step) {\n\n\t\tdiff_sum_r += (p_a[0] as i32 - p_b[0] as i32).abs();\n\n\t\tdiff_sum_g += (p_a[1] as i32 - p_b[1] as i32).abs();\n\n\t\tdiff_sum_b += (p_a[2] as i32 - p_b[2] as i32).abs();\n\n\t}\n\n\n\n\tlet lr = LUMA_R / 255.0;\n\n\tlet lg = LUMA_G / 255.0;\n\n\tlet lb = LUMA_B / 255.0;\n\n\tlet diff_sum = diff_sum_r as f64 * lr + diff_sum_g as f64 * lg + diff_sum_b as f64 * lb;\n\n\n\n\tdiff_sum / (num_pixels as f64 / skip_step as f64)\n\n}\n\n\n", "file_path": "src/generator/utils/image.rs", "rank": 23, "score": 155378.5213882611 }, { "content": "pub fn parse_color(src: &str) -> Result<(u8, u8, u8), &str> {\n\n\tmatch Color::new_string(src) {\n\n\t\tSome(color) => {\n\n\t\t\tlet rgb = color.get_rgba();\n\n\t\t\tlet r = (rgb.0 * 255.0).round() as u8;\n\n\t\t\tlet g = (rgb.1 * 255.0).round() as u8;\n\n\t\t\tlet b = (rgb.2 * 255.0).round() as u8;\n\n\t\t\tOk((r, g, b))\n\n\t\t}\n\n\t\tNone => Err(\"Cannot parse color string\"),\n\n\t}\n\n}\n\n\n", 
"file_path": "src/generator/utils/parsing.rs", "rank": 24, "score": 151362.797306484 }, { "content": "pub fn parse_size(src: &str) -> Result<SizeUnit, &str> {\n\n\tif src.ends_with(\"%\") {\n\n\t\tmatch src[..src.len() - 1].parse::<f64>() {\n\n\t\t\tOk(value) => Ok(SizeUnit::Fraction(value / 100.0f64)),\n\n\t\t\t_ => Err(\"Could not parse fraction value\"),\n\n\t\t}\n\n\t} else {\n\n\t\tmatch src.parse::<f64>() {\n\n\t\t\tOk(value) => Ok(SizeUnit::Pixels(value.round() as i64)),\n\n\t\t\t_ => Err(\"Could not parse pixel value\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 25, "score": 147516.5968175239 }, { "content": "// Parses \"100%\", \"90%-100%\", \"10-20\", \"2\" into pairs of SizeUnits\n\npub fn parse_size_pair(src: &str) -> Result<(SizeUnit, SizeUnit), &str> {\n\n\tlet values = parse_size_list(&src, '-')?;\n\n\tmatch values.len() {\n\n\t\t1 => Ok((values[0].clone(), values[0].clone())),\n\n\t\t2 => Ok((values[0].clone(), values[1].clone())),\n\n\t\t_ => Err(\"Size range length must be 2\"),\n\n\t}\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 26, "score": 146468.85210413768 }, { "content": "#[inline(always)]\n\nfn channel_u8_to_f64(color: u8) -> f64 {\n\n\tcolor as f64 / 255.0\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_blend() {\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 0.0, &BlendingMode::Normal), [0, 10, 250]);\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 0.5, &BlendingMode::Normal), [128, 69, 125]);\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 1.0, &BlendingMode::Normal), [255, 128, 0]);\n\n\n\n\t\t// Actual individual blending mode tests are part of the \"color\" module,\n\n\t\t// this is just to verify that the parameters are respected\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 0.0, &BlendingMode::Multiply), [0, 10, 250]);\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 0.5, &BlendingMode::Multiply), [0, 8, 125]);\n\n\t\tassert_eq!(blend(&[0, 10, 250], &[255, 128, 0], 1.0, &BlendingMode::Multiply), [0, 5, 0]);\n\n\t}\n", "file_path": "src/generator/utils/pixel.rs", "rank": 27, "score": 143147.34564199406 }, { "content": "#[inline(always)]\n\nfn channel_f64_to_u8(color: f64) -> u8 {\n\n\t(color * 255.0).round() as u8\n\n}\n\n\n", "file_path": "src/generator/utils/pixel.rs", "rank": 28, "score": 143147.34564199406 }, { "content": "#[inline(always)]\n\npub fn color_matrix(pixel: &[u8], matrix: [f64; 12]) -> [u8; 3] {\n\n\tlet rgb = [pixel[0] as f64, pixel[1] as f64, pixel[2] as f64];\n\n\t[\n\n\t\tcolor_matrix_channel(rgb, [matrix[0], matrix[1], matrix[2]], matrix[3]),\n\n\t\tcolor_matrix_channel(rgb, [matrix[4], matrix[5], matrix[6]], matrix[7]),\n\n\t\tcolor_matrix_channel(rgb, [matrix[8], matrix[9], matrix[10]], matrix[11]),\n\n\t]\n\n}\n\n\n", "file_path": "src/generator/utils/pixel.rs", "rank": 29, "score": 139945.4052322744 }, { "content": "pub fn parse_size_list(src: &str, divider: char) -> Result<Vec<SizeUnit>, &str> {\n\n\tsrc.split(divider).collect::<Vec<&str>>().iter().map(|&e| parse_size(e)).collect()\n\n}\n\n\n", "file_path": "src/generator/utils/parsing.rs", "rank": 30, "score": 133827.3912231612 }, { "content": "pub fn get_random_seed() -> u32 {\n\n\tRng::new().next()\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 31, "score": 131264.7874568949 }, { "content": "pub fn format_time(ms: f64) -> String {\n\n\tlet seconds = ms / 1000.0;\n\n\tlet minutes = seconds / 60.0;\n\n\tlet hours = 
minutes / 60.0;\n\n\n\n\tlet r_seconds = (seconds % 60.0).floor();\n\n\tlet r_minutes = (minutes % 60.0).floor();\n\n\tlet r_hours = hours.floor();\n\n\n\n\tif hours >= 1.0 {\n\n\t\tformat!(\"{:.0}h {:02.0}m {:02.0}s\", r_hours, r_minutes, r_seconds).to_owned()\n\n\t} else if minutes >= 1.0 {\n\n\t\tformat!(\"{:.0}m {:02.0}s\", r_minutes, r_seconds).to_owned()\n\n\t} else {\n\n\t\tformat!(\"{:.0}s\", r_seconds).to_owned()\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/generator/utils/formatting.rs", "rank": 32, "score": 123319.84280669867 }, { "content": "pub fn find_target_draw_rect(\n\n\tdimensions: (u32, u32),\n\n\tmargins: &Margins<SizeUnit>,\n\n) -> Result<Rectangle<i64>, &'static str> {\n\n\tlet pixel_margins = margins.to_pixels(dimensions.0, dimensions.1);\n\n\tlet mut rect = Rectangle::<i64> {\n\n\t\tx: 0,\n\n\t\ty: 0,\n\n\t\twidth: dimensions.0 as i64,\n\n\t\theight: dimensions.1 as i64,\n\n\t};\n\n\trect.apply_margins(pixel_margins);\n\n\tif rect.width <= 0 || rect.height <= 0 {\n\n\t\tErr(\"Cannot have a resulting rectangle of negative or empty area\")\n\n\t} else {\n\n\t\tOk(rect)\n\n\t}\n\n}\n", "file_path": "src/generator/utils/geom.rs", "rank": 33, "score": 112387.35681383403 }, { "content": "pub trait Painter {\n\n\tfn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str>;\n\n\tfn get_metadata(&self) -> HashMap<String, String>;\n\n}\n", "file_path": "src/generator/painter/mod.rs", "rank": 34, "score": 111694.09650750639 }, { "content": "pub fn get_random_ranges_bias_weighted(\n\n\trng: &mut Rng,\n\n\tranges: &Vec<WeightedValue<(f64, f64)>>,\n\n\tbias: f64,\n\n) -> f64 {\n\n\tlet range = get_random_entry_weighted(rng, ranges);\n\n\tget_random_range_bias(rng, range.0, range.1, bias)\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 35, "score": 107927.87993988632 }, { "content": "pub fn get_random_size_ranges_bias_weighted(\n\n\trng: &mut Rng,\n\n\tranges: &Vec<WeightedValue<(SizeUnit, SizeUnit)>>,\n\n\tbias: f64,\n\n\tpixel_size: u32,\n\n) -> f64 {\n\n\tlet range = get_random_entry_weighted(rng, ranges);\n\n\tget_random_size_range_bias(rng, &range.0, &range.1, bias, pixel_size)\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 36, "score": 105852.02722394417 }, { "content": "pub fn write_image(image_buffer: RgbImage, path: &Path) {\n\n\tlet image = DynamicImage::ImageRgb8(image_buffer);\n\n\tlet image_format = ImageFileFormat::from_path(path).expect(\"parsing image format\");\n\n\tlet mut output_file = File::create(path).expect(\"creating output file\");\n\n\t// We could also have used \"image.save(output_path)\"\n\n\timage.write_to(&mut output_file, image_format.get_native_format()).unwrap();\n\n}\n\n\n", "file_path": "src/generator/utils/files/mod.rs", "rank": 37, "score": 96984.87569943999 }, { "content": "pub fn generate_image(image_buffer: RgbImage, image_format: ImageFileFormat) -> Bytes {\n\n\tlet image = DynamicImage::ImageRgb8(image_buffer);\n\n\n\n\t// Encode the image first\n\n\tlet mut image_writer = BytesMut::new().writer();\n\n\timage.write_to(&mut image_writer, image_format.get_native_format()).expect(\"writing image to Bytes\");\n\n\tBytes::from(image_writer.into_inner().freeze().to_vec())\n\n}\n\n\n", "file_path": "src/generator/utils/files/mod.rs", "rank": 38, "score": 89321.34720599814 }, { "content": "pub fn write_image_with_metadata(image_buffer: RgbImage, path: &Path, comments: Vec<String>) {\n\n\tlet image_format = 
ImageFileFormat::from_path(path).expect(\"parsing image format\");\n\n\tlet image_bytes = generate_image_with_metadata(image_buffer, image_format, comments);\n\n\tlet mut output_file = File::create(path).expect(\"creating output file with metadata\");\n\n\toutput_file.write(&image_bytes[..]).expect(\"writing output file with metadata\");\n\n}\n", "file_path": "src/generator/utils/files/mod.rs", "rank": 39, "score": 87722.01907085636 }, { "content": "pub fn cursor_up() {\n\n\tprint!(\"\\u{1b}[1;A\");\n\n}\n\n\n", "file_path": "src/generator/utils/terminal.rs", "rank": 40, "score": 87107.4786370454 }, { "content": "pub fn erase_line_to_end() {\n\n\tprint!(\"\\u{1b}[0;K\");\n\n}\n", "file_path": "src/generator/utils/terminal.rs", "rank": 41, "score": 83634.73457185201 }, { "content": "fn print_benchmark(bench: &TimerBenchmark, label: &str) {\n\n\tprintln!(\n\n\t\t\"[BENCH] {:}: min {:.3}ms, avg {:.3}ms, median {:.3}, max {:.3}ms\",\n\n\t\tlabel,\n\n\t\tbench.min_ms(),\n\n\t\tbench.average_ms(),\n\n\t\tbench.median_ms(),\n\n\t\tbench.max_ms()\n\n\t);\n\n}\n\n\n\nimpl Generator {\n\n\tpub fn from_image(target_image: DynamicImage, scale: f64) -> Generator {\n\n\t\tlet mut target = target_image.to_rgb8();\n\n\t\tif scale != 1.0f64 {\n\n\t\t\ttarget = image_scale(&target, scale);\n\n\t\t}\n\n\t\tlet current = RgbImage::new(target.dimensions().0, target.dimensions().1);\n\n\t\tGenerator {\n\n\t\t\ttarget,\n", "file_path": "src/generator/mod.rs", "rank": 42, "score": 83560.3227219472 }, { "content": "pub fn generate_image_with_metadata(\n\n\timage_buffer: RgbImage,\n\n\timage_format: ImageFileFormat,\n\n\tcomments: Vec<String>,\n\n) -> bytes::Bytes {\n\n\tlet image_bytes = generate_image(image_buffer, image_format);\n\n\tlet mut image_and_meta_writer = BytesMut::new().writer();\n\n\n\n\t// Additional metadata\n\n\tlet meta_software = format!(\"Random Art Generator v{}\", crate_version!());\n\n\n\n\t// Save differently based on file format\n\n\tmatch image_format {\n\n\t\tImageFileFormat::PNG => {\n\n\t\t\t// Is PNG, add chunks\n\n\t\t\tlet mut png = Png::from_bytes(image_bytes).expect(\"reading encoded PNG image\");\n\n\n\n\t\t\tlet comments_chunk =\n\n\t\t\t\tPngChunk::new(*b\"tEXt\", Bytes::from(format!(\"Comment\\u{0}{}\", comments.join(\" \\r\\n\"))));\n\n\t\t\tlet software_chunk =\n", "file_path": "src/generator/utils/files/mod.rs", "rank": 43, "score": 82035.91150394906 }, { "content": "pub fn scale<I: GenericImageView>(\n\n\timage: &I,\n\n\tscale: f64,\n\n) -> ImageBuffer<I::Pixel, Vec<<I::Pixel as Pixel>::Subpixel>>\n\nwhere\n\n\tI::Pixel: 'static,\n\n\t<I::Pixel as Pixel>::Subpixel: 'static,\n\n{\n\n\tlet width = (image.dimensions().0 as f64 * scale).round() as u32;\n\n\tlet height = (image.dimensions().1 as f64 * scale).round() as u32;\n\n\tresize(image, width, height)\n\n}\n\n\n", "file_path": "src/generator/utils/image.rs", "rank": 44, "score": 79228.54733120115 }, { "content": "pub fn resize<I: GenericImageView>(\n\n\timage: &I,\n\n\twidth: u32,\n\n\theight: u32,\n\n) -> ImageBuffer<I::Pixel, Vec<<I::Pixel as Pixel>::Subpixel>>\n\nwhere\n\n\tI::Pixel: 'static,\n\n\t<I::Pixel as Pixel>::Subpixel: 'static,\n\n{\n\n\timageops::resize(image, width, height, imageops::FilterType::CatmullRom)\n\n}\n\n\n", "file_path": "src/generator/utils/image.rs", "rank": 45, "score": 79228.54733120115 }, { "content": "pub fn get_random_size_range_bias(\n\n\trng: &mut Rng,\n\n\tmin: &SizeUnit,\n\n\tmax: &SizeUnit,\n\n\tbias: f64,\n\n\tpixel_size: u32,\n\n) -> f64 {\n\n\tlet min_pixels = 
min.to_pixels(pixel_size);\n\n\tlet max_pixels = max.to_pixels(pixel_size);\n\n\tget_random_range_bias(rng, min_pixels as f64, max_pixels as f64, bias)\n\n}\n\n\n", "file_path": "src/generator/utils/random/mod.rs", "rank": 46, "score": 79078.86479401754 }, { "content": "fn get_options() -> Opt {\n\n\treturn Opt::from_args();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 47, "score": 75428.07642390965 }, { "content": "\tpub blending_mode: Vec<WeightedValue<BlendingMode>>,\n\n\tpub alpha: Vec<WeightedValue<(f64, f64)>>,\n\n\tpub alpha_bias: f64,\n\n\tpub width: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,\n\n\tpub height: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,\n\n\tpub width_bias: f64, // 0 = normal; -1 = quad bias towards small, 1 = quad bias towards big, etc\n\n\tpub height_bias: f64, // 0 = normal; -1 = quad bias towards small, 1 = quad bias towards big, etc\n\n\tpub color_seed: f64,\n\n\tpub rng_seed: u32,\n\n\tpub margins: Margins<SizeUnit>,\n\n}\n\n\n\nimpl RectPainter {\n\n\tpub fn new() -> Self {\n\n\t\tlet options = Options {\n\n\t\t\tblending_mode: vec![WeightedValue {\n\n\t\t\t\tvalue: BlendingMode::default(),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\talpha: vec![WeightedValue {\n", "file_path": "src/generator/painter/rect.rs", "rank": 48, "score": 69421.71965135285 }, { "content": "\t\t\t(target_area.x + target_area.width) as f64 - rect_w,\n\n\t\t);\n\n\t\tlet rect_y = get_random_range(\n\n\t\t\t&mut rng,\n\n\t\t\ttarget_area.y as f64,\n\n\t\t\t(target_area.y + target_area.height) as f64 - rect_h,\n\n\t\t);\n\n\n\n\t\t// Find final, round positions\n\n\t\tlet x1 = rect_x.round().max(0.0).min(image_area.0 as f64) as u32;\n\n\t\tlet x2 = (rect_x + rect_w).round().max(0.0).min(image_area.0 as f64) as u32;\n\n\t\tlet y1 = rect_y.round().max(0.0).min(image_area.1 as f64) as u32;\n\n\t\tlet y2 = (rect_y + rect_h).round().max(0.0).min(image_area.1 as f64) as u32;\n\n\n\n\t\t// Determine color\n\n\t\tlet random_color = get_random_color(&mut rng);\n\n\t\tlet seed_color = get_pixel_interpolated(seed_map, (x1 + x2) as f64 / 2.0, (y1 + y2) as f64 / 2.0);\n\n\t\tlet color = blend_linear(&random_color, &seed_color, self.options.color_seed);\n\n\t\tlet alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias);\n\n\n", "file_path": "src/generator/painter/rect.rs", "rank": 49, "score": 69421.20062329168 }, { "content": "\t\t\t\tleft: SizeUnit::Pixels(0),\n\n\t\t\t},\n\n\t\t};\n\n\n\n\t\tRectPainter {\n\n\t\t\toptions,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Painter for RectPainter {\n\n\tfn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> {\n\n\t\tlet mut rng = get_rng(self.options.rng_seed, iteration);\n\n\n\n\t\tlet image_area = canvas.dimensions();\n\n\t\tlet target_area = match find_target_draw_rect(image_area, &self.options.margins) {\n\n\t\t\tOk(rect) => rect,\n\n\t\t\tErr(err) => return Err(err),\n\n\t\t};\n\n\t\tlet target_visible_area =\n", "file_path": "src/generator/painter/rect.rs", "rank": 50, "score": 69419.85061179333 }, { "content": "\t\t// Decide on blending mode\n\n\t\tlet blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode);\n\n\n\n\t\t// Finally, paint\n\n\t\tlet mut painted_canvas = canvas.clone();\n\n\t\tfor x in x1..x2 {\n\n\t\t\tfor y in y1..y2 {\n\n\t\t\t\tlet new_pixel =\n\n\t\t\t\t\tRgb(blend(painted_canvas.get_pixel(x, y).channels(), &color, alpha, &blending_mode));\n\n\t\t\t\tpainted_canvas.put_pixel(x, y, 
new_pixel);\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tOk(painted_canvas)\n\n\t}\n\n\n\n\tfn get_metadata(&self) -> HashMap<String, String> {\n\n\t\tlet mut data = HashMap::new();\n\n\t\tdata.insert(String::from(\"RNG seed\"), format!(\"{}\", &self.options.rng_seed));\n\n\t\tdata\n\n\t}\n\n}\n", "file_path": "src/generator/painter/rect.rs", "rank": 51, "score": 69413.0544051897 }, { "content": "\t\t\t\tvalue: (1.0, 1.0),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\talpha_bias: 0.0,\n\n\t\t\twidth: vec![WeightedValue {\n\n\t\t\t\tvalue: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\twidth_bias: 0.0,\n\n\t\t\theight: vec![WeightedValue {\n\n\t\t\t\tvalue: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(1.0)),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\theight_bias: 0.0,\n\n\t\t\tcolor_seed: 0.0,\n\n\t\t\trng_seed: 0,\n\n\t\t\tmargins: Margins::<SizeUnit> {\n\n\t\t\t\ttop: SizeUnit::Pixels(0),\n\n\t\t\t\tright: SizeUnit::Pixels(0),\n\n\t\t\t\tbottom: SizeUnit::Pixels(0),\n", "file_path": "src/generator/painter/rect.rs", "rank": 52, "score": 69411.0823878768 }, { "content": "\t\t\t(image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32));\n\n\n\n\t\t// Find random dimensions for rect to be painted\n\n\t\tlet rect_w = get_random_size_ranges_bias_weighted(\n\n\t\t\t&mut rng,\n\n\t\t\t&self.options.width,\n\n\t\t\tself.options.width_bias,\n\n\t\t\ttarget_visible_area.0,\n\n\t\t);\n\n\t\tlet rect_h = get_random_size_ranges_bias_weighted(\n\n\t\t\t&mut rng,\n\n\t\t\t&self.options.height,\n\n\t\t\tself.options.height_bias,\n\n\t\t\ttarget_visible_area.1,\n\n\t\t);\n\n\n\n\t\t// Distribute along the axis too\n\n\t\tlet rect_x = get_random_range(\n\n\t\t\t&mut rng,\n\n\t\t\ttarget_area.x as f64,\n", "file_path": "src/generator/painter/rect.rs", "rank": 53, "score": 69408.96995440326 }, { "content": "use std::collections::HashMap;\n\n\n\nuse image::{Pixel, Rgb, RgbImage};\n\n\n\nuse crate::generator::painter::Painter;\n\nuse crate::generator::utils::color::BlendingMode;\n\nuse crate::generator::utils::geom::find_target_draw_rect;\n\nuse crate::generator::utils::pixel::{blend, blend_linear};\n\nuse crate::generator::utils::random::{\n\n\tget_random_entry_weighted, get_random_range, get_random_ranges_bias_weighted,\n\n\tget_random_size_ranges_bias_weighted, get_rng,\n\n};\n\nuse crate::generator::utils::units::{Margins, SizeUnit, WeightedValue};\n\nuse crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color};\n\n\n\npub struct RectPainter {\n\n\tpub options: Options,\n\n}\n\n\n\npub struct Options {\n", "file_path": "src/generator/painter/rect.rs", "rank": 54, "score": 69408.956104353 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n\t/// The target image.\n\n\t///\n\n\t/// The painting algorithms will try matching this image, without copying directly from it.\n\n\t#[structopt(parse(from_os_str))]\n\n\ttarget: PathBuf,\n\n\n\n\t/// Maximum number of image generation tries (successful or nor) to run.\n\n\t///\n\n\t/// On each try, the painter algorithm tries creating an image that is closer to the target image (with several \"candidates\" per try).\n\n\t///\n\n\t/// The more complex the result image gets, the harder it is to create an improved image, so it's common to have many unsuccessful tries. 
Use this option to set a maximum number of tries.\n\n\t///\n\n\t/// Using a limited number of tries can give a predicted time for completion, but also gives an unpredictable number of successful paints. Use the `-g`/`--generations` parameter to control the number of desired paints instead.\n\n\t///\n\n\t/// Set to `0` if no limit is desired.\n\n\t#[structopt(short = \"t\", long, default_value = \"0\", required_if(\"generations\", \"0\"))]\n\n\tmax_tries: u32,\n\n\n\n\t/// Number of successful generations desired.\n", "file_path": "src/main.rs", "rank": 55, "score": 62287.48289525819 }, { "content": "fn main() {\n\n\tlet options = get_options();\n\n\n\n\t// Target\n\n\tlet target_file = options.target.as_path();\n\n\tlet target_image = image::open(target_file).expect(\"Cannot open target file {:?}, exiting\");\n\n\n\n\tprintln!(\"Using target image of {:?} with dimensions of {:?}.\", target_file, target_image.dimensions());\n\n\n\n\t// Create Generator\n\n\tlet mut gen = match options.target_color_matrix {\n\n\t\tSome(color_matrix) => {\n\n\t\t\t// Target has a color matrix, parse it first\n\n\t\t\tgenerator::Generator::from_image_and_matrix(target_image, options.scale, color_matrix)\n\n\t\t}\n\n\t\tNone => {\n\n\t\t\t// No color matrix needed, generate with the image\n\n\t\t\tgenerator::Generator::from_image(target_image, options.scale)\n\n\t\t}\n\n\t};\n", "file_path": "src/main.rs", "rank": 56, "score": 44015.43013420763 }, { "content": "fn on_processed(generator: &Generator, result: ProcessCallbackResult) {\n\n\t// Ignore unsuccessful generations\n\n\tif !result.is_success {\n\n\t\treturn;\n\n\t}\n\n\n\n\tlet options = get_options();\n\n\n\n\t// Only write the file if it's the final generation, or it's meant to save often\n\n\tif !result.is_final && !options.save_often {\n\n\t\treturn;\n\n\t}\n\n\n\n\tlet output_path = options.output.as_path();\n\n\n\n\tif options.no_metadata {\n\n\t\t// No metadata wanted, write the file directly\n\n\t\tfiles::write_image(generator.get_current(), output_path);\n\n\t} else {\n\n\t\t// Write the file with metadata\n", "file_path": "src/main.rs", "rank": 57, "score": 36397.74741388787 }, { "content": "\t#[strum(serialize = \"hard-light\")]\n\n\tHardLight,\n\n\t#[strum(serialize = \"soft-light\")]\n\n\tSoftLight,\n\n\t#[strum(serialize = \"difference\")]\n\n\tDifference,\n\n\t#[strum(serialize = \"exclusion\")]\n\n\tExclusion,\n\n}\n\n\n\nimpl BlendingMode {\n\n\t#[inline(always)]\n\n\tpub fn blend(&self, bottom: f64, top: f64) -> f64 {\n\n\t\tmatch self {\n\n\t\t\tSelf::Normal => top,\n\n\t\t\tSelf::Multiply => bottom * top,\n\n\t\t\tSelf::Screen => 1.0 - (1.0 - bottom) * (1.0 - top),\n\n\t\t\tSelf::Overlay => {\n\n\t\t\t\tif bottom < 0.5 {\n\n\t\t\t\t\t2.0 * bottom * top\n", "file_path": "src/generator/utils/color.rs", "rank": 58, "score": 34461.31706094265 }, { "content": "\t\t\t\t\tbottom + (2.0 * top - 1.0) * (d - bottom)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tSelf::Difference => (bottom - top).abs().max(0.0).min(1.0),\n\n\t\t\tSelf::Exclusion => bottom + top - 2.0 * bottom * top,\n\n\t\t}\n\n\t}\n\n\n\n\t/// Interpolates between the bottom color, and the resulting\n\n\t/// color if the top color was applied with this blend mode\n\n\t#[inline(always)]\n\n\tpub fn blend_with_opacity(&self, bottom: f64, top: f64, opacity: f64) -> f64 {\n\n\t\treturn if opacity == 0.0 {\n\n\t\t\tbottom\n\n\t\t} else {\n\n\t\t\tlet opaque_result = &self.blend(bottom, top);\n\n\t\t\topaque_result * opacity + bottom * (1.0 - opacity)\n\n\t\t};\n\n\t}\n\n}\n", "file_path": 
"src/generator/utils/color.rs", "rank": 59, "score": 34460.2095802881 }, { "content": "\t\t\t\t} else {\n\n\t\t\t\t\t1.0 - 2.0 * (1.0 - bottom) * (1.0 - top)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tSelf::Darken => bottom.min(top),\n\n\t\t\tSelf::Lighten => bottom.max(top),\n\n\t\t\tSelf::ColorDodge => {\n\n\t\t\t\tif bottom == 0.0 {\n\n\t\t\t\t\t0.0\n\n\t\t\t\t} else if top == 1.0 {\n\n\t\t\t\t\t1.0\n\n\t\t\t\t} else {\n\n\t\t\t\t\t(bottom / (1.0 - top)).min(1.0)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tSelf::ColorBurn => {\n\n\t\t\t\tif bottom == 1.0 {\n\n\t\t\t\t\t1.0\n\n\t\t\t\t} else if top == 0.0 {\n\n\t\t\t\t\t0.0\n", "file_path": "src/generator/utils/color.rs", "rank": 60, "score": 34455.34966901617 }, { "content": "\t\t\t\t} else {\n\n\t\t\t\t\t1.0 - ((1.0 - bottom) / top).min(1.0)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tSelf::HardLight => {\n\n\t\t\t\tif top <= 0.5 {\n\n\t\t\t\t\t2.0 * bottom * top\n\n\t\t\t\t} else {\n\n\t\t\t\t\t1.0 - (1.0 - bottom) * (1.0 - (2.0 * top - 1.0))\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tSelf::SoftLight => {\n\n\t\t\t\tif top <= 0.5 {\n\n\t\t\t\t\tbottom - (1.0 - 2.0 * top) * bottom * (1.0 - bottom)\n\n\t\t\t\t} else {\n\n\t\t\t\t\tlet d = if bottom <= 0.25 {\n\n\t\t\t\t\t\t((16.0 * bottom - 12.0) * bottom + 4.0) * bottom\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\tbottom.sqrt()\n\n\t\t\t\t\t};\n", "file_path": "src/generator/utils/color.rs", "rank": 64, "score": 34451.333428027174 }, { "content": "\tpub blending_mode: Vec<WeightedValue<BlendingMode>>,\n\n\tpub alpha: Vec<WeightedValue<(f64, f64)>>,\n\n\tpub alpha_bias: f64,\n\n\tpub radius: Vec<WeightedValue<(SizeUnit, SizeUnit)>>,\n\n\tpub radius_bias: f64, // 0 = normal; -1 = quad bias towards small, 1 = quad bias towards big, etc\n\n\tpub anti_alias: bool,\n\n\tpub color_seed: f64,\n\n\tpub rng_seed: u32,\n\n\tpub margins: Margins<SizeUnit>,\n\n}\n\n\n\nimpl CirclePainter {\n\n\tpub fn new() -> Self {\n\n\t\tlet options = Options {\n\n\t\t\tblending_mode: vec![WeightedValue {\n\n\t\t\t\tvalue: BlendingMode::default(),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\talpha: vec![WeightedValue {\n\n\t\t\t\tvalue: (1.0, 1.0),\n", "file_path": "src/generator/painter/circle.rs", "rank": 66, "score": 34450.94998984182 }, { "content": "\t\t\toptions,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Painter for CirclePainter {\n\n\tfn paint(&self, canvas: &RgbImage, iteration: u32, seed_map: &RgbImage) -> Result<RgbImage, &str> {\n\n\t\tlet mut rng = get_rng(self.options.rng_seed, iteration);\n\n\n\n\t\tlet image_area = canvas.dimensions();\n\n\t\tlet target_area = match find_target_draw_rect(image_area, &self.options.margins) {\n\n\t\t\tOk(rect) => rect,\n\n\t\t\tErr(err) => return Err(err),\n\n\t\t};\n\n\t\tlet target_visible_area =\n\n\t\t\t(image_area.0.min(target_area.width as u32), image_area.1.min(target_area.height as u32));\n\n\n\n\t\t// Find random radius for the circle to be painted\n\n\t\tlet max_dimension = target_visible_area.0.min(target_visible_area.1);\n\n\t\tlet radius = get_random_size_ranges_bias_weighted(\n", "file_path": "src/generator/painter/circle.rs", "rank": 67, "score": 34449.763404390884 }, { "content": "\t\tlet y1 = (circle_y - radius).floor().max(0.0).min(image_area.1 as f64) as u32;\n\n\t\tlet x2 = (circle_x + radius).ceil().max(0.0).min(image_area.0 as f64) as u32;\n\n\t\tlet y2 = (circle_y + radius).ceil().max(0.0).min(image_area.1 as f64) as u32;\n\n\n\n\t\t// Determine color\n\n\t\tlet random_color = get_random_color(&mut rng);\n\n\t\tlet seed_color = get_pixel_interpolated(seed_map, circle_x, circle_y);\n\n\t\tlet color = 
blend_linear(&random_color, &seed_color, self.options.color_seed);\n\n\t\tlet alpha = get_random_ranges_bias_weighted(&mut rng, &self.options.alpha, self.options.alpha_bias);\n\n\n\n\t\t// Decide on blending mode\n\n\t\tlet blending_mode = get_random_entry_weighted(&mut rng, &self.options.blending_mode);\n\n\n\n\t\t// Finally, paint\n\n\t\tlet mut painted_canvas = canvas.clone();\n\n\t\tfor x in x1..x2 {\n\n\t\t\tfor y in y1..y2 {\n\n\t\t\t\tlet dist = distance(circle_x, circle_y, x as f64, y as f64);\n\n\t\t\t\tif dist <= radius {\n\n\t\t\t\t\tlet abs = radius - dist;\n", "file_path": "src/generator/painter/circle.rs", "rank": 68, "score": 34448.25538548095 }, { "content": "use strum_macros::{Display, EnumString};\n\n\n\n#[derive(Clone, Debug, Display, EnumString, PartialEq)]\n\npub enum BlendingMode {\n\n\t#[strum(serialize = \"normal\")]\n\n\tNormal,\n\n\t#[strum(serialize = \"multiply\")]\n\n\tMultiply,\n\n\t#[strum(serialize = \"screen\")]\n\n\tScreen,\n\n\t#[strum(serialize = \"overlay\")]\n\n\tOverlay,\n\n\t#[strum(serialize = \"darken\")]\n\n\tDarken,\n\n\t#[strum(serialize = \"lighten\")]\n\n\tLighten,\n\n\t#[strum(serialize = \"color-dodge\")]\n\n\tColorDodge,\n\n\t#[strum(serialize = \"color-burn\")]\n\n\tColorBurn,\n", "file_path": "src/generator/utils/color.rs", "rank": 69, "score": 34445.95930812536 }, { "content": "\n\nimpl Default for BlendingMode {\n\n\tfn default() -> Self {\n\n\t\tBlendingMode::Normal\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_blend_normal() {\n\n\t\t// Opaque\n\n\t\tassert_eq!(BlendingMode::Normal.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend(0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend(0.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend(0.5, 0.5), 0.5);\n", "file_path": "src/generator/utils/color.rs", "rank": 70, "score": 34444.70793363002 }, { "content": "\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\talpha_bias: 0.0,\n\n\t\t\tradius: vec![WeightedValue {\n\n\t\t\t\tvalue: (SizeUnit::Fraction(0.0), SizeUnit::Fraction(0.5)),\n\n\t\t\t\tweight: 1.0,\n\n\t\t\t}],\n\n\t\t\tradius_bias: 0.0,\n\n\t\t\tanti_alias: true,\n\n\t\t\tcolor_seed: 0.0,\n\n\t\t\trng_seed: 0,\n\n\t\t\tmargins: Margins::<SizeUnit> {\n\n\t\t\t\ttop: SizeUnit::Pixels(0),\n\n\t\t\t\tright: SizeUnit::Pixels(0),\n\n\t\t\t\tbottom: SizeUnit::Pixels(0),\n\n\t\t\t\tleft: SizeUnit::Pixels(0),\n\n\t\t\t},\n\n\t\t};\n\n\n\n\t\tCirclePainter {\n", "file_path": "src/generator/painter/circle.rs", "rank": 71, "score": 34443.43175493877 }, { "content": "\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.5, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.5, 1.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.25, 0.25, 0.25), 0.1875);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.25, 0.25, 0.75), 0.0625);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.25, 0.75, 0.25), 0.1875);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.25, 0.75, 0.75), 0.0625);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.75, 0.25, 0.25), 
0.5625);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.75, 0.25, 0.75), 0.1875);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.75, 0.75, 0.25), 0.7291666666666667);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend_with_opacity(0.75, 0.75, 0.75), 0.6875);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_hard_light() {\n", "file_path": "src/generator/utils/color.rs", "rank": 73, "score": 34441.96481813629 }, { "content": "\t\tassert_eq!(BlendingMode::ColorDodge.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(1.0, 1.0), 1.0);\n\n\n\n\t\t// These are a little bit different by +- 2 (on a 0-255 range)\n\n\t\t// when compared to Photopea results\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.25, 0.25, 0.25), 0.2708333333333333);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.25, 0.25, 0.75), 0.3125);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.25, 0.75, 0.25), 0.4375);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.25, 0.75, 0.75), 0.8125);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.75, 0.25, 0.25), 0.8125);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.75, 0.25, 0.75), 0.9375);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.75, 0.75, 0.25), 0.8125);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend_with_opacity(0.75, 0.75, 0.75), 0.9375);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_color_burn() {\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorBurn.blend(0.0, 0.5), 0.0);\n", "file_path": "src/generator/utils/color.rs", "rank": 74, "score": 34441.95521707235 }, { "content": "\t\tassert_eq!(BlendingMode::Lighten.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.25, 0.25, 0.25), 0.25);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.25, 0.25, 0.75), 0.25);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.25, 0.75, 0.25), 0.375);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.25, 0.75, 0.75), 0.625);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.75, 0.25, 0.25), 0.75);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.75, 0.25, 0.75), 0.75);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.75, 0.75, 0.25), 0.75);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend_with_opacity(0.75, 0.75, 0.75), 0.75);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_color_dodge() {\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.5, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::ColorDodge.blend(0.5, 1.0), 1.0);\n", "file_path": "src/generator/utils/color.rs", "rank": 75, "score": 34441.419333023216 }, { "content": "use std::collections::HashMap;\n\n\n\nuse image::{Pixel, Rgb, RgbImage};\n\n\n\nuse crate::generator::painter::Painter;\n\nuse crate::generator::utils::color::BlendingMode;\n\nuse crate::generator::utils::geom::{distance, find_target_draw_rect};\n\nuse crate::generator::utils::pixel::{blend, blend_linear};\n\nuse crate::generator::utils::random::{\n\n\tget_random_entry_weighted, get_random_range, 
get_random_ranges_bias_weighted,\n\n\tget_random_size_ranges_bias_weighted, get_rng,\n\n};\n\nuse crate::generator::utils::units::{Margins, SizeUnit, WeightedValue};\n\nuse crate::generator::utils::{image::get_pixel_interpolated, random::get_random_color};\n\n\n\npub struct CirclePainter {\n\n\tpub options: Options,\n\n}\n\n\n\npub struct Options {\n", "file_path": "src/generator/painter/circle.rs", "rank": 79, "score": 34439.75770314174 }, { "content": "\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.75, 0.75, 0.25), 0.5625);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.75, 0.75, 0.75), 0.1875);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_exclusion() {\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.0, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(0.5, 1.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend(1.0, 1.0), 0.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.25, 0.25, 0.25), 0.28125);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.25, 0.25, 0.75), 0.34375);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.25, 0.75, 0.25), 0.34375);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.25, 0.75, 0.75), 0.53125);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.75, 0.25, 0.25), 0.71875);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.75, 0.25, 0.75), 0.65625);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.75, 0.75, 0.25), 0.65625);\n\n\t\tassert_eq!(BlendingMode::Exclusion.blend_with_opacity(0.75, 0.75, 0.75), 0.46875);\n\n\t}\n\n}\n", "file_path": "src/generator/utils/color.rs", "rank": 82, "score": 34437.63903135375 }, { "content": "\t#[test]\n\n\tfn test_blend_soft_light() {\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.5, 0.0), 0.25);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(0.5, 1.0), 0.7071067811865476);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.25, 0.25, 0.25), 0.2265625);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.25, 0.25, 0.75), 0.1796875);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.25, 0.75, 0.25), 0.28125);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.25, 0.75, 0.75), 0.34375);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.75, 0.25, 0.25), 0.7265625);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.75, 0.25, 0.75), 0.6796875);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.75, 0.75, 0.25), 0.7645031754730548);\n\n\t\tassert_eq!(BlendingMode::SoftLight.blend_with_opacity(0.75, 0.75, 0.75), 0.7935095264191645);\n", "file_path": 
"src/generator/utils/color.rs", "rank": 83, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.25, 0.75, 0.25), 0.28125);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.25, 0.75, 0.75), 0.34375);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.75, 0.25, 0.25), 0.71875);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.75, 0.25, 0.75), 0.65625);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.75, 0.75, 0.25), 0.78125);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.75, 0.75, 0.75), 0.84375);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_darken() {\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(0.5, 1.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(1.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Darken.blend(1.0, 1.0), 1.0);\n\n\n", "file_path": "src/generator/utils/color.rs", "rank": 84, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Normal.blend(0.5, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend(1.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend(1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend(1.0, 1.0), 1.0);\n\n\n\n\t\t// With transparency\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.0, 0.25), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.0, 0.75), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.0, 1.0), 0.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.5, 0.25), 0.125);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.5, 0.5), 0.25);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.5, 0.75), 0.375);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 0.5, 1.0), 0.5);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 1.0, 0.0), 0.0);\n", "file_path": "src/generator/utils/color.rs", "rank": 85, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.75, 0.25, 0.25), 0.765625);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.75, 0.25, 0.75), 0.796875);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.75, 0.75, 0.25), 0.796875);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.75, 0.75, 0.75), 0.890625);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_overlay() {\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(0.5, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(1.0, 0.5), 
1.0);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.25, 0.25, 0.25), 0.21875);\n\n\t\tassert_eq!(BlendingMode::Overlay.blend_with_opacity(0.25, 0.25, 0.75), 0.15625);\n", "file_path": "src/generator/utils/color.rs", "rank": 86, "score": 34437.63903135375 }, { "content": "\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_multiply() {\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.0, 1.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.5, 0.5), 0.25);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(0.5, 1.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(1.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.25, 0.25, 0.25), 0.203125);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.25, 0.25, 0.75), 0.109375);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.25, 0.75, 0.25), 0.234375);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.25, 0.75, 0.75), 0.203125);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.75, 0.25, 0.25), 0.609375);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.75, 0.25, 0.75), 0.328125);\n", "file_path": "src/generator/utils/color.rs", "rank": 87, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::HardLight.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(0.0, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(0.0, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(0.5, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(0.5, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(1.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.25, 0.25, 0.25), 0.21875);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.25, 0.25, 0.75), 0.15625);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.25, 0.75, 0.25), 0.34375);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.25, 0.75, 0.75), 0.53125);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.75, 0.25, 0.25), 0.65625);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.75, 0.25, 0.75), 0.46875);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.75, 0.75, 0.25), 0.78125);\n\n\t\tassert_eq!(BlendingMode::HardLight.blend_with_opacity(0.75, 0.75, 0.75), 0.84375);\n\n\t}\n\n\n", "file_path": "src/generator/utils/color.rs", "rank": 88, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 1.0, 0.75), 0.875);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.0, 0.25), 0.75);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.0, 0.75), 
0.25);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.0, 1.0), 0.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.5, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.5, 0.25), 0.875);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.5, 0.5), 0.75);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.5, 0.75), 0.625);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 0.5, 1.0), 0.5);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 1.0, 0.25), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 1.0, 0.75), 1.0);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(1.0, 1.0, 1.0), 1.0);\n", "file_path": "src/generator/utils/color.rs", "rank": 89, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 1.0, 0.25), 0.25);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 1.0, 0.75), 0.75);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.0, 1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.0, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.0, 0.25), 0.375);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.0, 0.5), 0.25);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.0, 0.75), 0.125);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.0, 1.0), 0.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.5, 0.25), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.5, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.5, 0.75), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 0.5, 1.0), 0.5);\n\n\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 1.0, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 1.0, 0.25), 0.625);\n\n\t\tassert_eq!(BlendingMode::Normal.blend_with_opacity(0.5, 1.0, 0.5), 0.75);\n", "file_path": "src/generator/utils/color.rs", "rank": 90, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.25, 0.25, 0.25), 0.25);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.25, 0.25, 0.75), 0.25);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.25, 0.75, 0.25), 0.25);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.25, 0.75, 0.75), 0.25);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.75, 0.25, 0.25), 0.625);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.75, 0.25, 0.75), 0.375);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.75, 0.75, 0.25), 0.75);\n\n\t\tassert_eq!(BlendingMode::Darken.blend_with_opacity(0.75, 0.75, 0.75), 0.75);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_lighten() {\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.0, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.5, 0.5), 
0.5);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(0.5, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Lighten.blend(1.0, 0.5), 1.0);\n", "file_path": "src/generator/utils/color.rs", "rank": 91, "score": 34437.63903135375 }, { "content": "\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.75, 0.75, 0.25), 0.703125);\n\n\t\tassert_eq!(BlendingMode::Multiply.blend_with_opacity(0.75, 0.75, 0.75), 0.609375);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_screen() {\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.0, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.5, 0.5), 0.75);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(0.5, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(1.0, 0.5), 1.0);\n\n\t\tassert_eq!(BlendingMode::Screen.blend(1.0, 1.0), 1.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.25, 0.25, 0.25), 0.296875);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.25, 0.25, 0.75), 0.390625);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.25, 0.75, 0.25), 0.390625);\n\n\t\tassert_eq!(BlendingMode::Screen.blend_with_opacity(0.25, 0.75, 0.75), 0.671875);\n", "file_path": "src/generator/utils/color.rs", "rank": 92, "score": 34437.63903135375 }, { "content": "\t}\n\n\n\n\t#[test]\n\n\tfn test_blend_difference() {\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.0, 0.0), 0.0);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.0, 1.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.5, 0.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.5, 0.5), 0.0);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(0.5, 1.0), 0.5);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(1.0, 0.0), 1.0);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(1.0, 0.5), 0.5);\n\n\t\tassert_eq!(BlendingMode::Difference.blend(1.0, 1.0), 0.0);\n\n\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.25, 0.25, 0.25), 0.1875);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.25, 0.25, 0.75), 0.0625);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.25, 0.75, 0.25), 0.3125);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.25, 0.75, 0.75), 0.4375);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.75, 0.25, 0.25), 0.6875);\n\n\t\tassert_eq!(BlendingMode::Difference.blend_with_opacity(0.75, 0.25, 0.75), 0.5625);\n", "file_path": "src/generator/utils/color.rs", "rank": 93, "score": 34437.63903135375 }, { "content": "\t\t\t&mut rng,\n\n\t\t\t&self.options.radius,\n\n\t\t\tself.options.radius_bias,\n\n\t\t\tmax_dimension,\n\n\t\t);\n\n\n\n\t\t// Distribute along the axis too\n\n\t\tlet circle_x = get_random_range(\n\n\t\t\t&mut rng,\n\n\t\t\ttarget_area.x as f64 + radius,\n\n\t\t\t(target_area.x + target_area.width) as f64 - radius,\n\n\t\t);\n\n\t\tlet circle_y = get_random_range(\n\n\t\t\t&mut rng,\n\n\t\t\ttarget_area.y as f64 + radius,\n\n\t\t\t(target_area.y + target_area.height) as f64 - radius,\n\n\t\t);\n\n\n\n\t\t// Find final, round positions\n\n\t\tlet x1 = (circle_x - radius).floor().max(0.0).min(image_area.0 as f64) as u32;\n", "file_path": "src/generator/painter/circle.rs", "rank": 95, "score": 
34434.06439710469 }, { "content": "\t\t\t\t\tlet new_alpha = if abs > 1.0 {\n\n\t\t\t\t\t\t1.0\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\tif self.options.anti_alias {\n\n\t\t\t\t\t\t\tabs\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tif abs >= 0.5 {\n\n\t\t\t\t\t\t\t\t1.0\n\n\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\t0.0\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t};\n\n\t\t\t\t\tlet new_pixel = Rgb(blend(\n\n\t\t\t\t\t\tpainted_canvas.get_pixel(x, y).channels(),\n\n\t\t\t\t\t\t&color,\n\n\t\t\t\t\t\tnew_alpha * alpha,\n\n\t\t\t\t\t\t&blending_mode,\n\n\t\t\t\t\t));\n\n\t\t\t\t\tpainted_canvas.put_pixel(x, y, new_pixel);\n", "file_path": "src/generator/painter/circle.rs", "rank": 96, "score": 34431.87353342277 }, { "content": "\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tOk(painted_canvas)\n\n\t}\n\n\n\n\tfn get_metadata(&self) -> HashMap<String, String> {\n\n\t\tlet mut data = HashMap::new();\n\n\t\tdata.insert(String::from(\"RNG seed\"), format!(\"{}\", &self.options.rng_seed));\n\n\t\tdata\n\n\t}\n\n}\n", "file_path": "src/generator/painter/circle.rs", "rank": 99, "score": 34426.95380017932 } ]
Rust
src/main.rs
wayfair-tremor/uring
483adf33d61a767a2de9a5ed4ed5cc272cc62ac2
#![recursion_limit = "2048"] mod codec; #[allow(unused)] pub mod errors; pub mod network; mod protocol; mod pubsub; pub mod raft_node; pub mod service; pub mod storage; pub mod version; use crate::network::{ws, Network, RaftNetworkMsg}; use crate::raft_node::*; use crate::service::mring::{self, placement::continuous}; use crate::service::{kv, Service}; use crate::storage::URRocksStorage; use async_std::task; use clap::{App as ClApp, Arg}; use futures::{select, FutureExt, StreamExt}; use serde_derive::{Deserialize, Serialize}; use slog::{Drain, Logger}; use slog_json; use std::time::{Duration, Instant}; pub use uring_common::*; use ws_proto::PSURing; const CHANNEL_SIZE: usize = 64usize; #[macro_use] extern crate slog; #[derive(Deserialize, Serialize)] pub struct KV { key: String, value: serde_json::Value, } #[derive(Deserialize, Serialize, Debug)] pub struct Event { nid: Option<NodeId>, eid: EventId, sid: ServiceId, data: Vec<u8>, } #[derive(Deserialize, Serialize)] pub struct KVs { scope: u16, key: Vec<u8>, value: Vec<u8>, } async fn raft_loop<N: Network>( id: NodeId, bootstrap: bool, ring_size: Option<u64>, pubsub: pubsub::Channel, network: N, logger: Logger, ) where N: 'static, { let mut node: RaftNode<URRocksStorage, _> = if bootstrap { RaftNode::create_raft_leader(&logger, id, pubsub, network).await } else { RaftNode::create_raft_follower(&logger, id, pubsub, network).await }; node.set_raft_tick_duration(Duration::from_millis(100)); node.log().await; let kv = kv::Service::new(&logger, 0); node.add_service(kv::ID, Box::new(kv)); let mut vnode: mring::Service<continuous::Strategy> = mring::Service::new(); if let Some(size) = ring_size { if bootstrap { vnode .execute( node.raft_group.as_ref().unwrap(), &mut node.pubsub, service::mring::Event::set_size(size), ) .await .unwrap(); } } node.add_service(mring::ID, Box::new(vnode)); let version = crate::service::version::Service::new(&logger); node.add_service(crate::service::version::ID, Box::new(version)); let status = crate::service::status::Service::new(&logger); let status = Box::new(status); node.add_service(service::status::ID, status); node.node_loop().await.unwrap() } fn main() -> std::io::Result<()> { use version::VERSION; let matches = ClApp::new("cake") .version(VERSION) .author("The Treamor Team") .about("Uring Demo") .arg( Arg::with_name("id") .short("i") .long("id") .value_name("ID") .help("The Node ID") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .short("b") .long("bootstrap") .value_name("BOOTSTRAP") .help("Sets the node to bootstrap mode and become leader") .takes_value(false), ) .arg( Arg::with_name("ring-size") .short("r") .long("ring-size") .value_name("RING_SIZE") .help("Initialized mring size, only has an effect when used together with --bootstrap") .takes_value(true), ) .arg( Arg::with_name("http-endpoint") .long("http") .value_name("HTTP") .help("http endpoint to listen to") .takes_value(true), ) .arg( Arg::with_name("no-json") .short("n") .long("no-json") .value_name("NOJSON") .help("don't log via json") .takes_value(false), ) .arg( Arg::with_name("peers") .short("p") .long("peers") .value_name("PEERS") .multiple(true) .takes_value(true) .help("Peers to connet to"), ) .arg( Arg::with_name("endpoint") .short("e") .long("endpoint") .value_name("ENDPOINT") .takes_value(true) .default_value("127.0.0.1:8080") .help("Peers to connet to"), ) .get_matches(); let logger = if matches.is_present("no-json") { let decorator = slog_term::TermDecorator::new().build(); let drain = 
slog_term::FullFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) } else { let drain = slog_json::Json::default(std::io::stderr()).map(slog::Fuse); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) }; let peers = matches.values_of_lossy("peers").unwrap_or(vec![]); let ring_size: Option<u64> = matches.value_of("ring-size").map(|s| s.parse().unwrap()); let bootstrap = matches.is_present("bootstrap"); let endpoint = matches.value_of("endpoint").unwrap_or("127.0.0.1:8080"); let id = NodeId(matches.value_of("id").unwrap_or("1").parse().unwrap()); let loop_logger = logger.clone(); let rest_endpoint = matches.value_of("http-endpoint"); let ps_tx = pubsub::start(&logger); let network = ws::Network::new(&logger, id, endpoint, rest_endpoint, peers, ps_tx.clone()); task::block_on(raft_loop( id, bootstrap, ring_size, ps_tx, network, loop_logger, )); Ok(()) }
#![recursion_limit = "2048"] mod codec; #[allow(unused)] pub mod errors; pub mod network; mod protocol; mod pubsub; pub mod raft_node; pub mod service; pub mod storage; pub mod version; use crate::network::{ws, Network, RaftNetworkMsg}; use crate::raft_node::*; use crate::service::mring::{self, placement::continuous}; use crate::service::{kv, Service}; use crate::storage::URRocksStorage; use async_std::task; use clap::{App as ClApp, Arg}; use futures::{select, FutureExt, StreamExt}; use serde_derive::{Deserialize, Serialize}; use slog::{Drain, Logger}; use slog_json; use std::time::{Duration, Instant}; pub use uring_common::*; use ws_proto::PSURing; const CHANNEL_SIZE: usize = 64usize; #[macro_use] extern crate slog; #[derive(Deserialize, Serialize)] pub struct KV { key: String, value: serde_json::Value, } #[derive(Deserialize, Serialize, Debug)] pub struct Event { nid: Option<NodeId>, eid: EventId, sid: ServiceId, data: Vec<u8>, } #[derive(Deserialize, Serialize)] pub struct KVs { scope: u16, key: Vec<u8>, value: Vec<u8>, } async fn raft_loop<N: Network>( id: NodeId, bootstrap: bool, ring_size: Option<u64>, pubsub: pubsub::Channel, network: N, logger: Logger, ) where N: 'static, { let mut node: RaftNode<URRocksStorage, _> = if bootstrap { RaftNode::create_raft_leader(&logger, id, pubsub, network).await } else { RaftNode::create_raft_follower(&logger, id, pubsub, network).await }; node.set_raft_tick_duration(Duration::from_millis(100)); node.log().await; let kv = kv::Service::new(&logger, 0); node.add_service(kv::ID, Box::new(kv)); let mut vnode: mring::Service<continuous::Strategy> = mring::Service::new(); if let Some(size) = ring_size { if bootstrap { vnode .execute( node.raft_group.as_ref().unwrap(), &mut node.pubsub, service::mring::Event::set_size(size), ) .await .unwrap(); } } node.add_service(mring::ID, Box::new(vnode)); let version = crate::service::version::Service::new(&logger); node.add_service(crate::service::version::ID, Box::new(version)); let status = crate::service::status::Service::new(&logger); let status = Box::new(status); node.add_service(service::status::ID, status); node.node_loop().await.unwrap() } fn main() -> std::io::Result<()> { use version::VERSION; let matches = ClApp::new("cake") .version(VERSION) .author("The Treamor Team") .about("Uring Demo") .arg( Arg::with_name("id") .short("i") .long("id") .value_name("ID")
_tx = pubsub::start(&logger); let network = ws::Network::new(&logger, id, endpoint, rest_endpoint, peers, ps_tx.clone()); task::block_on(raft_loop( id, bootstrap, ring_size, ps_tx, network, loop_logger, )); Ok(()) }
.help("The Node ID") .takes_value(true), ) .arg( Arg::with_name("bootstrap") .short("b") .long("bootstrap") .value_name("BOOTSTRAP") .help("Sets the node to bootstrap mode and become leader") .takes_value(false), ) .arg( Arg::with_name("ring-size") .short("r") .long("ring-size") .value_name("RING_SIZE") .help("Initialized mring size, only has an effect when used together with --bootstrap") .takes_value(true), ) .arg( Arg::with_name("http-endpoint") .long("http") .value_name("HTTP") .help("http endpoint to listen to") .takes_value(true), ) .arg( Arg::with_name("no-json") .short("n") .long("no-json") .value_name("NOJSON") .help("don't log via json") .takes_value(false), ) .arg( Arg::with_name("peers") .short("p") .long("peers") .value_name("PEERS") .multiple(true) .takes_value(true) .help("Peers to connet to"), ) .arg( Arg::with_name("endpoint") .short("e") .long("endpoint") .value_name("ENDPOINT") .takes_value(true) .default_value("127.0.0.1:8080") .help("Peers to connet to"), ) .get_matches(); let logger = if matches.is_present("no-json") { let decorator = slog_term::TermDecorator::new().build(); let drain = slog_term::FullFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) } else { let drain = slog_json::Json::default(std::io::stderr()).map(slog::Fuse); let drain = slog_async::Async::new(drain).build().fuse(); slog::Logger::root(drain, o!()) }; let peers = matches.values_of_lossy("peers").unwrap_or(vec![]); let ring_size: Option<u64> = matches.value_of("ring-size").map(|s| s.parse().unwrap()); let bootstrap = matches.is_present("bootstrap"); let endpoint = matches.value_of("endpoint").unwrap_or("127.0.0.1:8080"); let id = NodeId(matches.value_of("id").unwrap_or("1").parse().unwrap()); let loop_logger = logger.clone(); let rest_endpoint = matches.value_of("http-endpoint"); let ps
function_block-random_span
[ { "content": "pub fn log(logger: &Logger) {\n\n info!(logger, \"uring version: {}\", VERSION);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn for_coverage_only() {\n\n print();\n\n log(&slog::Logger::root(slog::Discard, o!()));\n\n }\n\n}\n", "file_path": "src/version.rs", "rank": 0, "score": 181433.25458372006 }, { "content": "fn make_data_key(prefix: u16, key_s: &[u8]) -> Vec<u8> {\n\n let mut key = vec![0; 8 + key_s.len()];\n\n\n\n {\n\n key.put_u32_le(DATA_PREFIX as u32);\n\n key.put_u32_le(prefix as u32);\n\n key.write_all(key_s).unwrap();\n\n }\n\n\n\n key\n\n}\n", "file_path": "src/storage.rs", "rank": 1, "score": 156784.0339858317 }, { "content": "fn main() {\n\n let decorator = slog_term::TermDecorator::new().build();\n\n let drain = slog_term::FullFormat::new(decorator).build().fuse();\n\n let drain = slog_async::Async::new(drain).build().fuse();\n\n let logger = slog::Logger::root(drain, o!());\n\n\n\n let (tasks_tx, tasks_rx) = channel(crate::CHANNEL_SIZE);\n\n\n\n let local = env::args()\n\n .nth(1)\n\n .unwrap_or_else(|| panic!(\"this program requires at least two arguments\"));\n\n\n\n // Specify the server address to which the client will be connecting.\n\n let remote = env::args()\n\n .nth(2)\n\n .unwrap_or_else(|| panic!(\"this program requires at least two argument\"));\n\n\n\n task::spawn(vnode::run(\n\n logger.clone(),\n\n local.clone(),\n", "file_path": "mring-node/src/main.rs", "rank": 2, "score": 149034.61756042228 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]\n\nstruct VNode {\n\n id: u64,\n\n handoff: Option<handoff::Handoff>,\n\n data: Vec<String>,\n\n}\n\n\n\npub(crate) enum Task {\n\n HandoffOut {\n\n target: String,\n\n vnode: u64,\n\n },\n\n Assign {\n\n vnodes: Vec<u64>,\n\n },\n\n Update {\n\n next: MRingNodes,\n\n },\n\n HandoffInStart {\n\n src: String,\n\n vnode: u64,\n", "file_path": "mring-node/src/vnode.rs", "rank": 3, "score": 145415.0411650187 }, { "content": "fn param_err<T: std::fmt::Debug>(e: ParamError<T>) -> Error {\n\n Error::Param(format!(\"{:?}\", e))\n\n}\n\n\n\npub(crate) async fn get(cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n let key: String = cx.param(\"id\").map_err(param_err)?;\n\n let id = key.clone().into_bytes();\n\n info!(cx.state().logger, \"GET /kv/{}\", key);\n\n request(cx, UrMsg::Get(id.clone(), reply(tx)), rx).await\n\n}\n\n\n", "file_path": "src/network/ws/rest/kv.rs", "rank": 4, "score": 145095.14555913673 }, { "content": "pub fn print() {\n\n eprintln!(\"uring version: {}\", VERSION);\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 5, "score": 136972.91926858787 }, { "content": "fn param_err<T: std::fmt::Debug>(e: ParamError<T>) -> Error {\n\n Error::Param(format!(\"{:?}\", e))\n\n}\n\nasync fn uring_get(cx: Request<Node>) -> Result<Response> {\n\n let (tx, mut rx) = channel(crate::CHANNEL_SIZE);\n\n let id: u64 = cx.param(\"id\").map_err(param_err)?;\n\n cx.state()\n\n .tx\n\n .unbounded_send(UrMsg::GetNode(NodeId(id), tx))?;\n\n\n\n rx.next()\n\n .await\n\n .ok_or(StatusCode::NOT_FOUND.into())\n\n .and_then(response_json_200)\n\n}\n\n\n\nasync fn uring_post(cx: Request<Node>) -> Result<Response> {\n\n let (tx, mut rx) = channel(crate::CHANNEL_SIZE);\n\n let id: u64 = cx.param(\"id\").map_err(param_err)?;\n\n cx.state()\n", "file_path": "src/network/ws/rest.rs", "rank": 6, "score": 125868.76915773288 }, { "content": "#[derive(Serialize, Deserialize, Clone, Debug)]\n\nstruct HandoffKV {\n\n key: 
String,\n\n value: String,\n\n}\n\nimpl URRocksStorage {\n\n fn get_hard_state(&self) -> HardState {\n\n let mut hs = HardState::new();\n\n if let Ok(Some(data)) = self.backend.get(&HARD_STATE) {\n\n hs.merge_from_bytes(&data).unwrap();\n\n };\n\n hs\n\n }\n\n fn get_conf_state(&self) -> ConfState {\n\n let mut cs = ConfState::new();\n\n if let Ok(Some(data)) = self.backend.get(&CONF_STATE) {\n\n cs.merge_from_bytes(&data).unwrap();\n\n };\n\n cs\n\n }\n\n fn clear_log(&self) {\n", "file_path": "src/storage.rs", "rank": 7, "score": 122844.63147517857 }, { "content": "#[derive(Default)]\n\nstruct State {\n\n vnodes: HashMap<u64, VNode>,\n\n mappings: HashMap<u64, String>,\n\n}\n\n\n\nimpl State {\n\n pub fn update_ring(&mut self, mapping: MRingNodes) {\n\n for node in mapping.into_iter() {\n\n for vnode in &node.vnodes {\n\n self.mappings.insert(*vnode, node.id.clone());\n\n }\n\n }\n\n }\n\n}\n\n\n\nasync fn handle_cmd(\n\n logger: &Logger,\n\n cmd: Option<Cmd>,\n\n state: &mut State,\n\n tasks_tx: &mut Sender<Task>,\n", "file_path": "mring-node/src/vnode.rs", "rank": 8, "score": 117676.7721012773 }, { "content": "fn response_json<S: Serialize>(c: u16, v: S) -> Result<Response> {\n\n let mut r = Response::new(c);\n\n r.set_body(serde_json::to_vec(&v)?);\n\n Ok(r)\n\n}\n\n\n", "file_path": "src/network/ws/rest.rs", "rank": 9, "score": 113529.91581177052 }, { "content": "// The message can be used to initialize a raft node or not.\n\nfn is_initial_msg(msg: &Message) -> bool {\n\n let msg_type = msg.get_msg_type();\n\n msg_type == MessageType::MsgRequestVote\n\n || msg_type == MessageType::MsgRequestPreVote\n\n || (msg_type == MessageType::MsgHeartbeat && msg.commit == 0)\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]\n\npub struct RaftNodeStatus {\n\n id: u64,\n\n role: String,\n\n promotable: bool,\n\n pass_election_timeout: bool,\n\n election_elapsed: usize,\n\n randomized_election_timeout: usize,\n\n term: u64,\n\n last_index: u64,\n\n}\n\n\n\n// unsafe impl Send for RaftNodeStatus {}\n", "file_path": "src/raft_node.rs", "rank": 10, "score": 111041.04556732281 }, { "content": "#[async_trait]\n\npub trait Service<Storage>: Send + Sync\n\nwhere\n\n Storage: storage::Storage + Send + Sync,\n\n{\n\n async fn execute(\n\n &mut self,\n\n node: &Mutex<RawNode<Storage>>,\n\n pubsub: &mut pubsub::Channel,\n\n event: Vec<u8>,\n\n ) -> Result<(u16, Vec<u8>), Error>;\n\n fn is_local(&self, event: &[u8]) -> Result<bool, Error>;\n\n}\n", "file_path": "src/service.rs", "rank": 11, "score": 109970.30179586282 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct PostBody {\n\n value: String,\n\n}\n\n\n\npub(crate) async fn post(mut cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n let key: String = cx.param(\"id\").map_err(param_err)?;\n\n let id = key.clone().into_bytes();\n\n let body: PostBody = cx.body_json().await?;\n\n info!(cx.state().logger, \"POST /kv/{} -> {}\", key, body.value);\n\n request(\n\n cx,\n\n UrMsg::Put(id, body.value.clone().into_bytes(), reply(tx)),\n\n rx,\n\n )\n\n .await\n\n}\n\n\n", "file_path": "src/network/ws/rest/kv.rs", "rank": 12, "score": 109441.71351090731 }, { "content": "#[derive(Deserialize)]\n\nstruct CasBody {\n\n check: Option<String>,\n\n store: String,\n\n}\n\n\n\npub(crate) async fn cas(mut cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n let id: String = cx.param(\"id\").map_err(param_err)?;\n\n let id = id.into_bytes();\n\n let body: CasBody = 
cx.body_json().await?;\n\n\n\n request(\n\n cx,\n\n UrMsg::Cas(\n\n id,\n\n body.check.clone().map(String::into_bytes),\n\n body.store.clone().into_bytes(),\n\n reply(tx),\n\n ),\n\n rx,\n", "file_path": "src/network/ws/rest/kv.rs", "rank": 13, "score": 109437.40732746993 }, { "content": "#[async_trait]\n\npub trait Network: Send + Sync {\n\n async fn next(&mut self) -> Option<RaftNetworkMsg>;\n\n async fn ack_proposal(\n\n &mut self,\n\n to: NodeId,\n\n pid: ProposalId,\n\n success: bool,\n\n ) -> Result<(), Error>;\n\n async fn event_reply(&mut self, id: EventId, code: u16, reply: Vec<u8>) -> Result<(), Error>;\n\n async fn send_msg(&mut self, msg: RaftMessage) -> Result<(), Error>;\n\n fn connections(&self) -> Vec<NodeId>;\n\n async fn forward_proposal(\n\n &mut self,\n\n from: NodeId,\n\n to: NodeId,\n\n pid: ProposalId,\n\n sid: ServiceId,\n\n eid: EventId,\n\n data: Vec<u8>,\n\n ) -> Result<(), Error>;\n", "file_path": "src/network.rs", "rank": 15, "score": 97287.01968989123 }, { "content": "fn make_log_key(idx: u64) -> Vec<u8> {\n\n let mut key: Vec<u8> = vec![0; 16];\n\n\n\n {\n\n // let mut key = Cursor::new(&mut key[..]);\n\n key.put_u64_le(RAFT_PREFIX as u64);\n\n key.put_u64_le(idx);\n\n }\n\n\n\n key\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 16, "score": 93524.81737801738 }, { "content": "#[async_trait]\n\npub trait WriteStorage {\n\n async fn append(&self, entries: &[Entry]) -> RaftResult<()>;\n\n async fn apply_snapshot(&mut self, snapshot: Snapshot) -> RaftResult<()>;\n\n async fn set_conf_state(&mut self, cs: ConfState) -> RaftResult<()>;\n\n async fn set_hard_state(&mut self, commit: u64, term: u64) -> RaftResult<()>;\n\n async fn get(&self, scope: u16, key: &[u8]) -> Option<Vec<u8>>;\n\n async fn put(&self, keyscope: u16, key: &[u8], value: &[u8]);\n\n async fn cas(\n\n &self,\n\n keyscope: u16,\n\n key: &[u8],\n\n check_value: Option<&[u8]>,\n\n store_value: &[u8],\n\n ) -> Option<Option<Vec<u8>>>;\n\n async fn delete(&self, scope: u16, key: &[u8]) -> Option<Vec<u8>>;\n\n}\n\n\n\nuse rocksdb::{Direction, IteratorMode, WriteBatch, DB};\n\n\n\nconst CONF_STATE: &'static [u8; 16] = b\"\\0\\0\\0\\0\\0\\0\\0ConfState\";\n", "file_path": "src/storage.rs", "rank": 17, "score": 92269.84931064796 }, { "content": "#[async_trait]\n\npub trait Storage: WriteStorage + ReadStorage {\n\n async fn new_with_conf_state(id: NodeId, state: ConfState) -> Self;\n\n async fn new(id: NodeId) -> Self;\n\n}\n\n\n\n/// The missing storage trait from raft-rs ...\n", "file_path": "src/storage.rs", "rank": 18, "score": 89025.50758142347 }, { "content": "struct Connection {\n\n addr: SocketAddr,\n\n rx: Receiver<TungstenMessage>,\n\n tx: Sender<TungstenMessage>,\n\n tasks: Sender<vnode::Task>,\n\n vnode: Option<u64>,\n\n}\n\n\n\nasync fn handle_connection(logger: Logger, mut connection: Connection) {\n\n while let Some(msg) = connection.rx.next().await {\n\n info!(\n\n logger,\n\n \"Received a message from {}: {}\", connection.addr, msg\n\n );\n\n match serde_json::from_slice(&msg.into_data()) {\n\n Ok(Message::HandoffStart { src, vnode }) => {\n\n assert!(connection.vnode.is_none());\n\n connection.vnode = Some(vnode);\n\n connection\n\n .tasks\n", "file_path": "mring-node/src/handoff.rs", "rank": 19, "score": 85416.43550218601 }, { "content": "fn response_json_200<S: Serialize>(v: S) -> Result<Response> {\n\n response_json(200, v)\n\n}\n\n\n\nasync fn version(cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n request(cx, 
UrMsg::Version(RequestId(666), tx), rx).await\n\n}\n\n\n\nasync fn status(cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n request(cx, UrMsg::Status(RequestId(666), tx), rx).await\n\n}\n\n\n", "file_path": "src/network/ws/rest.rs", "rank": 20, "score": 84795.28465985243 }, { "content": "type RemoteMailboxes = HashMap<NodeId, Sender<WsMessage>>;\n\n\n\n#[derive(Clone)]\n\npub(crate) struct Node {\n\n id: NodeId,\n\n tx: UnboundedSender<UrMsg>,\n\n logger: Logger,\n\n pubsub: pubsub::Channel,\n\n}\n\n\n\npub struct Network {\n\n id: NodeId,\n\n local_mailboxes: LocalMailboxes,\n\n remote_mailboxes: RemoteMailboxes,\n\n known_peers: HashMap<NodeId, String>,\n\n endpoint: String,\n\n logger: Logger,\n\n rx: UnboundedReceiver<UrMsg>,\n\n tx: UnboundedSender<UrMsg>,\n\n next_eid: u64,\n", "file_path": "src/network/ws.rs", "rank": 21, "score": 83950.42806598268 }, { "content": "type LocalMailboxes = HashMap<NodeId, Sender<WsMessage>>;\n", "file_path": "src/network/ws.rs", "rank": 22, "score": 83950.42806598268 }, { "content": "pub trait Model {}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]\n\npub enum EventType {\n\n Hup,\n\n Beat,\n\n Propose,\n\n Append,\n\n AppendResponse,\n\n RequestVote,\n\n RequestVoteResponse,\n\n Snapshot,\n\n Heartbeat,\n\n HeartbeatResponse,\n\n Unreachable,\n\n SnapStatus,\n\n CheckQuorum,\n\n TransferLeader,\n\n TimeoutNow,\n\n ReadIndex,\n", "file_path": "src/codec/json.rs", "rank": 23, "score": 82348.73818422535 }, { "content": "#[async_trait]\n\npub trait Intercept {\n\n async fn inbound(&mut self, msg: HandlerInboundMessage) -> Reply;\n\n async fn outbound(&mut self, id: RequestId, data: Vec<u8>) -> Result<Vec<u8>, DriverErrorType> {\n\n if let Some(rid) = self.result_id_map(id) {\n\n let data = serde_json::from_slice(&data).unwrap();\n\n Ok(serde_json::to_vec(&JsonReply { rid, data }).unwrap())\n\n } else {\n\n Ok(data)\n\n }\n\n }\n\n\n\n fn result_id_map(&mut self, id: RequestId) -> Option<RequestId> {\n\n let _id = id;\n\n None\n\n }\n\n}\n\n\n\npub struct Interceptor<Handler>\n\nwhere\n\n Handler: Intercept,\n", "file_path": "protocol-driver/src/interceptor.rs", "rank": 24, "score": 79322.51818106374 }, { "content": "pub trait Placement {\n\n fn add_node(count: u64, current: MRingNodes, new: String) -> (MRingNodes, Relocations);\n\n fn remove_node(count: u64, current: MRingNodes, old: String) -> (MRingNodes, Relocations);\n\n fn new(count: u64, new: String) -> MRingNodes;\n\n fn name() -> String;\n\n}\n", "file_path": "src/service/mring/placement.rs", "rank": 25, "score": 79110.45753241846 }, { "content": "fn example_config() -> Config {\n\n Config {\n\n election_tick: 10,\n\n heartbeat_tick: 3,\n\n pre_vote: true,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/raft_node.rs", "rank": 26, "score": 78401.36504080518 }, { "content": "type DriverOutboundData = Result<Vec<u8>, DriverError>;\n\n\n\npub type ClientId = u64;\n\npub type CorrelationId = u64;\n\n\n\npub struct ClientConnection {\n\n protocol: Protocol,\n\n enabled_protocols: Vec<CustomProtocol>,\n\n}\n\n\n\nimpl Default for ClientConnection {\n\n fn default() -> Self {\n\n ClientConnection {\n\n protocol: Protocol::Connect,\n\n enabled_protocols: vec![],\n\n }\n\n }\n\n}\n\n\n\n#[derive(Hash, PartialEq, Eq, Clone, Copy, Debug)]\n", "file_path": "protocol-driver/src/lib.rs", "rank": 27, "score": 73408.56706631446 }, { "content": " Ok(Request::Subscribe { channel: c }) => {\n\n let (tx, mut rx) = channel(64);\n\n let mut 
outbound = msg.outbound_channel;\n\n let id = msg.id;\n\n let data = serde_json::to_vec(&Reply::Subscribed { channel: c.clone() }).unwrap();\n\n outbound\n\n .send(HandlerOutboundMessage::partial(id, data))\n\n .await\n\n .unwrap();\n\n\n\n task::spawn(async move {\n\n while let Some(msg) = dbg!(rx.next().await) {\n\n let data = serde_json::to_vec(&msg).unwrap();\n\n outbound\n\n .send(HandlerOutboundMessage::partial(id, data))\n\n .await\n\n .unwrap();\n\n }\n\n });\n\n self.pubsub\n", "file_path": "src/protocol/pubsub.rs", "rank": 28, "score": 70502.01865647615 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::pubsub;\n\nuse async_std::task;\n\nuse async_trait::async_trait;\n\nuse futures::channel::mpsc::channel;\n\nuse futures::{SinkExt, StreamExt};\n\nuse protocol_driver::{\n\n interceptor, DriverErrorType, HandlerInboundMessage, HandlerOutboundMessage,\n\n};\n\nuse serde_derive::{Deserialize, Serialize};\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n", "file_path": "src/protocol/pubsub.rs", "rank": 29, "score": 70500.16048715138 }, { "content": " .send(pubsub::Msg::Subscribe { channel: c, tx })\n\n .await\n\n .unwrap();\n\n\n\n interceptor::Reply::Terminate\n\n }\n\n Err(_) => interceptor::Reply::Err(DriverErrorType::BadInput),\n\n }\n\n }\n\n}\n", "file_path": "src/protocol/pubsub.rs", "rank": 30, "score": 70491.40588908423 }, { "content": " use crate::pubsub;\n\n use crate::service::Service;\n\n use crate::storage;\n\n use crate::NodeId;\n\n use crate::RaftNode;\n\n use futures::channel::mpsc::{channel, Sender};\n\n use futures::executor::block_on;\n\n\n\n #[test]\n\n fn test_get_version() {\n\n let logger = &slog::Logger::root(slog::Discard, o!());\n\n let mut s = super::Service::new(&logger);\n\n let id = NodeId(42);\n\n let get = serde_json::to_vec(&Event::Get).ok().unwrap();\n\n let network = network::NullNetwork::default();\n\n\n\n let fake = channel(1);\n\n let (tx, _rx) = fake;\n\n let topic = Sender::<pubsub::Msg>::from(tx);\n\n\n", "file_path": "src/service/version.rs", "rank": 31, "score": 70284.92494054418 }, { "content": "}\n\n\n\nimpl Event {\n\n pub fn get() -> Vec<u8> {\n\n serde_json::to_vec(&Event::Get).unwrap()\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<Storage> super::Service<Storage> for Service\n\nwhere\n\n Storage: storage::Storage + Send + Sync + 'static,\n\n{\n\n async fn execute(\n\n &mut self,\n\n _node: &Mutex<RawNode<Storage>>,\n\n _pubsub: &mut pubsub::Channel,\n\n event: Vec<u8>,\n\n ) -> Result<(u16, Vec<u8>), Error> {\n\n match serde_json::from_slice(&event) {\n", "file_path": "src/service/version.rs", "rank": 32, "score": 70284.42702950492 }, { "content": " block_on(async {\n\n let mut node: RaftNode<storage::NullStorage, _> =\n\n RaftNode::create_raft_leader(&logger, id, topic, network).await;\n\n let version = s\n\n .execute(node.raft_group.as_ref().unwrap(), &mut node.pubsub, get)\n\n .await\n\n .ok()\n\n .unwrap()\n\n .1;\n\n assert_eq!(\n\n 
format!(\"\\\"{}\\\"\", VERSION),\n\n String::from_utf8(version).ok().unwrap()\n\n );\n\n });\n\n }\n\n}\n", "file_path": "src/service/version.rs", "rank": 33, "score": 70282.78230997194 }, { "content": " Ok(Event::Get) => {\n\n debug!(self.logger, \"GET\",);\n\n Ok((\n\n 200,\n\n serde_json::to_vec(&serde_json::Value::String(VERSION.to_string())).unwrap(),\n\n ))\n\n }\n\n _ => Err(Error::UnknownEvent),\n\n }\n\n }\n\n\n\n fn is_local(&self, _event: &[u8]) -> Result<bool, Error> {\n\n Ok(true)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::network;\n", "file_path": "src/service/version.rs", "rank": 34, "score": 70281.44115926145 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse slog::Logger;\n\n\n\npub const ID: ServiceId = ServiceId(2);\n\n\n\npub struct Service {\n\n logger: Logger,\n\n}\n\n\n\nimpl Service {\n\n pub fn new(logger: &Logger) -> Self {\n\n Self {\n\n logger: logger.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum Event {\n\n Get,\n", "file_path": "src/service/version.rs", "rank": 35, "score": 70274.10852200609 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse super::*;\n\nuse crate::version::VERSION;\n\nuse crate::{storage, ServiceId};\n\nuse async_std::sync::Mutex;\n\nuse async_trait::async_trait;\n\nuse raft::RawNode;\n", "file_path": "src/service/version.rs", "rank": 36, "score": 70262.7083705622 }, { "content": "}\n\n\n\n#[derive(Default)]\n\npub struct Handler {\n\n ids: HashMap<RequestId, RequestId>,\n\n}\n\n\n\n#[async_trait]\n\nimpl interceptor::Intercept for Handler {\n\n async fn inbound(&mut self, mut msg: HandlerInboundMessage) -> interceptor::Reply {\n\n use kv::Event;\n\n msg.service_id = Some(kv::ID);\n\n msg.data = match dbg!(serde_json::from_slice(&msg.data)) {\n\n Ok(Request::Get { key, rid }) => {\n\n self.ids.insert(msg.id, rid);\n\n Event::get(key.into_bytes())\n\n }\n\n Ok(Request::Put { key, store, rid }) => {\n\n self.ids.insert(msg.id, rid);\n\n Event::put(key.into_bytes(), store.into_bytes())\n", "file_path": "src/protocol/kv.rs", "rank": 37, "score": 70239.84261528005 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::service::kv;\n\nuse async_trait::async_trait;\n\nuse protocol_driver::{interceptor, DriverErrorType, HandlerInboundMessage, 
RequestId};\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n", "file_path": "src/protocol/kv.rs", "rank": 38, "score": 70232.77458162814 }, { "content": " }\n\n Ok(Request::Delete { key, rid }) => {\n\n self.ids.insert(msg.id, rid);\n\n Event::delete(key.into_bytes())\n\n }\n\n Ok(Request::Cas {\n\n key,\n\n check,\n\n store,\n\n rid,\n\n }) => {\n\n self.ids.insert(msg.id, rid);\n\n kv::Event::cas(\n\n key.into_bytes(),\n\n check.map(String::into_bytes),\n\n store.into_bytes(),\n\n )\n\n }\n\n Err(_) => return interceptor::Reply::Err(DriverErrorType::BadInput),\n\n };\n", "file_path": "src/protocol/kv.rs", "rank": 39, "score": 70226.73364041484 }, { "content": " interceptor::Reply::Ok(msg)\n\n }\n\n fn result_id_map(&mut self, id: RequestId) -> Option<RequestId> {\n\n self.ids.remove(&id)\n\n }\n\n}\n\n\n\n/*\n\n{\"Connect\": [\"kv\", \"pubsub\"]}\n\n\n\n{\"Select\": \"pubsub\"}\n\n{\"Subscribe\": {\"channel\": \"kv\"}}\n\n\n\n{\"Select\": \"kv\"}\n\n{\"Put\": {\"key\": \"snot\", \"store\": \"badger\"}}\n\n{\"Get\": {\"key\": \"snot\"}}\n\n*/\n", "file_path": "src/protocol/kv.rs", "rank": 40, "score": 70226.09057569463 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\nuse slog::Logger;\n\n\n\npub const ID: ServiceId = ServiceId(0);\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub(crate) enum PSEvent {\n\n Put {\n\n scope: u16,\n\n key: String,\n\n new: String,\n\n old: Option<String>,\n\n },\n\n Cas {\n\n scope: u16,\n\n key: String,\n\n new: String,\n\n old: Option<String>,\n\n },\n\n CasConflict {\n", "file_path": "src/service/kv.rs", "rank": 41, "score": 70016.37755418572 }, { "content": " serde_json::to_vec(&Event::Delete { key }).unwrap()\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<Storage> super::Service<Storage> for Service\n\nwhere\n\n Storage: storage::Storage + Send + Sync + 'static,\n\n{\n\n async fn execute(\n\n &mut self,\n\n node: &Mutex<RawNode<Storage>>,\n\n pubsub: &mut pubsub::Channel,\n\n event: Vec<u8>,\n\n ) -> Result<(u16, Vec<u8>), Error> {\n\n let raft_node = node.try_lock().unwrap();\n\n let storage = raft_node.store();\n\n match serde_json::from_slice(&event) {\n\n Ok(Event::Get { key }) => {\n\n debug!(\n", "file_path": "src/service/kv.rs", "rank": 42, "score": 70016.21510379855 }, { "content": " );\n\n let old = storage\n\n .get(self.scope, &key)\n\n .await\n\n .and_then(|value| String::from_utf8(value).ok());\n\n storage.put(self.scope, &key, &value).await;\n\n let msg = serde_json::to_value(&PSEvent::Put {\n\n scope: self.scope,\n\n key: String::from_utf8(key).unwrap_or_default(),\n\n new: String::from_utf8(value).unwrap_or_default(),\n\n old: old.clone(),\n\n })\n\n .unwrap();\n\n pubsub\n\n .send(pubsub::Msg::Msg {\n\n channel: \"kv\".into(),\n\n msg: msg,\n\n })\n\n .await\n\n .unwrap();\n", "file_path": "src/service/kv.rs", "rank": 43, "score": 70005.89748680103 }, { "content": " self.logger,\n\n \"READ {:?}\",\n\n String::from_utf8(key.clone()).ok()\n\n );\n\n if let Some(s) = storage\n\n .get(self.scope, &key)\n\n .await\n\n .and_then(|v| String::from_utf8(v).ok())\n\n {\n\n Ok((200u16, serde_json::to_vec(&s).unwrap()))\n\n } else {\n\n Ok((404u16, serde_json::to_vec(&\"not found\").ok().unwrap()))\n\n }\n\n }\n\n Ok(Event::Put { key, value }) => {\n\n debug!(\n\n self.logger,\n\n \"WROTE {:?}: {:?}\",\n\n String::from_utf8(key.clone()).ok(),\n\n String::from_utf8(value.clone()).ok()\n", "file_path": "src/service/kv.rs", "rank": 44, "score": 
70004.81359936662 }, { "content": " .unwrap();\n\n Ok((201, serde_json::to_vec(&\"set\").unwrap()))\n\n }\n\n }\n\n Ok(Event::Delete { key }) => Ok((\n\n 200u16,\n\n storage\n\n .delete(self.scope, &key)\n\n .await\n\n .and_then(|v| String::from_utf8(v).ok())\n\n .and_then(|s| serde_json::to_vec(&serde_json::Value::String(s)).ok())\n\n .unwrap(),\n\n )),\n\n _ => Err(Error::UnknownEvent),\n\n }\n\n }\n\n fn is_local(&self, event: &[u8]) -> Result<bool, Error> {\n\n match serde_json::from_slice(&event) {\n\n Ok(Event::Get { .. }) => Ok(true),\n\n Ok(Event::Put { .. }) => Ok(false),\n\n Ok(Event::Cas { .. }) => Ok(false),\n\n Ok(Event::Delete { .. }) => Ok(false),\n\n _ => Err(Error::UnknownEvent),\n\n }\n\n }\n\n}\n", "file_path": "src/service/kv.rs", "rank": 45, "score": 70002.67598590982 }, { "content": " ))\n\n } else {\n\n Ok((409, serde_json::to_vec(&serde_json::Value::Null).unwrap()))\n\n }\n\n } else {\n\n let old = check_value.and_then(|c| String::from_utf8(c).ok());\n\n let new = String::from_utf8(store_value).ok();\n\n let msg = serde_json::to_value(&PSEvent::Cas {\n\n scope: self.scope,\n\n key: String::from_utf8(key).unwrap_or_default(),\n\n new: new.clone().unwrap_or_default(),\n\n old,\n\n })\n\n .unwrap();\n\n pubsub\n\n .send(pubsub::Msg::Msg {\n\n channel: \"kv\".into(),\n\n msg: msg,\n\n })\n\n .await\n", "file_path": "src/service/kv.rs", "rank": 46, "score": 69998.86661173122 }, { "content": " {\n\n let conflict = conflict.and_then(|o| String::from_utf8(o).ok());\n\n let msg = serde_json::to_value(&PSEvent::CasConflict {\n\n scope: self.scope,\n\n key: String::from_utf8(key).unwrap_or_default(),\n\n new: String::from_utf8(store_value).unwrap_or_default(),\n\n conflict: conflict.clone(),\n\n })\n\n .unwrap();\n\n pubsub\n\n .send(pubsub::Msg::Msg {\n\n channel: \"kv\".into(),\n\n msg: msg,\n\n })\n\n .await\n\n .unwrap();\n\n if let Some(conflict) = conflict {\n\n Ok((\n\n 409,\n\n serde_json::to_vec(&serde_json::Value::String(conflict)).unwrap(),\n", "file_path": "src/service/kv.rs", "rank": 47, "score": 69998.7477664063 }, { "content": " scope: u16,\n\n key: String,\n\n new: String,\n\n conflict: Option<String>,\n\n },\n\n Delete {\n\n scope: u16,\n\n key: String,\n\n old: Option<String>,\n\n },\n\n}\n\npub struct Service {\n\n scope: u16,\n\n logger: Logger,\n\n}\n\n\n\nimpl Service {\n\n pub fn new(logger: &Logger, scope: u16) -> Self {\n\n Self {\n\n scope,\n", "file_path": "src/service/kv.rs", "rank": 48, "score": 69998.70798161671 }, { "content": " logger: logger.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum Event {\n\n Get {\n\n key: Vec<u8>,\n\n },\n\n Put {\n\n key: Vec<u8>,\n\n value: Vec<u8>,\n\n },\n\n Cas {\n\n key: Vec<u8>,\n\n check_value: Option<Vec<u8>>,\n\n store_value: Vec<u8>,\n\n },\n\n Delete {\n", "file_path": "src/service/kv.rs", "rank": 49, "score": 69992.23951913981 }, { "content": "\n\n if let Some(old) = old {\n\n Ok((201, serde_json::to_vec(&old).unwrap()))\n\n } else {\n\n Ok((201, serde_json::to_vec(&serde_json::Value::Null).unwrap()))\n\n }\n\n }\n\n Ok(Event::Cas {\n\n key,\n\n check_value,\n\n store_value,\n\n }) => {\n\n if let Some(conflict) = storage\n\n .cas(\n\n self.scope,\n\n &key,\n\n check_value.as_ref().map(|v| v.as_slice()),\n\n &store_value,\n\n )\n\n .await\n", "file_path": "src/service/kv.rs", "rank": 50, "score": 69991.76281612825 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not 
use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse super::*;\n\nuse crate::{pubsub, storage, ServiceId};\n\nuse async_std::sync::Mutex;\n\nuse async_trait::async_trait;\n\nuse futures::SinkExt;\n\nuse raft::RawNode;\n", "file_path": "src/service/kv.rs", "rank": 51, "score": 69990.64409512808 }, { "content": " key: Vec<u8>,\n\n },\n\n}\n\n\n\nimpl Event {\n\n pub fn get(key: Vec<u8>) -> Vec<u8> {\n\n serde_json::to_vec(&Event::Get { key }).unwrap()\n\n }\n\n pub fn put(key: Vec<u8>, value: Vec<u8>) -> Vec<u8> {\n\n serde_json::to_vec(&Event::Put { key, value }).unwrap()\n\n }\n\n pub fn cas(key: Vec<u8>, check_value: Option<Vec<u8>>, store_value: Vec<u8>) -> Vec<u8> {\n\n serde_json::to_vec(&Event::Cas {\n\n key,\n\n check_value,\n\n store_value,\n\n })\n\n .unwrap()\n\n }\n\n pub fn delete(key: Vec<u8>) -> Vec<u8> {\n", "file_path": "src/service/kv.rs", "rank": 52, "score": 69988.83512321199 }, { "content": "pub(crate) struct Handler {\n\n network: UnboundedSender<UrMsg>,\n\n}\n\nimpl Handler {\n\n pub fn new(network: UnboundedSender<UrMsg>) -> Self {\n\n Self { network }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl interceptor::Intercept for Handler {\n\n async fn inbound(&mut self, msg: HandlerInboundMessage) -> interceptor::Reply {\n\n if let Some(service_id) = msg.service_id {\n\n if self\n\n .network\n\n .send(UrMsg::Protocol(ProtocolMessage::Event {\n\n id: msg.id,\n\n service_id,\n\n event: msg.data,\n\n reply: msg.outbound_channel,\n", "file_path": "src/protocol/network.rs", "rank": 53, "score": 69215.31487341243 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::network::ws::{ProtocolMessage, UrMsg};\n\nuse async_trait::async_trait;\n\nuse futures::channel::mpsc::UnboundedSender;\n\nuse futures::SinkExt;\n\nuse protocol_driver::{interceptor, DriverErrorType, HandlerInboundMessage};\n\n\n", "file_path": "src/protocol/network.rs", "rank": 54, "score": 69200.65843097266 }, { "content": " }))\n\n .await\n\n .is_ok()\n\n {\n\n interceptor::Reply::Terminate\n\n } else {\n\n interceptor::Reply::Err(DriverErrorType::SystemError)\n\n }\n\n } else {\n\n interceptor::Reply::Err(DriverErrorType::LogicalError)\n\n }\n\n }\n\n}\n", "file_path": "src/protocol/network.rs", "rank": 55, "score": 69190.038523311 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nenum Request {\n\n Subscribe { channel: String },\n\n}\n\n\n", "file_path": "src/protocol/pubsub.rs", "rank": 56, "score": 67164.98749936814 }, { "content": "#[derive(Deserialize, 
Serialize, Debug)]\n\nenum Reply {\n\n Subscribed { channel: String },\n\n}\n\n\n\n// FIXME: collect dead destinations\n\n// FIXME: guarantee unique ids\n\npub struct Handler {\n\n pubsub: pubsub::Channel,\n\n}\n\n\n\nimpl Handler {\n\n pub fn new(pubsub: pubsub::Channel) -> Self {\n\n Self { pubsub }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl interceptor::Intercept for Handler {\n\n async fn inbound(&mut self, msg: HandlerInboundMessage) -> interceptor::Reply {\n\n match dbg!(serde_json::from_slice(&msg.data)) {\n", "file_path": "src/protocol/pubsub.rs", "rank": 57, "score": 67164.98749936814 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nenum Request {\n\n Get {\n\n key: String,\n\n rid: RequestId,\n\n },\n\n Put {\n\n key: String,\n\n store: String,\n\n rid: RequestId,\n\n },\n\n Delete {\n\n key: String,\n\n rid: RequestId,\n\n },\n\n Cas {\n\n key: String,\n\n check: Option<String>,\n\n store: String,\n\n rid: RequestId,\n\n },\n", "file_path": "src/protocol/kv.rs", "rank": 58, "score": 66906.17564161558 }, { "content": "#[cfg(not(feature = \"json-proto\"))]\n\nfn encode_ws(msg: RaftMessage) -> Bytes {\n\n use protobuf::Message;\n\n msg.write_to_bytes().unwrap().into()\n\n}\n\n\n\nimpl Network {\n\n pub fn new(\n\n logger: &Logger,\n\n id: NodeId,\n\n ws_endpoint: &str,\n\n rest_endpoint: Option<&str>,\n\n peers: Vec<String>,\n\n pubsub: pubsub::Channel,\n\n ) -> Self {\n\n let (tx, rx) = unbounded();\n\n\n\n for peer in peers {\n\n let logger = logger.clone();\n\n let tx = tx.clone();\n\n task::spawn(client::remote_endpoint(peer, tx, logger));\n", "file_path": "src/network/ws.rs", "rank": 59, "score": 66767.3142796004 }, { "content": "#[cfg(not(feature = \"json-proto\"))]\n\nfn decode_ws(bin: &[u8]) -> RaftMessage {\n\n use protobuf::Message;\n\n let mut msg = RaftMessage::default();\n\n msg.merge_from_bytes(bin).unwrap();\n\n msg\n\n}\n\n\n", "file_path": "src/network/ws.rs", "rank": 60, "score": 66767.3142796004 }, { "content": " error!(logger, \"no handoff in progress for data vnode {}\", vnode)\n\n }\n\n },\n\n Some(Task::HandoffInEnd { vnode }) => {\n\n if let Some(node) = state.vnodes.get_mut(&vnode) {\n\n if let Some(ref mut handoff) = &mut node.handoff {\n\n assert_eq!(handoff.direction, handoff::Direction::Inbound);\n\n } else {\n\n error!(logger, \"no handoff in progress for data vnode {}\", vnode)\n\n }\n\n node.handoff = None;\n\n node.data.push(id.to_string());\n\n } else {\n\n error!(logger, \"no handoff in progress for data vnode {}\", vnode)\n\n }\n\n }\n\n Some(Task::Assign{vnodes: ids}) => {\n\n info!(logger, \"Initializing with {:?}\", ids);\n\n let my_id = &id;\n\n for id in ids {\n", "file_path": "mring-node/src/vnode.rs", "rank": 61, "score": 66542.97549836323 }, { "content": " state.vnodes.insert(\n\n id,\n\n VNode {\n\n handoff: None,\n\n id,\n\n data: vec![my_id.to_string()],\n\n },\n\n );\n\n }\n\n }\n\n None => return false,\n\n },\n\n complete => return false,\n\n default => ()\n\n }\n\n true\n\n}\n\n\n\npub(crate) async fn run(\n\n logger: Logger,\n", "file_path": "mring-node/src/vnode.rs", "rank": 62, "score": 66542.38368253647 }, { "content": ") {\n\n match cmd {\n\n Some(Cmd::GetHandoffData {\n\n vnode,\n\n chunk,\n\n mut reply,\n\n }) => {\n\n if let Some(vnode) = state.vnodes.get_mut(&vnode) {\n\n if let Some(ref mut handoff) = vnode.handoff {\n\n assert_eq!(chunk, handoff.chunk);\n\n assert_eq!(handoff.direction, handoff::Direction::Outbound);\n\n if let Some(data) = vnode.data.get(chunk as usize) {\n\n reply.send((chunk, 
vec![data.clone()])).await.unwrap();\n\n } else {\n\n reply.send((chunk, vec![])).await.unwrap();\n\n };\n\n handoff.chunk += 1;\n\n } else {\n\n info!(logger, \"Not in a migraiton\");\n\n }\n", "file_path": "mring-node/src/vnode.rs", "rank": 63, "score": 66538.26760691272 }, { "content": " Some(Cmd::FinishHandoff { vnode }) => {\n\n let v = state.vnodes.remove(&vnode).unwrap();\n\n let m = v.handoff.unwrap();\n\n assert_eq!(m.direction, handoff::Direction::Outbound);\n\n }\n\n None => (),\n\n }\n\n}\n\n\n\nasync fn handle_tick(\n\n logger: &Logger,\n\n id: &str,\n\n state: &mut State,\n\n tasks: &mut Receiver<Task>,\n\n cnc_tx: &Sender<Cmd>,\n\n) -> bool {\n\n select! {\n\n task = tasks.next() =>\n\n match task {\n\n Some(Task::Update{next}) => {\n", "file_path": "mring-node/src/vnode.rs", "rank": 64, "score": 66536.89036769941 }, { "content": " Some(Task::HandoffInStart { vnode, src }) => {\n\n state.vnodes.remove(&vnode);\n\n state.vnodes.insert(vnode, VNode{id: vnode, data: vec![], handoff: Some(handoff::Handoff{partner: src, chunk: 0, direction: handoff::Direction::Inbound})});\n\n }\n\n Some(Task::HandoffIn { mut data, vnode, chunk }) => {\n\n info!(\n\n logger,\n\n \"accepting vnode {} with: {:?}\", vnode, data\n\n );\n\n if let Some(node) = state.vnodes.get_mut(&vnode) {\n\n if let Some(ref mut handoff) = &mut node.handoff {\n\n assert_eq!(handoff.chunk, chunk);\n\n assert_eq!(handoff.direction, handoff::Direction::Inbound);\n\n node.data.append(&mut data);\n\n handoff.chunk = chunk + 1;\n\n } else {\n\n error!(logger, \"no handoff in progress for data vnode {}\", vnode)\n\n\n\n }\n\n } else {\n", "file_path": "mring-node/src/vnode.rs", "rank": 65, "score": 66535.71973127361 }, { "content": " state.update_ring(next);\n\n }\n\n Some(Task::HandoffOut { target, vnode }) => {\n\n if let Some(vnode) = state.vnodes.get_mut(&vnode){\n\n info!(\n\n logger,\n\n \"relocating vnode {} to node {}\", vnode.id, target\n\n );\n\n assert!(vnode.handoff.is_none());\n\n vnode.handoff = Some(handoff::Handoff {\n\n partner: target.clone(),\n\n chunk: 0,\n\n direction: handoff::Direction::Outbound\n\n\n\n });\n\n if let Ok(worker) = handoff::Worker::new(logger.clone(), id.to_string(), target, vnode.id, cnc_tx.clone()).await {\n\n task::spawn(worker.handoff());\n\n }\n\n }\n\n },\n", "file_path": "mring-node/src/vnode.rs", "rank": 66, "score": 66532.89560234584 }, { "content": " } else {\n\n info!(logger, \"Unknown vnode\");\n\n }\n\n }\n\n Some(Cmd::CancelHandoff { vnode, target }) => {\n\n if let Some(node) = state.vnodes.get_mut(&vnode) {\n\n assert!(node.handoff.is_some());\n\n node.handoff = None;\n\n warn!(\n\n logger,\n\n \"Canceling handoff of vnode {} to {} - requeueing to restart\", vnode, target\n\n );\n\n tasks_tx\n\n .send(Task::HandoffOut { target, vnode })\n\n .await\n\n .unwrap();\n\n } else {\n\n info!(logger, \"Unknown vnode\");\n\n }\n\n }\n", "file_path": "mring-node/src/vnode.rs", "rank": 67, "score": 66532.57541049605 }, { "content": " id: String,\n\n mut tasks: Receiver<Task>,\n\n mut tasks_tx: Sender<Task>,\n\n) {\n\n let mut state = State::default();\n\n\n\n let (cnc_tx, mut cnc_rx) = channel(crate::CHANNEL_SIZE);\n\n\n\n let mut ticks = async_std::stream::interval(Duration::from_secs(1));\n\n loop {\n\n select! {\n\n cmd = cnc_rx.next() => handle_cmd(&logger, cmd, &mut state, &mut tasks_tx).await,\n\n tick = ticks.next().fuse() => if ! 
handle_tick(&logger, &id, &mut state, &mut tasks, &cnc_tx).await {\n\n break\n\n },\n\n }\n\n }\n\n}\n", "file_path": "mring-node/src/vnode.rs", "rank": 68, "score": 66532.56641786419 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::handoff;\n\nuse async_std::task;\n\nuse futures::channel::mpsc::{channel, Receiver, Sender};\n\nuse futures::{select, FutureExt, SinkExt, StreamExt};\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse slog::Logger;\n\nuse std::collections::HashMap;\n\nuse std::time::Duration;\n\nuse uring_common::MRingNodes;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]\n", "file_path": "mring-node/src/vnode.rs", "rank": 69, "score": 66531.31169318306 }, { "content": " },\n\n HandoffIn {\n\n vnode: u64,\n\n chunk: u64,\n\n data: Vec<String>,\n\n },\n\n HandoffInEnd {\n\n vnode: u64,\n\n },\n\n}\n\n\n\npub(crate) enum Cmd {\n\n GetHandoffData {\n\n vnode: u64,\n\n chunk: u64,\n\n reply: Sender<(u64, Vec<String>)>,\n\n },\n\n FinishHandoff {\n\n vnode: u64,\n\n },\n\n CancelHandoff {\n\n vnode: u64,\n\n target: String,\n\n },\n\n}\n\n\n", "file_path": "mring-node/src/vnode.rs", "rank": 70, "score": 66530.0778305325 }, { "content": "use async_std::task;\n\nuse futures::channel::mpsc::channel;\n\nuse slog::Drain;\n\nuse std::env;\n\n\n\nconst CHANNEL_SIZE: usize = 64usize;\n\n\n\n#[macro_use]\n\nextern crate slog;\n\n\n", "file_path": "mring-node/src/main.rs", "rank": 71, "score": 66498.39083725939 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#![recursion_limit = \"2048\"]\n\n\n\nmod handoff;\n\nmod uring;\n\nmod vnode;\n\n\n", "file_path": "mring-node/src/main.rs", "rank": 72, "score": 66486.08198646472 }, { "content": " tasks_rx,\n\n tasks_tx.clone(),\n\n ));\n\n\n\n task::spawn(handoff::listener(\n\n logger.clone(),\n\n local.to_string(),\n\n tasks_tx.clone(),\n\n ));\n\n\n\n task::block_on(uring::run(logger, local, remote, tasks_tx))\n\n}\n", "file_path": "mring-node/src/main.rs", "rank": 73, "score": 66482.00043188705 }, { "content": "fn unerror(r: Result<Response>) -> tide::Result {\n\n Ok(match r {\n\n Ok(r) => r,\n\n Err(e) => e.into(),\n\n })\n\n}\n\n\n", "file_path": "src/network/ws/rest.rs", "rank": 74, "score": 64074.81361360657 }, { "content": "fn reply(tx: Sender<WsMessage>) -> Reply {\n\n Reply(RequestId(666), tx)\n\n}\n\n\n\nasync 
fn request(cx: Request<Node>, req: UrMsg, mut rx: Receiver<WsMessage>) -> Result<Response> {\n\n cx.state().tx.unbounded_send(req)?;\n\n rx.next()\n\n .await\n\n .ok_or(StatusCode::INTERNAL_SERVER_ERROR.into())\n\n .and_then(|msg| match msg {\n\n WsMessage::Reply(code, r) => response_json(code, r.data),\n\n _ => unreachable!(),\n\n })\n\n}\n\n\n", "file_path": "src/network/ws/rest.rs", "rank": 75, "score": 63296.231481508745 }, { "content": " )\n\n .await\n\n}\n\n\n\npub(crate) async fn delete(cx: Request<Node>) -> Result<Response> {\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n let key: String = cx.param(\"id\").map_err(param_err)?;\n\n let id = key.clone().into_bytes();\n\n request(cx, UrMsg::Delete(id.clone(), reply(tx)), rx).await\n\n}\n", "file_path": "src/network/ws/rest/kv.rs", "rank": 76, "score": 63190.42904389448 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n// use crate::{NodeId, KV};\n\n\n\nuse super::*;\n\nuse futures::channel::mpsc::channel;\n\nuse tide::{Request, Response};\n\n\n", "file_path": "src/network/ws/rest/kv.rs", "rank": 77, "score": 63179.57244262764 }, { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n", "file_path": "src/network/ws/rest.rs", "rank": 78, "score": 59630.616536011636 }, { "content": " def set_node_id(self, id):\n", "file_path": "contrib/__init__.py", "rank": 79, "score": 53906.50240084754 }, { "content": "type WSStream = async_tungstenite::WebSocketStream<TcpStream>;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]\n\npub(crate) enum Direction {\n\n Inbound,\n\n Outbound,\n\n}\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]\n\npub(crate) struct Handoff {\n\n pub partner: String,\n\n pub chunk: u64,\n\n pub direction: Direction,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]\n\npub(crate) enum Message {\n\n HandoffStart {\n\n src: String,\n\n vnode: u64,\n\n },\n", "file_path": "mring-node/src/handoff.rs", "rank": 80, "score": 46788.87950106736 }, { "content": "type WSStream = async_tungstenite::WebSocketStream<TcpStream>;\n\n\n\nmacro_rules! 
eat_error_and_blow {\n\n ($l:expr, $e:expr) => {\n\n match $e {\n\n Err(e) => {\n\n error!($l, \"[WS Error] {}\", e);\n\n panic!(format!(\"{}: {:?}\", e, e));\n\n }\n\n Ok(v) => v,\n\n }\n\n };\n\n}\n\n\n\npub(crate) struct Connection {\n\n // my_id: u64,\n\n remote_id: NodeId,\n\n handler: UnboundedSender<UrMsg>,\n\n tx: Sender<WsMessage>,\n\n rx: Receiver<WsMessage>,\n", "file_path": "src/network/ws/client.rs", "rank": 81, "score": 46684.72246157605 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse raft::Error as RaftError;\n\nuse std::fmt;\n\npub type Result<T> = std::result::Result<T, Error>;\n\n#[derive(Debug)]\n\npub enum Error {\n\n Raft(RaftError),\n", "file_path": "src/errors.rs", "rank": 82, "score": 37377.939667645835 }, { "content": "}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Raft(e) => write!(f, \"{}\", e),\n\n }\n\n }\n\n}\n\nimpl std::error::Error for Error {}\n", "file_path": "src/errors.rs", "rank": 83, "score": 37374.14491517465 }, { "content": " let msg = serde_json::to_value(&msg).unwrap();\n\n let channel = channel.to_string();\n\n Self::Msg { channel, msg }\n\n }\n\n}\n\n\n\nasync fn pubsub_loop(logger: Logger, mut rx: Receiver<Msg>) {\n\n let mut subscriptions: HashMap<String, Vec<Sender<SubscriberMsg>>> = HashMap::new();\n\n while let Some(msg) = rx.next().await {\n\n match msg {\n\n Msg::Subscribe { channel, tx } => {\n\n info!(logger, \"Sub {}\", channel);\n\n let subscriptions = subscriptions.entry(channel).or_default();\n\n subscriptions.push(tx);\n\n }\n\n Msg::Msg { channel, msg } => {\n\n info!(logger, \"Msg: {} >> {}\", channel, msg);\n\n let subscriptions = subscriptions.entry(channel.clone()).or_default();\n\n let mut s1 = Vec::with_capacity(subscriptions.len());\n\n for mut tx in subscriptions.drain(..) 
{\n", "file_path": "src/pubsub.rs", "rank": 84, "score": 37359.17681492321 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n// use crate::{NodeId, KV};\n\nuse async_std::task;\n\nuse futures::channel::mpsc::{channel, Receiver, Sender};\n\nuse futures::{SinkExt, StreamExt};\n\nuse serde::Serialize;\n\nuse slog::Logger;\n\nuse std::collections::HashMap;\n", "file_path": "src/pubsub.rs", "rank": 85, "score": 37358.835061254824 }, { "content": " let channel = channel.clone();\n\n let msg = msg.clone();\n\n if tx.send(SubscriberMsg::Msg { channel, msg }).await.is_ok() {\n\n s1.push(tx)\n\n }\n\n }\n\n std::mem::swap(subscriptions, &mut s1);\n\n }\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn start(logger: &Logger) -> Channel {\n\n let logger = logger.clone();\n\n let (tx, rx) = channel(crate::CHANNEL_SIZE);\n\n\n\n task::spawn(pubsub_loop(logger, rx));\n\n tx\n\n}\n", "file_path": "src/pubsub.rs", "rank": 86, "score": 37354.740113225846 }, { "content": "use ws_proto::SubscriberMsg;\n\n\n\npub type Channel = Sender<Msg>;\n\n\n\npub enum Msg {\n\n Subscribe {\n\n channel: String,\n\n tx: Sender<SubscriberMsg>,\n\n },\n\n Msg {\n\n channel: String,\n\n msg: serde_json::Value,\n\n },\n\n}\n\n\n\nimpl Msg {\n\n pub fn new<T>(channel: &str, msg: T) -> Self\n\n where\n\n T: Serialize,\n\n {\n", "file_path": "src/pubsub.rs", "rank": 87, "score": 37352.763303546264 }, { "content": "// Copyright 2018-2020, Wayfair GmbH\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse slog::Logger;\n\n\n\npub const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\n\n", "file_path": "src/version.rs", "rank": 88, "score": 37350.32067254022 }, { "content": " RaftNetworkMsg::Event(eid, sid, data) => {\n\n if let Some(mut service) = self.services.get_mut(&sid) {\n\n if service.is_local(&data).unwrap() {\n\n let (code, value) = service.execute(raft, &mut self.pubsub, data).await.unwrap();\n\n self.network.event_reply(eid, code, value).await.unwrap();\n\n } else {\n\n let pid = self.next_pid();\n\n let from = self.id;\n\n if let Err(e) = self.propose_event(from, pid, sid, eid, data).await {\n\n error!(self.logger, \"Post forward error: {}\", e);\n\n self.network.event_reply(eid, 500, serde_json::to_vec(&format!(\"{}\", e)).unwrap()).await.unwrap();\n\n } else {\n\n self.pending_acks.insert(pid, eid);\n\n }\n\n }\n\n } else {\n\n error!(self.logger, \"Unknown Service: {}\", sid);\n\n 
self.network.event_reply(eid, 500, serde_json::to_vec(&format!(\"Service {} not known\", sid)).unwrap()).await.unwrap();\n\n }\n\n }\n", "file_path": "src/raft_node.rs", "rank": 97, "score": 53.12906799084874 }, { "content": " // .unwrap()\n\n // .lock()\n\n // .await\n\n // .raft\n\n // .raft_log\n\n // .store;\n\n let (code, value) = service\n\n .execute(\n\n self.raft_group.as_ref().unwrap(),\n\n &mut self.pubsub,\n\n event.data,\n\n )\n\n .await\n\n .unwrap();\n\n if event.nid == Some(self.id) {\n\n self.network\n\n .event_reply(event.eid, code, value)\n\n .await\n\n .unwrap();\n\n }\n", "file_path": "src/raft_node.rs", "rank": 98, "score": 45.41523586052672 }, { "content": " MRingGetNodes(Reply),\n\n MRingAddNode(String, Reply),\n\n MRingRemoveNode(String, Reply),\n\n\n\n Protocol(ProtocolMessage),\n\n}\n\n\n\npub(crate) enum ProtocolMessage {\n\n Event {\n\n id: RequestId,\n\n service_id: ServiceId,\n\n event: Vec<u8>,\n\n reply: protocol_driver::HandlerOutboundChannelSender,\n\n },\n\n}\n\n\n\n#[async_trait]\n\nimpl NetworkTrait for Network {\n\n async fn event_reply(&mut self, id: EventId, code: u16, data: Vec<u8>) -> Result<(), Error> {\n\n if let Some(Reply(rid, mut sender)) = self.pending.remove(&id) {\n", "file_path": "src/network/ws.rs", "rank": 99, "score": 44.14096625801921 } ]
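Several of the snippets above (the REST request() helper, pubsub_loop, and the raft_node event handling) share one pattern: a request is pushed onto an mpsc channel together with a dedicated reply sender, and the caller awaits the answer on the matching receiver. The sketch below is not taken from that repository; it is a minimal, self-contained illustration of that request/reply-over-channels pattern using the same futures::channel::mpsc API, with hypothetical message and handler names.

use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::{SinkExt, StreamExt};

// Hypothetical request type: each request carries the sender half of a
// per-request reply channel, which the handler answers on.
enum Request {
    Get { key: String, reply: Sender<Option<String>> },
}

// Handler side: drain the shared request channel and answer each request
// on its embedded reply sender (here it simply echoes the key back).
async fn handle_requests(mut rx: Receiver<Request>) {
    while let Some(req) = rx.next().await {
        match req {
            Request::Get { key, mut reply } => {
                let _ = reply.send(Some(key)).await;
            }
        }
    }
}

// Caller side: create a one-off reply channel, send the request, await the answer.
async fn request(mut tx: Sender<Request>, key: &str) -> Option<String> {
    let (reply_tx, mut reply_rx) = channel(1);
    tx.send(Request::Get { key: key.to_string(), reply: reply_tx }).await.ok()?;
    reply_rx.next().await.flatten()
}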
Rust
playground/jwk/src/lib.rs
DIN-Foundation/bcs-ntnu-2021
99532334904dfce5f4c2e3fdd816be80c2f5a3c9
pub fn run(config: Config) -> Result<String, std::io::Error> {
    match config.cmd {
        CMD::Init{ path } => init(&path),
        CMD::Doc{ path } => doc(&path),
        CMD::Did{ path } => did(&path),
        CMD::Help => help()
    }
}

fn init(path: &str) -> Result<String, std::io::Error> {
    use std::io::Write;

    if !std::fs::metadata(root_path(path)).is_ok() {
        std::fs::create_dir_all(root_path(path))?;
    }

    if !std::fs::metadata(jwk_path(path)).is_ok() {
        use did_key::KeyMaterial;

        let mut csprng = rand::rngs::OsRng {};
        let private_key = ed25519_dalek::SecretKey::generate(&mut csprng).to_bytes();
        let did_key = did_key::Ed25519KeyPair::from_seed(&private_key);
        let jwk = publicprivatebytes_to_jwkstr(did_key.public_key_bytes(), did_key.private_key_bytes());

        let mut file = std::fs::File::create(jwk_path(path)).unwrap();
        file.write(jwk.as_bytes()).unwrap();

        Ok(format!("{} is ready", path))
    } else {
        Ok(format!("{} already exists", path))
    }
}

fn doc(path: &str) -> Result<String, std::io::Error> {
    use did_key::DIDCore;

    let jwk = std::fs::read(jwk_path(path))?;
    let jwkstr = String::from_utf8(jwk).unwrap();
    let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr);

    let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None);
    let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);
    let did_doc = serde_json::to_string_pretty(&did_doc).unwrap();

    Ok(format!("{}", did_doc))
}

fn did(path: &str) -> Result<String, std::io::Error> {
    use did_key::DIDCore;

    let jwk = std::fs::read(jwk_path(path))?;
    let jwkstr = String::from_utf8(jwk).unwrap();
    let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr);

    let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None);
    let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);
    let did = did_doc.id;

    Ok(format!("{}", did))
}

fn help() -> Result<String, std::io::Error> {
    Ok(String::from("
    Usage:
        didchat <path> <command>

        didchat <path> init
        didchat <path> doc
        didchat <path> did
"))
}

#[derive(Debug)]
enum CMD {
    Init{ path: String },
    Doc{ path: String },
    Did{ path: String },
    Help
}

pub struct Config {
    cmd: CMD,
}

impl Config {
    pub fn new(args: &[String]) -> Result<Config, std::io::Error> {
        let default_cmd = String::from("help");

        let path = args.get(1).unwrap_or(&default_cmd).clone();
        let cmd = args.get(2).unwrap_or(&default_cmd).clone();
        let cmd = if args.len() < 3 {
            eprintln!("Command missing!");
            default_cmd.clone()
        } else {
            cmd.clone()
        };

        let cmd: CMD = match &cmd[..] {
            "did" => { CMD::Did{ path } },
            "doc" => { CMD::Doc{ path } },
            "init" => { CMD::Init{ path } },
            "help" => CMD::Help,
            &_ => {
                eprintln!("{} not a valid command!", cmd);
                CMD::Help
            },
        };

        Ok(Config { cmd })
    }
}

fn root_path(path: &str) -> String {
    format!("{}/.didchat", path)
}

fn jwk_path(path: &str) -> String {
    format!("{}/.didchat/me.jwk", path)
}

fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String {
    let jwk = ssi::jwk::JWK {
        params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams {
            curve: "Ed25519".to_string(),
            public_key: ssi::jwk::Base64urlUInt(public),
            private_key: Some(ssi::jwk::Base64urlUInt(private)),
        }),
        public_key_use: None,
        key_operations: None,
        algorithm: None,
        key_id: None,
        x509_url: None,
        x509_certificate_chain: None,
        x509_thumbprint_sha1: None,
        x509_thumbprint_sha256: None
    };

    let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {
        Some(o)
    } else {
        None
    }).unwrap();

    serde_json::to_string(&jwk).unwrap()
}

fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) {
    let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap();

    let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {
        Some(o)
    } else {
        panic!("okp == None")
    }).unwrap();

    let privkey: Vec<u8> = if let Some(key) = okp.private_key {
        key.0
    } else {
        panic!("privkey == None")
    };

    (okp.public_key.0, privkey)
}
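The two helpers at the bottom of this file, publicprivatebytes_to_jwkstr and jwkstr_to_publicprivatebytes, are intended to be inverses of each other. The test sketch below is not part of the original file; it is a minimal round-trip check that could live in a #[cfg(test)] module inside this lib.rs, assuming the same ssi and serde_json dependencies. The byte values are arbitrary placeholders, not real key material.

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn jwk_roundtrip_preserves_key_bytes() {
        // Arbitrary placeholder bytes; no cryptography is performed here.
        let public = vec![1u8; 32];
        let private = vec![2u8; 32];

        // Serialize to a JWK string, then parse the bytes back out.
        let jwkstr = publicprivatebytes_to_jwkstr(public.clone(), private.clone());
        let (public2, private2) = jwkstr_to_publicprivatebytes(&jwkstr);

        assert_eq!(public, public2);
        assert_eq!(private, private2);
    }
}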
pub fn run(config: Config) -> Result<String, std::io::Error> { match config.cmd { CMD::Init{ path } => init(&path), CMD::Doc{ path } => doc(&path), CMD::Did{ path } => did(&path), CMD::Help => help() } } fn init(path: &str) -> Result<String, std::io::Error> { use std::io::Write; if !std::fs::metadata(root_path(path)).is_ok() { std::fs::create_dir_all(root_path(path))?; } if !std::fs::metadata(jwk_path(path)).is_ok() { use did_key::KeyMaterial; let mut csprng = rand::rngs::OsRng {}; let private_key = ed25519_dalek::SecretKey::generate(&mut csprng).to_bytes(); let did_key = did_key::Ed25519KeyPair::from_seed(&private_key); let jwk = publicprivatebytes_to_jwkstr(did_key.public_key_bytes(), did_key.private_key_bytes()); let mut file = std::fs::File::create(jwk_path(path)).unwrap(); file.write(jwk.as_bytes()).unwrap(); Ok(format!("{} is ready", path)) } else { Ok(format!("{} already exists", path)) } } fn doc(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did_doc = serde_json::to_string_pretty(&did_doc).unwrap(); Ok(format!("{}", did_doc)) } fn did(path: &str) -> Result<String, std::io::Error> { use did_key::DIDCore; let jwk = std::fs::read(jwk_path(path))?; let jwkstr = String::from_utf8(jwk).unwrap(); let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr); let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None); let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC); let did = did_doc.id; Ok(format!("{}", did)) } fn help() -> Result<String, std::io::Error> { Ok(String::from(" Usage: didchat <path> <command> didchat <path> init didchat <path> doc didchat <path> did ")) } #[derive(Debug)] enum CMD { Init{ path: String }, Doc{ path: String }, Did{ path: String }, Help } pub struct Config { cmd: CMD, } impl Config { pub fn new(args: &[String]) -> Result<Config, std::io::Error> { let default_cmd = String::from("help"); let path = args.get(1).unwrap_or(&default_cmd).clone(); let cmd = args.get(
} fn root_path(path: &str) -> String { format!("{}/.didchat", path) } fn jwk_path(path: &str) -> String { format!("{}/.didchat/me.jwk", path) } fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String { let jwk = ssi::jwk::JWK { params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams { curve: "Ed25519".to_string(), public_key: ssi::jwk::Base64urlUInt(public), private_key: Some(ssi::jwk::Base64urlUInt(private)), }), public_key_use: None, key_operations: None, algorithm: None, key_id: None, x509_url: None, x509_certificate_chain: None, x509_thumbprint_sha1: None, x509_thumbprint_sha256: None }; let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { None }).unwrap(); serde_json::to_string(&jwk).unwrap() } fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) { let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap(); let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() { Some(o) } else { panic!("okp == None") }).unwrap(); let privkey: Vec<u8> = if let Some(key) = okp.private_key { key.0 } else { panic!("privkey == None") }; (okp.public_key.0, privkey) }
2).unwrap_or(&default_cmd).clone(); let cmd = if args.len() < 3 { eprintln!("Command missing!"); default_cmd.clone() } else { cmd.clone() }; let cmd: CMD = match &cmd[..] { "did" => { CMD::Did{ path } }, "doc" => { CMD::Doc{ path } }, "init" => { CMD::Init{ path } }, "help" => CMD::Help, &_ => { eprintln!("{} not a valid command!", cmd); CMD::Help }, }; Ok(Config { cmd }) }
function_block-function_prefixed
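The three flattened code slices above appear to be, in order, a prefix of the lib.rs shown earlier that stops mid-expression inside args.get(...), a suffix covering the rest of the file, and a short middle that fills the gap inside Config::new; the strategy tag seems to name that split. Assuming that interpretation, the throwaway sketch below (not part of the repository) shows how such slices concatenate back into the original text, with toy strings standing in for the real fields.

// Hypothetical helper, not part of the repository: reassemble a
// prefix/middle/suffix split back into the original source text.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    format!("{}{}{}", prefix, middle, suffix)
}

fn main() {
    // Toy values standing in for the much longer slices above.
    let prefix = "let cmd = args.get(";
    let middle = "2).unwrap_or(&default_cmd).clone();";
    let suffix = " // rest of Config::new";

    assert_eq!(
        reassemble(prefix, middle, suffix),
        "let cmd = args.get(2).unwrap_or(&default_cmd).clone(); // rest of Config::new"
    );
}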
[ { "content": "//\n\n// Commands\n\n//\n\nfn init(path: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Create empty folders\n\n if !std::fs::metadata(root_path(path)).is_ok() {\n\n std::fs::create_dir_all(root_path(path))?;\n\n }\n\n if !std::fs::metadata(names_path(path)).is_ok() {\n\n std::fs::create_dir_all(names_path(path))?;\n\n }\n\n if !std::fs::metadata(dids_path(path)).is_ok() {\n\n std::fs::create_dir_all(dids_path(path))?;\n\n }\n\n if !std::fs::metadata(messages_path(path)).is_ok() {\n\n std::fs::create_dir_all(messages_path(path))?;\n\n }\n\n\n\n if !std::fs::metadata(didkey_jwk_path(path)).is_ok() {\n\n\n\n // 2. Generate jwk\n", "file_path": "playground/didvote/src/lib.rs", "rank": 4, "score": 245541.18569305277 }, { "content": "fn doc(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::DIDCore;\n\n\n\n // 1. Read jwk from file\n\n let jwk = std::fs::read(didkey_jwk_path(path))?;\n\n let jwkstr = String::from_utf8(jwk).unwrap();\n\n let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr);\n\n\n\n // 2. Transform public key to a did-document\n\n let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None);\n\n let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n\n\n // 3. Serialize did-document to json\n\n let did_doc = serde_json::to_string_pretty(&did_doc).unwrap();\n\n Ok(format!(\"{}\", did_doc))\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 5, "score": 245537.58097357844 }, { "content": "fn doc(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::DIDCore;\n\n use did_key::KeyMaterial;\n\n \n\n // 1. Read seed from file\n\n let seed = std::fs::read(seed_path(path)).unwrap();\n\n\n\n // 2. Transform seed to a did-document\n\n let public_private_keypair = did_key::Ed25519KeyPair::from_seed(&seed);\n\n let public_only_keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public_private_keypair.public_key_bytes(), None);\n\n let doc = public_only_keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n\n\n // 3. Serialize did-document to json\n\n let did_document = serde_json::to_string_pretty(&doc).unwrap();\n\n Ok(format!(\"{}\", did_document))\n\n}\n\n\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 6, "score": 245537.58097357844 }, { "content": "fn init(path: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Create empty folders\n\n if !std::fs::metadata(root_path(path)).is_ok() {\n\n std::fs::create_dir_all(root_path(path))?;\n\n }\n\n if !std::fs::metadata(names_path(path)).is_ok() {\n\n std::fs::create_dir_all(names_path(path))?;\n\n }\n\n if !std::fs::metadata(dids_path(path)).is_ok() {\n\n std::fs::create_dir_all(dids_path(path))?;\n\n }\n\n if !std::fs::metadata(messages_path(path)).is_ok() {\n\n std::fs::create_dir_all(messages_path(path))?;\n\n }\n\n\n\n if !std::fs::metadata(seed_path(path)).is_ok() {\n\n // 2. 
Generate seed\n\n let mut csprng = rand_core::OsRng{};\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 7, "score": 245537.58097357844 }, { "content": "fn didkey_jwk_path(path: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/didkey.jwk\");\n\n\n\n match path.to_str() {\n\n None => panic!(\"didkey_jwk_path({:?}) is not a valid UTF-8 sequence\", path),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 8, "score": 245393.41536860907 }, { "content": "fn did_path(path: &str, did: &str) -> String {\n\n format!(\"{}/.didchat/dids/{}\", path, did)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 11, "score": 221797.01502791548 }, { "content": "fn did_path(path: &str, did: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/dids/\")\n\n .join(did);\n\n\n\n match path.to_str() {\n\n None => panic!(\"did_path({:?}, {}) is not a valid UTF-8 sequence\", path, did),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 12, "score": 221797.01502791548 }, { "content": "fn name_path(path: &str, name: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/names/\")\n\n .join(name);\n\n\n\n match path.to_str() {\n\n None => panic!(\"name_path({:?}, {}) is not a valid UTF-8 sequence\", path, name),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 13, "score": 218118.9439555021 }, { "content": "fn name_path(path: &str, name: &str) -> String {\n\n format!(\"{}/.didchat/names/{}\", path, name)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 14, "score": 218118.9439555021 }, { "content": "fn names_path(path: &str) -> String {\n\n format!(\"{}/.didchat/names\", path)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 15, "score": 214689.70322434377 }, { "content": "fn messages_path(path: &str) -> String {\n\n\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/messages\");\n\n\n\n match path.to_str() {\n\n None => panic!(\"messages_path({:?}) is not a valid UTF-8 sequence\", path),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 16, "score": 214689.70322434377 }, { "content": "fn message_path(path: &str) -> String {\n\n let start = std::time::SystemTime::now();\n\n let since_epoch = start\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .expect(\"Time went backwards\").as_secs();\n\n\n\n let path = std::path::Path::new(path)\n\n .join(format!(\"./.didvote/messages/{}.dcem\", since_epoch));\n\n\n\n match path.to_str() {\n\n None => panic!(\"message_path({:?}, {}) is not a valid UTF-8 sequence\", path, since_epoch),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 17, "score": 214689.70322434377 }, { "content": "fn message_path(path: &str) -> String {\n\n let start = std::time::SystemTime::now();\n\n let since_the_epoch = start\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .expect(\"Time went backwards\");\n\n\n\n format!(\"{}/.didchat/messages/{}.dcem\", path, since_the_epoch.as_nanos())\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 18, "score": 214689.70322434377 }, { "content": "fn dids_path(path: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/dids\");\n\n\n\n match path.to_str() {\n\n None => 
panic!(\"dids_paths({:?}) is not a valid UTF-8 sequence\", path),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 19, "score": 214689.70322434377 }, { "content": "fn names_path(path: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote/names\");\n\n\n\n match path.to_str() {\n\n None => panic!(\"names_path({:?}) is not a valid UTF-8 sequence\", path),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 20, "score": 214689.70322434377 }, { "content": "fn messages_path(path: &str) -> String {\n\n format!(\"{}/.didchat/messages\", path)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 21, "score": 214689.7032243438 }, { "content": "fn root_path(path: &str) -> String {\n\n format!(\"{}/.didchat\", path)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 22, "score": 214689.7032243438 }, { "content": "fn seed_path(path: &str) -> String {\n\n format!(\"{}/.didchat/seed\", path)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 23, "score": 214689.7032243438 }, { "content": "//\n\n// Util\n\n//\n\nfn root_path(path: &str) -> String {\n\n let path = std::path::Path::new(path)\n\n .join(\"./.didvote\");\n\n\n\n match path.to_str() {\n\n None => panic!(\"root_path({:?}) is not a valid UTF-8 sequence\", path),\n\n Some(s) => s.to_string(),\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 24, "score": 214689.70322434377 }, { "content": "fn dids_path(path: &str) -> String {\n\n format!(\"{}/.didchat/dids\", path)\n\n}\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 25, "score": 214689.7032243438 }, { "content": "//\n\n// Searches a file, returning only the lines that match the query\n\n//\n\npub fn search_sensitive<'a>(query: &str, file: &'a str) -> Vec<&'a str> {\n\n let mut results = Vec::new();\n\n\n\n for line in file.lines() {\n\n if line.contains(query) {\n\n results.push(line);\n\n }\n\n }\n\n\n\n results\n\n}\n\n\n", "file_path": "playground/minigrep/src/lib.rs", "rank": 26, "score": 213420.46015897463 }, { "content": "//\n\n// Searches a file case-insensitively\n\n//\n\npub fn search_insensitive<'a>(query: &str, file: &'a str) -> Vec<&'a str> {\n\n let mut results = Vec::new();\n\n let query = query.to_lowercase();\n\n\n\n for line in file.lines() {\n\n if line.to_lowercase().contains(&query) {\n\n results.push(line);\n\n } \n\n }\n\n\n\n results\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn case_sensitive() {\n\n let query = \"Da doo\";\n", "file_path": "playground/minigrep/src/lib.rs", "rank": 27, "score": 213416.9928159332 }, { "content": "pub fn run(config: Config) -> Result<String, std::io::Error> {\n\n match config.cmd {\n\n CMD::Init{ path } => init(&path),\n\n CMD::Doc{ path } => doc(&path),\n\n CMD::Did{ path } => did(&path),\n\n CMD::Messages{ path } => messages(&path),\n\n CMD::Connect{ path, name, did } => connect(&path, &name, &did),\n\n CMD::Write{ path, name, message } => write(&path, &name, &message),\n\n CMD::Read{ path, encrypted_message } => read(&path, &encrypted_message),\n\n CMD::Vote{ name: _name, path: _path } => help(),\n\n CMD::Help => help()\n\n }\n\n}\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 28, "score": 209864.0016457013 }, { "content": "pub fn run(config: Config) -> Result<String, std::io::Error> {\n\n match config.cmd {\n\n CMD::Init{ path } => init(&path),\n\n CMD::Doc{ 
path } => doc(&path),\n\n CMD::Did{ path } => did(&path),\n\n CMD::Messages{ path } => messages(&path),\n\n CMD::Connect{ path, name, did } => connect(&path, &name, &did),\n\n CMD::Write{ path, name, message } => write(&path, &name, &message),\n\n CMD::Read{ path, encrypted_message } => read(&path, &encrypted_message),\n\n CMD::Help => help()\n\n }\n\n}\n\n\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 29, "score": 209864.0016457013 }, { "content": "fn connect(path: &str, name: &str, did: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 2. Create 'name' -> 'did' mapping\n\n let mut file = std::fs::File::create(name_path(path, name)).unwrap();\n\n file.write(did.as_bytes()).unwrap();\n\n\n\n // 3. Create 'did' to 'name' mapping\n\n let mut file = std::fs::File::create(did_path(path, did)).unwrap();\n\n file.write(name.as_bytes()).unwrap();\n\n\n\n Ok(format!(\"{}\\n{}\", name_path(path, name), did_path(path, did)))\n\n}\n\n\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 30, "score": 196866.18250914186 }, { "content": "fn connect(path: &str, name: &str, did: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 2. Create 'name' -> 'did' mapping\n\n let mut file = std::fs::File::create(name_path(path, name)).unwrap();\n\n file.write(did.as_bytes()).unwrap();\n\n\n\n // 3. Create 'did' to 'name' mapping\n\n let mut file = std::fs::File::create(did_path(path, did)).unwrap();\n\n file.write(name.as_bytes()).unwrap();\n\n\n\n Ok(format!(\"{}\\n{}\", name_path(path, name), did_path(path, did)))\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 31, "score": 196866.1825091419 }, { "content": "fn write(path: &str, name: &str, message: &str) -> Result<String, std::io::Error> {\n\n use did_key::KeyMaterial;\n\n use std::io::Write;\n\n\n\n // 1. Read from-key\n\n let from_seed = std::fs::read(seed_path(path)).unwrap();\n\n let from_key = did_key::Ed25519KeyPair::from_seed(&from_seed);\n\n \n\n // 2. Read to-key\n\n let to_did = std::fs::read_to_string(name_path(path, name)).unwrap();\n\n let to_key = did_key::resolve(&to_did).unwrap();\n\n let to_key = did_key::Ed25519KeyPair::from_public_key(&to_key.public_key_bytes());\n\n\n\n // 3. Encrypt message with from_key, to keep message history in local file\n\n let encrypted_message = encrypt_didcomm(&from_key, &from_key, message).unwrap();\n\n let mut file = std::fs::File::create(message_path(path)).unwrap();\n\n file.write(encrypted_message.as_bytes()).unwrap();\n\n\n\n // 4. Encrypt message with to_key, to prepare it for transmission\n\n let encrypted_message = encrypt_didcomm(&from_key, &to_key, message).unwrap();\n\n \n\n Ok(format!(\"{}\", &encrypted_message))\n\n}\n\n\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 32, "score": 195266.29245407856 }, { "content": "fn write(path: &str, name: &str, message: &str) -> Result<String, std::io::Error> {\n\n use did_key::KeyMaterial;\n\n use std::io::Write;\n\n\n\n // 1. Read from-key\n\n let jwk = std::fs::read(didkey_jwk_path(path))?;\n\n let jwkstr = String::from_utf8(jwk).unwrap();\n\n let (_, private) = jwkstr_to_publicprivatebytes(&jwkstr);\n\n let from_key = did_key::Ed25519KeyPair::from_seed(&private);\n\n\n\n // 2. Read to-key\n\n let to_did = std::fs::read_to_string(name_path(path, name)).unwrap();\n\n let to_key = did_key::resolve(&to_did).unwrap();\n\n let to_key = did_key::Ed25519KeyPair::from_public_key(&to_key.public_key_bytes());\n\n\n\n // 3. 
Encrypt message with from_key, to keep message history in local file\n\n let encrypted_message = encrypt_didcomm(&from_key, &from_key, message).unwrap();\n\n let mut file = std::fs::File::create(message_path(path)).unwrap();\n\n file.write(encrypted_message.as_bytes()).unwrap();\n\n\n\n // 4. Encrypt message with to_key, to prepare it for transmission\n\n let encrypted_message = encrypt_didcomm(&from_key, &to_key, message).unwrap();\n\n\n\n Ok(format!(\"{}\", &encrypted_message))\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 33, "score": 195266.29245407856 }, { "content": "fn read(path: &str, encrypted_message: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n use did_key::KeyMaterial;\n\n\n\n // 1. Store incomming message to file, to keep the message history\n\n let message_fpath = message_path(path);\n\n let message_fpath = std::path::Path::new(&message_fpath);\n\n let mut file = std::fs::File::create(message_fpath).unwrap();\n\n file.write(encrypted_message.as_bytes()).unwrap();\n\n\n\n // 2. Get to-key\n\n let to_seed = std::fs::read(seed_path(path)).unwrap();\n\n let to_key = did_key::Ed25519KeyPair::from_seed(&to_seed);\n\n \n\n // 3. Get from-key\n\n let from_jwe: didcomm_rs::Jwe = serde_json::from_str(&encrypted_message).unwrap();\n\n let from_did = from_jwe.from().as_ref().unwrap();\n\n let from_key = did_key::resolve(&from_did).unwrap();\n\n let from_key = did_key::Ed25519KeyPair::from_public_key(&from_key.public_key_bytes());\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 34, "score": 193716.47018309892 }, { "content": "fn read(path: &str, encrypted_message: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n use did_key::KeyMaterial;\n\n\n\n // 1. Store incomming message to file, to keep the message history\n\n let message_fpath = message_path(path);\n\n let message_fpath = std::path::Path::new(&message_fpath);\n\n let mut file = std::fs::File::create(message_fpath).unwrap();\n\n file.write(encrypted_message.as_bytes()).unwrap();\n\n\n\n // 2. Get to-key\n\n let jwk = std::fs::read(didkey_jwk_path(path))?;\n\n let jwkstr = String::from_utf8(jwk).unwrap();\n\n let (_, private) = jwkstr_to_publicprivatebytes(&jwkstr);\n\n let to_key = did_key::Ed25519KeyPair::from_seed(&private);\n\n\n\n // 3. Get from-key\n\n let from_jwe: didcomm_rs::Jwe = serde_json::from_str(&encrypted_message).unwrap();\n\n let from_did = from_jwe.from().as_ref().unwrap();\n\n let from_key = did_key::resolve(&from_did).unwrap();\n", "file_path": "playground/didvote/src/lib.rs", "rank": 35, "score": 193716.47018309892 }, { "content": "fn did(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::DIDCore;\n\n\n\n // 1. Read jwk from file\n\n let jwk = std::fs::read(didkey_jwk_path(path))?;\n\n let jwkstr = String::from_utf8(jwk).unwrap();\n\n let (public,_) = jwkstr_to_publicprivatebytes(&jwkstr);\n\n\n\n // 2. Transform public key to a did-document\n\n let keypair = did_key::from_existing_key::<did_key::Ed25519KeyPair>(&public, None);\n\n let did_doc = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n\n\n let did = did_doc.id;\n\n\n\n // 3. Print did\n\n Ok(format!(\"{}\", did))\n\n}\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 36, "score": 191579.6861994822 }, { "content": "fn did(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::DIDCore;\n\n\n\n // 1. Read seed from file\n\n let seed = std::fs::read(seed_path(path)).unwrap();\n\n\n\n // 2. 
Transform seed to a did\n\n let keypair = did_key::Ed25519KeyPair::from_seed(&seed);\n\n let diddoc: did_key::Document = keypair.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n let did = diddoc.id;\n\n\n\n // 3. Print did\n\n Ok(format!(\"{}\", did))\n\n}\n\n\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 37, "score": 191579.6861994822 }, { "content": "fn messages(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::KeyMaterial;\n\n\n\n let mut result = String::from(\"\");\n\n\n\n let mut entries: Vec<std::fs::DirEntry> = std::fs::read_dir(messages_path(path)).unwrap().filter_map(|f| f.ok()).collect();\n\n entries.sort_by_key(|e| e.path());\n\n\n\n // 1. Get to-key\n\n let jwk = std::fs::read(didkey_jwk_path(path))?;\n\n let jwkstr = String::from_utf8(jwk).unwrap();\n\n let (_, private) = jwkstr_to_publicprivatebytes(&jwkstr);\n\n let to_key = did_key::Ed25519KeyPair::from_seed(&private);\n\n\n\n for entry in entries {\n\n if entry.path().is_dir() {\n\n continue;\n\n }\n\n let encrypted_message = std::fs::read_to_string(entry.path())?;\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 38, "score": 189594.78659930316 }, { "content": "fn messages(path: &str) -> Result<String, std::io::Error> {\n\n use did_key::KeyMaterial;\n\n\n\n let mut result = String::from(\"\");\n\n\n\n let mut entries: Vec<std::fs::DirEntry> = std::fs::read_dir(messages_path(path)).unwrap().filter_map(|f| f.ok()).collect();\n\n entries.sort_by_key(|e| e.path());\n\n\n\n for entry in entries {\n\n if entry.path().is_dir() {\n\n continue;\n\n }\n\n let encrypted_message = std::fs::read_to_string(entry.path())?;\n\n\n\n // 1. Get to-key\n\n let to_seed = std::fs::read(seed_path(path)).unwrap();\n\n let to_key = did_key::Ed25519KeyPair::from_seed(&to_seed);\n\n\n\n // 2. 
Get from-key\n\n let from_jwe: didcomm_rs::Jwe = serde_json::from_str(&encrypted_message).unwrap();\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 39, "score": 189594.78659930316 }, { "content": "fn did_name_path(did: &str) -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"did-names/\")\n\n .join(did)\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 40, "score": 186252.1096253628 }, { "content": "fn did_path(did_name: &str) -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"dids/\")\n\n .join(format!(\"{}.did\", did_name))\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 41, "score": 186252.1096253628 }, { "content": "fn message_path(message_id: &str) -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"messages/\")\n\n .join(format!(\"{}.dcem\", message_id))\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 43, "score": 181783.71859813065 }, { "content": "fn key_jwk_path() -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"key.jwk\")\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 45, "score": 167674.39300710126 }, { "content": "fn ed25519_keypair_to_jwk() -> String {\n\n use did_key::KeyMaterial;\n\n let mut csprng = rand::rngs::OsRng {};\n\n let private_key = ed25519_dalek::SecretKey::generate(&mut csprng).to_bytes();\n\n let did_key = did_key::Ed25519KeyPair::from_seed(&private_key);\n\n \n\n bytes_to_jwk(did_key.public_key_bytes(), did_key.private_key_bytes())\n\n}\n\n\n", "file_path": "playground/keytojwk/src/main.rs", "rank": 47, "score": 166681.35013915668 }, { "content": "fn didkey_keypair_to_jwk() -> String {\n\n let mut csprng = rand::rngs::OsRng {};\n\n let keypair = ed25519_dalek::Keypair::generate(&mut csprng);\n\n\n\n bytes_to_jwk(keypair.public.to_bytes().to_vec(), keypair.secret.to_bytes().to_vec())\n\n}\n\n\n", "file_path": "playground/keytojwk/src/main.rs", "rank": 48, "score": 166681.35013915674 }, { "content": "//\n\n// Runs the program\n\n//\n\npub fn run(config: Config) -> Result<(), Box<dyn std::error::Error>> {\n\n let file = std::fs::read_to_string(&config.filename)?;\n\n\n\n let results = if config.case_sensitive {\n\n search_sensitive(&config.query, &file)\n\n } else {\n\n search_insensitive(&config.query, &file)\n\n };\n\n\n\n for line in results {\n\n println!(\"{}\", line);\n\n }\n\n\n\n Ok(()) \n\n}\n\n\n", "file_path": "playground/minigrep/src/lib.rs", "rank": 49, "score": 160892.3857054424 }, { "content": "//\n\n// Commands: DID\n\n//\n\nfn init() -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Create empty folders, if not exists\n\n if !std::fs::metadata(root_path()).is_ok() {\n\n std::fs::create_dir_all(root_path()).unwrap();\n\n }\n\n if !std::fs::metadata(dids_path()).is_ok() {\n\n std::fs::create_dir_all(dids_path()).unwrap();\n\n }\n\n if !std::fs::metadata(did_names_path()).is_ok() {\n\n std::fs::create_dir_all(did_names_path()).unwrap();\n\n }\n\n if !std::fs::metadata(messages_path()).is_ok() {\n\n std::fs::create_dir_all(messages_path()).unwrap();\n\n }\n\n\n\n let did_doc = if !std::fs::metadata(key_jwk_path()).is_ok() {\n\n // 2. 
Generate jwk, if not exists\n\n let mut csprng = rand::rngs::OsRng {};\n", "file_path": "did-cli/src/lib.rs", "rank": 50, "score": 151428.17809447597 }, { "content": "fn doc() -> Result<String, std::io::Error> {\n\n let self_didkey = get_self_didkey();\n\n\n\n use did_key::DIDCore;\n\n let did_doc = self_didkey.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n let did_doc = serde_json::to_string_pretty(&did_doc).unwrap();\n\n\n\n Ok(format!(\"{}\", did_doc))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 51, "score": 151424.57337500167 }, { "content": "fn help() -> Result<String, std::io::Error> {\n\n Ok(String::from(\"\n\n DID:\n\n did init\n\n did doc\n\n did connect <didname> <did>\n\n did dids\n\n did did <didname>\n\n\n\n DIDComm v2:\n\n did write <subject didname> <message> --> <dcem>\n\n did hold <dcem> --> <dcem>\n\n did read <dcem> --> <plaintext message>\n\n did messages\n\n did message <message id>\n\n\n\n Verifiable Credentials over DIDComm v2:\n\n did issue Passport <subject didname> --> <dcem>\n\n did issue DriversLicense <subject didname> --> <dcem>\n\n did issue TrafficAuthority <subject didname> --> <dcem>\n\n did issue LawEnforcer <subject didname> --> <dcem>\n\n\n\n did present <verifier didname> <dcem> --> <dcem>\n\n did verify <issuer didname> <subject didname> <dcem> --> <dcem>\n\n\"))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 52, "score": 151424.57337500167 }, { "content": "fn encrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, message: &str) -> (String, DIDCommID) {\n\n use did_key::Ecdh;\n\n\n\n // 1. Get dids\n\n use did_key::DIDCore;\n\n let from_did = from_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n let to_did = to_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n\n\n // 2. Map Ed25519 -> x25519\n\n let from_key = from_key.get_x25519();\n\n let to_key = to_key.get_x25519();\n\n\n\n // 3. Make shared secret (from -> to)\n\n let shared_secret = from_key.key_exchange(&to_key);\n\n\n\n // 4. Make didcomm message\n\n let to_vec = vec![&to_did[..]];\n\n\n\n let message = didcomm_rs::Message::new()\n\n .from(&from_did)\n", "file_path": "did-cli/src/lib.rs", "rank": 53, "score": 150931.46027092956 }, { "content": "fn decrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, dcem: &str)-> (String, DIDCommID) {\n\n use did_key::Ecdh;\n\n\n\n // 1. Map Ed25519 -> x25519\n\n let to_key = to_key.get_x25519();\n\n let from_key = from_key.get_x25519();\n\n\n\n // 2. Make shared secret (to -> from)\n\n let shared_secret = to_key.key_exchange(&from_key);\n\n\n\n // 3. Decrypt message\n\n let message = didcomm_rs::Message::receive(dcem, Some(&shared_secret), None);\n\n let message = message.unwrap();\n\n let id = message.get_didcomm_header().id.to_string();\n\n let body = String::from_utf8(message.body).unwrap();\n\n\n\n (body, id)\n\n}\n\n\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 54, "score": 150931.46027092956 }, { "content": "fn help() -> Result<String, std::io::Error> {\n\n Ok(String::from(\"\n\n Usage:\n\n didchat <path> <command>\n\n \n\n didchat <path> init\n\n didchat <path> doc \n\n didchat <path> did \n\n didchat <path> messages\n\n\n\n didchat <path> connect <name> <did>\n\n\n\n didchat <path> write <name> <message> --> <encrypted message>\n\n didchat <path> read <encrypted message> --> <name> <message>\n\n\n\n Example - Write to self:\n\n didchat . init\n\n didchat . connect self $(didchat . did)\n\n didchat . read $(didchat . 
write self \\\"Hello self!\\\")\n\n\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 55, "score": 149819.32603723917 }, { "content": "fn help() -> Result<String, std::io::Error> {\n\n Ok(String::from(\"\n\n Usage:\n\n didvote <path> <command>\n\n\n\n didvote <path> init\n\n didvote <path> doc\n\n didvote <path> did\n\n didvote <path> messages\n\n\n\n didvote <path> connect <name> <did>\n\n\n\n didvote <path> write <name> <message> --> <encrypted message>\n\n didvote <path> read <encrypted message> --> <name> <message>\n\n\n\n Example - Write to self:\n\n didvote . init\n\n didvote . write self \\\"Hello self!\\\"\n\n didvote . read $(didvote . write self \\\"How do you do?\\\")\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 56, "score": 149819.32603723917 }, { "content": "fn connect(did_name: &str, did: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Create 'name' -> 'did'-mapping\n\n let mut file = std::fs::File::create(did_path(did_name)).unwrap();\n\n file.write(did.as_bytes()).unwrap();\n\n\n\n // 2. Create 'did' -> 'name'-mapping\n\n let mut file = std::fs::File::create(did_name_path(did)).unwrap();\n\n file.write(did_name.as_bytes()).unwrap();\n\n\n\n Ok(format!(\"{}\\n{}\", did_path(did_name), did_name_path(did)))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 58, "score": 147757.01261464442 }, { "content": "//\n\n// Commands: DIDComm v2\n\n//\n\nfn write(subject_didname: &str, message: &str) -> Result<String, std::io::Error> {\n\n // 1. Get did:keys\n\n let from_key = get_self_didkey();\n\n let to_key = get_other_didkey(subject_didname);\n\n\n\n // 2. Encrypt message with to_key, to prepare it for transmission\n\n let (dcem, _) = encrypt_didcomm(&from_key, &to_key, message);\n\n\n\n Ok(format!(\"{}\", &dcem))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 59, "score": 145145.45761366782 }, { "content": "fn encrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, message: &str) -> Result<String, didcomm_rs::Error> {\n\n use did_key::Ecdh;\n\n use did_key::DIDCore;\n\n\n\n // 1. Get dids\n\n let from_did = from_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n let to_did = to_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n\n\n // 2. Map Ed25519 -> x25519\n\n let from_key = from_key.get_x25519();\n\n let to_key = to_key.get_x25519();\n\n\n\n // 3. Make shared secret (from -> to)\n\n let shared_secret = from_key.key_exchange(&to_key);\n\n\n\n // 4. Make didcomm message\n\n let to_vec = vec![&to_did[..]];\n\n \n\n let message = didcomm_rs::Message::new()\n\n .from(&from_did)\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 60, "score": 143877.17962980995 }, { "content": "fn encrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, message: &str) -> Result<String, didcomm_rs::Error> {\n\n use did_key::Ecdh;\n\n use did_key::DIDCore;\n\n\n\n // 1. Get dids\n\n let from_did = from_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n let to_did = to_key.get_did_document(did_key::CONFIG_LD_PUBLIC).id;\n\n\n\n // 2. Map Ed25519 -> x25519\n\n let from_key = from_key.get_x25519();\n\n let to_key = to_key.get_x25519();\n\n\n\n // 3. Make shared secret (from -> to)\n\n let shared_secret = from_key.key_exchange(&to_key);\n\n\n\n // 4. 
Make didcomm message\n\n let to_vec = vec![&to_did[..]];\n\n\n\n let message = didcomm_rs::Message::new()\n\n .from(&from_did)\n", "file_path": "playground/didvote/src/lib.rs", "rank": 61, "score": 143877.17962980995 }, { "content": "fn decrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, encrypted_message: &str)-> Result<String, didcomm_rs::Error> {\n\n use did_key::Ecdh;\n\n\n\n // 1. Map Ed25519 -> x25519\n\n let to_key = to_key.get_x25519();\n\n let from_key = from_key.get_x25519();\n\n\n\n // 2. Make shared secret (to -> from)\n\n let shared_secret = to_key.key_exchange(&from_key);\n\n\n\n // 3. Decrypt message\n\n let decrypted = didcomm_rs::Message::receive(encrypted_message, Some(&shared_secret), None);\n\n let decrypted = decrypted.unwrap();\n\n let decrypted = String::from_utf8(decrypted.body).unwrap();\n\n\n\n Ok(decrypted)\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 62, "score": 142847.21254686237 }, { "content": "fn decrypt_didcomm(from_key: &did_key::Ed25519KeyPair, to_key: &did_key::Ed25519KeyPair, encrypted_message: &str)-> Result<String, didcomm_rs::Error> {\n\n use did_key::Ecdh;\n\n\n\n // 1. Map Ed25519 -> x25519\n\n let to_key = to_key.get_x25519();\n\n let from_key = from_key.get_x25519();\n\n\n\n // 2. Make shared secret (to -> from)\n\n let shared_secret = to_key.key_exchange(&from_key);\n\n\n\n // 3. Decrypt message\n\n let decrypted = didcomm_rs::Message::receive(encrypted_message, Some(&shared_secret), None);\n\n let decrypted = decrypted.unwrap();\n\n let decrypted = String::from_utf8(decrypted.body).unwrap();\n\n\n\n Ok(decrypted)\n\n}", "file_path": "playground/didchat2/src/lib.rs", "rank": 63, "score": 142847.21254686237 }, { "content": "fn get_self_jwk_and_didkey() -> (did_key::Ed25519KeyPair, ssi::jwk::JWK) {\n\n let jwk = key_jwk_path();\n\n let jwk = std::fs::read(jwk).unwrap();\n\n let jwk = String::from_utf8(jwk).unwrap();\n\n\n\n let (_, private) = jwkstr_to_publicprivatebytes(&jwk);\n\n let didkey = did_key::Ed25519KeyPair::from_seed(&private);\n\n\n\n let jwk: ssi::jwk::JWK = serde_json::from_str(&jwk).unwrap();\n\n\n\n (didkey, jwk)\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 64, "score": 141052.71526042785 }, { "content": "fn did(did_name: &str) -> Result<String, std::io::Error> {\n\n let path = did_path(did_name);\n\n let did = std::fs::read_to_string(path).unwrap();\n\n\n\n Ok(did)\n\n}\n\n\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 65, "score": 139889.50465736852 }, { "content": "fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) {// -> (public: Vec<u8>, private: Vec<u8>)\n\n\n\n let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap();\n\n let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {\n\n Some(o)\n\n } else {\n\n panic!(\"okp == None\")\n\n }).unwrap();\n\n\n\n\n\n let privkey: Vec<u8> = if let Some(key) = okp.private_key {\n\n key.0\n\n } else {\n\n panic!(\"privkey == None\")\n\n };\n\n\n\n (okp.public_key.0, privkey)\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 66, "score": 139478.8091008524 }, { "content": "fn read(dcem: &str) -> Result<String, std::io::Error> {\n\n // 1. Get did:keys\n\n let to_key = get_self_didkey();\n\n let from_key = get_from_key_from_didcomm_message(dcem);\n\n\n\n // 2. 
Decrypt message, to get the contents of the message-body\n\n let (body, _) = decrypt_didcomm(&from_key, &to_key, dcem);\n\n\n\n Ok(format!(\"{}\", body))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 67, "score": 138365.28578963902 }, { "content": "fn hold(dcem: &str) -> Result<String, std::io::Error> {\n\n use std::io::Write;\n\n\n\n // 1. Deserialize message\n\n let message: DIDCommEncryptedMessage = serde_json::from_str(dcem).unwrap();\n\n\n\n // 2. Store incomming message to file with didcomm_header.id as filename.\n\n let message_id = message.didcomm_header.id.to_string();\n\n let path = message_path(&message_id);\n\n let path = std::path::Path::new(&path);\n\n let mut file = std::fs::File::create(path).unwrap();\n\n file.write(dcem.as_bytes()).unwrap();\n\n\n\n // 3. Print message to stdout, to support piping commands together\n\n //\n\n // Example: did write self \"Hello\" | did hold | did read\n\n //\n\n Ok(format!(\"{}\", dcem))\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 68, "score": 138365.28578963905 }, { "content": "fn jwkstr_to_publicprivatebytes(jwkstr: &str) -> (Vec<u8>, Vec<u8>) {// -> (public: Vec<u8>, private: Vec<u8>)\n\n\n\n let jwk: ssi::jwk::JWK = serde_json::from_str(jwkstr).unwrap();\n\n let okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {\n\n Some(o)\n\n } else {\n\n panic!(\"okp == None\")\n\n }).unwrap();\n\n\n\n\n\n let privkey: Vec<u8> = if let Some(key) = okp.private_key {\n\n key.0\n\n } else {\n\n panic!(\"privkey == None\")\n\n };\n\n\n\n (okp.public_key.0, privkey)\n\n}\n\n\n\n//\n\n// Config\n\n//\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 69, "score": 138297.6418505209 }, { "content": "#[derive(Debug)]\n\nenum CMD {\n\n Help,\n\n\n\n // DID\n\n Init,\n\n Doc,\n\n Connect{ didname: String, did: String },\n\n Dids,\n\n Did{ didname: String },\n\n\n\n // DIDComm v2 messaging\n\n Write{ didname: String, message: String },\n\n Read{ dcem: String },\n\n Hold{ dcem: String },\n\n Messages,\n\n Message{ message_id: String },\n\n\n\n // DIDComm v2 + Verifiable Credentials\n\n IssuePassport{ didname: String },\n\n IssueDriversLicense{ didname: String },\n", "file_path": "did-cli/src/lib.rs", "rank": 70, "score": 137179.48946358304 }, { "content": "fn message(message_id: &str) -> Result<String, std::io::Error> {\n\n let dcem = std::fs::read_to_string(message_path(message_id)).unwrap();\n\n Ok(dcem)\n\n}\n\n\n\n\n\n//\n\n// Commands: Verifiable credentials\n\n//\n\nasync fn issue(credential_type: &str, subject_didname: &str) -> Result<String, std::io::Error> {\n\n // 1. Get did docs\n\n let (issuer_didkey, issuer_jwk) = get_self_jwk_and_didkey();\n\n let subject_didkey = get_other_didkey(subject_didname);\n\n\n\n use did_key::DIDCore;\n\n let issuer_doc = issuer_didkey.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n let subject_doc = subject_didkey.get_did_document(did_key::CONFIG_LD_PUBLIC);\n\n\n\n // 2. 
Construct unsigned vc\n\n let vc = serde_json::json!({\n", "file_path": "did-cli/src/lib.rs", "rank": 71, "score": 136890.59468850307 }, { "content": "#[derive(Debug)]\n\nenum CMD {\n\n Init{ path: String },\n\n Doc{ path: String },\n\n Did{ path: String },\n\n Vote{ path: String, name: String },\n\n Messages{ path: String },\n\n Connect{ path: String, name: String, did: String },\n\n Write{ path: String, name: String, message: String },\n\n Read{ path: String, encrypted_message: String },\n\n Help\n\n}\n\n\n\npub struct Config {\n\n cmd: CMD,\n\n}\n\n\n\nimpl Config {\n\n pub fn new(args: &[String]) -> Result<Config, std::io::Error> {\n\n let default_cmd = String::from(\"help\");\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 72, "score": 135828.64911665992 }, { "content": "#[derive(Debug)]\n\nenum CMD {\n\n Init{ path: String },\n\n Doc{ path: String },\n\n Did{ path: String },\n\n Messages{ path: String },\n\n Connect{ path: String, name: String, did: String },\n\n Write{ path: String, name: String, message: String },\n\n Read{ path: String, encrypted_message: String },\n\n Help\n\n}\n\n\n\npub struct Config {\n\n cmd: CMD,\n\n}\n\n\n\nimpl Config {\n\n pub fn new(args: &[String]) -> Result<Config, std::io::Error> {\n\n let default_cmd = String::from(\"help\");\n\n \n\n let path = args.get(1).unwrap_or(&default_cmd).clone();\n", "file_path": "playground/didchat2/src/lib.rs", "rank": 73, "score": 135828.64911665992 }, { "content": "fn did_names_path() -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"did-names/\")\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 74, "score": 135002.91171859857 }, { "content": "fn root_path() -> String {\n\n String::from(ROOT_PATH)\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 75, "score": 135002.91171859857 }, { "content": "fn messages_path() -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"messages/\")\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 76, "score": 135002.91171859857 }, { "content": "fn dids_path() -> String {\n\n std::path::Path::new(ROOT_PATH)\n\n .join(\"dids/\")\n\n .to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 77, "score": 135002.91171859857 }, { "content": "fn get_other_didkey(other_did_name: &str) -> did_key::Ed25519KeyPair {\n\n let path = did_path(other_did_name);\n\n let other_did = std::fs::read_to_string(path).unwrap();\n\n let other_didkey = did_key::resolve(&other_did).unwrap();\n\n\n\n use did_key::KeyMaterial;\n\n let other_didkey = did_key::Ed25519KeyPair::from_public_key(&other_didkey.public_key_bytes());\n\n\n\n other_didkey\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 78, "score": 120926.04598915698 }, { "content": "fn get_from_key_from_didcomm_message(dcem: &str) -> did_key::Ed25519KeyPair {\n\n let from_jwe: didcomm_rs::Jwe = serde_json::from_str(&dcem).unwrap();\n\n let from_did = from_jwe.from().as_ref().unwrap();\n\n let from_key = did_key::resolve(&from_did).unwrap();\n\n\n\n use did_key::KeyMaterial;\n\n let from_key = did_key::Ed25519KeyPair::from_public_key(&from_key.public_key_bytes());\n\n\n\n from_key\n\n}\n\n\n\n\n\n//\n\n// Config\n\n//\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 79, "score": 118992.91380542026 }, { "content": "fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String {\n\n let jwk = ssi::jwk::JWK {\n\n params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams {\n\n curve: 
\"Ed25519\".to_string(),\n\n public_key: ssi::jwk::Base64urlUInt(public),\n\n private_key: Some(ssi::jwk::Base64urlUInt(private)),\n\n }),\n\n public_key_use: None,\n\n key_operations: None,\n\n algorithm: None,\n\n key_id: None,\n\n x509_url: None,\n\n x509_certificate_chain: None,\n\n x509_thumbprint_sha1: None,\n\n x509_thumbprint_sha256: None\n\n };\n\n\n\n let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {\n\n Some(o)\n\n } else {\n\n None\n\n }).unwrap();\n\n\n\n serde_json::to_string(&jwk).unwrap()\n\n}\n\n\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 80, "score": 118874.05491841308 }, { "content": "fn publicprivatebytes_to_jwkstr(public: Vec<u8>, private: Vec<u8>) -> String {\n\n let jwk = ssi::jwk::JWK {\n\n params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams {\n\n curve: \"Ed25519\".to_string(),\n\n public_key: ssi::jwk::Base64urlUInt(public),\n\n private_key: Some(ssi::jwk::Base64urlUInt(private)),\n\n }),\n\n public_key_use: None,\n\n key_operations: None,\n\n algorithm: None,\n\n key_id: None,\n\n x509_url: None,\n\n x509_certificate_chain: None,\n\n x509_thumbprint_sha1: None,\n\n x509_thumbprint_sha256: None\n\n };\n\n\n\n let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {\n\n Some(o)\n\n } else {\n\n None\n\n }).unwrap();\n\n\n\n serde_json::to_string(&jwk).unwrap()\n\n}\n\n\n\n\n", "file_path": "playground/didvote/src/lib.rs", "rank": 81, "score": 117421.16834217585 }, { "content": "fn bytes_to_jwk(public: Vec<u8>, private: Vec<u8>) -> String {\n\n let jwk = ssi::jwk::JWK {\n\n params: ssi::jwk::Params::OKP(ssi::jwk::OctetParams {\n\n curve: \"Ed25519\".to_string(),\n\n public_key: ssi::jwk::Base64urlUInt(public),\n\n private_key: Some(ssi::jwk::Base64urlUInt(private)),\n\n }),\n\n public_key_use: None,\n\n key_operations: None,\n\n algorithm: None,\n\n key_id: None,\n\n x509_url: None,\n\n x509_certificate_chain: None,\n\n x509_thumbprint_sha1: None,\n\n x509_thumbprint_sha256: None\n\n };\n\n\n\n let _okp = (if let ssi::jwk::Params::OKP(o) = jwk.params.clone() {\n\n Some(o)\n\n } else {\n\n None\n\n }).unwrap();\n\n \n\n \n\n serde_json::to_string(&jwk).unwrap()\n\n}\n\n\n", "file_path": "playground/keytojwk/src/main.rs", "rank": 82, "score": 117060.29455702059 }, { "content": "fn dids() -> Result<String, std::io::Error> {\n\n let mut list = format!(\"{:16}{}\\n\", \"ID\", \"DID\");\n\n let mut entries: Vec<std::fs::DirEntry> = std::fs::read_dir(dids_path())\n\n .unwrap()\n\n .filter_map(|f| f.ok()).collect();\n\n entries.sort_by_key(|e| e.path());\n\n\n\n for entry in entries {\n\n if entry.path().is_dir() {\n\n continue;\n\n }\n\n let did = std::fs::read_to_string(entry.path()).unwrap();\n\n let did_name = String::from(entry.file_name().to_str().unwrap()).replace(\".did\", \"\");\n\n list.push_str(&format!(\"{:16}{}\\n\", did_name, did));\n\n }\n\n\n\n Ok(list)\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 83, "score": 92021.32801543034 }, { "content": "fn messages() -> Result<String, std::io::Error> {\n\n let mut list = format!(\n\n \"{:16}\\t{:14}\\t{:14}\\t{:>12}\\t{:>9}\",\n\n \"ID\", \"From\", \"To\", \"Created\", \"Length\");\n\n\n\n // 1. 
Get messages from message directory\n\n let mut messages: Vec<DIDCommEncryptedMessage> = std::fs::read_dir(messages_path())\n\n .unwrap()\n\n .filter_map(|f| f.ok())\n\n .filter(|f| !f.path().is_dir())\n\n .map(|entry| {\n\n let dcem = std::fs::read_to_string(entry.path()).unwrap();\n\n let dcem: DIDCommEncryptedMessage = serde_json::from_str(&dcem).unwrap();\n\n\n\n dcem\n\n })\n\n .collect();\n\n\n\n // 2. Sort by created time\n\n messages.sort_by_key(|dcem| dcem.didcomm_header.created_time.unwrap());\n", "file_path": "did-cli/src/lib.rs", "rank": 84, "score": 92021.32801543034 }, { "content": "fn main() {\n\n let jonas = \"did:xyz:ulapcuhsatnpuhza930hpu34n_\";\n\n let cecilie = vec!(\"did::xyz:34r3cu403hnth03r49g03\");\n\n let args: Vec<String> = std::env::args().collect();\n\n let args_as_string = args.join(\"\");\n\n let args_as_bytes = args_as_string.as_bytes();\n\n\n\n // println!(\"{:?}\", &args[1..]);\n\n\n\n // 1. Unencrypted\n\n {\n\n let message = didcomm_rs::Message::new()\n\n .from(jonas)\n\n .to(cecilie.clone())\n\n .body(args_as_bytes);\n\n\n\n let serialized_message = message\n\n .clone()\n\n .as_raw_json()\n\n .unwrap();\n", "file_path": "playground/didchat/src/main.rs", "rank": 85, "score": 83425.78295124773 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n let config = jwk::Config::new(&args).unwrap_or_else(|err| {\n\n eprintln!(\"Config::new(&args) failed: {}\", err);\n\n std::process::exit(1);\n\n });\n\n\n\n let output = jwk::run(config).unwrap_or_else(|err| {\n\n eprintln!(\"run(config) failed: {}\", err);\n\n std::process::exit(2);\n\n });\n\n\n\n println!(\"{}\", output);\n\n}\n", "file_path": "playground/jwk/src/main.rs", "rank": 86, "score": 82980.88072028692 }, { "content": "fn get_self_didkey() -> did_key::Ed25519KeyPair {\n\n let jwk = key_jwk_path();\n\n let jwk = std::fs::read(jwk).unwrap();\n\n let jwk = String::from_utf8(jwk).unwrap();\n\n\n\n let (_, private) = jwkstr_to_publicprivatebytes(&jwk);\n\n let self_didkey = did_key::Ed25519KeyPair::from_seed(&private);\n\n\n\n self_didkey\n\n}\n\n\n", "file_path": "did-cli/src/lib.rs", "rank": 87, "score": 71887.848455255 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\nstruct DIDCommEncryptedMessage {\n\n /// JOSE header, which is sent as public part with JWE.\n\n #[serde(flatten)]\n\n pub jwm_header: didcomm_rs::JwmHeader,\n\n /// DIDComm headers part, sent as part of encrypted message in JWE.\n\n #[serde(flatten)]\n\n pub didcomm_header: didcomm_rs::DidcommHeader,\n\n /// Message payload, which can be basically anything (JSON, text, file, etc.) 
represented\n\n /// as bytes of data.\n\n pub ciphertext: Vec<u8>,\n\n}\n", "file_path": "did-cli/src/lib.rs", "rank": 88, "score": 67591.1125938617 }, { "content": "type DIDCommID = String;\n\n\n\n/**\n\n * @returns (String, String) which is (didcomm encrypted message, didcomm header id)\n\n */\n", "file_path": "did-cli/src/lib.rs", "rank": 89, "score": 54725.82999297431 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n let config = didchat::Config::new(&args).unwrap_or_else(|err| {\n\n eprintln!(\"Config::new(&args) failed: {}\", err);\n\n std::process::exit(1);\n\n });\n\n\n\n let output = didchat::run(config).unwrap_or_else(|err| {\n\n eprintln!(\"run(config) failed: {}\", err);\n\n std::process::exit(2);\n\n });\n\n\n\n println!(\"{}\", output);\n\n}\n", "file_path": "playground/didchat2/src/main.rs", "rank": 90, "score": 47288.132690189894 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n let config = didvote::Config::new(&args).unwrap_or_else(|err| {\n\n eprintln!(\"Config::new(&args) failed: {}\", err);\n\n std::process::exit(1);\n\n });\n\n\n\n let output = didvote::run(config).unwrap_or_else(|err| {\n\n eprintln!(\"run(config) failed: {}\", err);\n\n std::process::exit(2);\n\n });\n\n\n\n println!(\"{}\", output);\n\n}\n", "file_path": "playground/didvote/src/main.rs", "rank": 91, "score": 47288.132690189894 }, { "content": "fn main() {\n\n let key = DIDKey::new(DIDKeyType::Ed25519);\n\n println!(\"{}\", key.fingerprint());\n\n\n\n let did_doc = key.to_did_document(CONFIG_LD_PUBLIC);\n\n println!(\"{:?}\", did_doc);\n\n}\n", "file_path": "playground/didkey/src/main.rs", "rank": 92, "score": 47288.132690189894 }, { "content": "//\n\n// Entry point of the application\n\n//\n\nfn main() {\n\n // Although we very rarely need to annotate types in Rust, collect is one \n\n // function you do often need to annotate because Rust isn’t able to infer \n\n // the kind of collection you want.\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n let config = minigrep::Config::new(&args).unwrap_or_else(|err| {\n\n eprintln!(\"Config::new(&args) failed: {}\", err);\n\n std::process::exit(1);\n\n });\n\n\n\n if let Err(err) = minigrep::run(config) {\n\n eprintln!(\"run(config) failed: {}\", err);\n\n std::process::exit(2);\n\n }\n\n}\n", "file_path": "playground/minigrep/src/main.rs", "rank": 93, "score": 47288.132690189894 }, { "content": "fn main() {\n\n println!(\"{}\", ed25519_keypair_to_jwk());\n\n println!(\"{}\", didkey_keypair_to_jwk());\n\n}\n\n\n", "file_path": "playground/keytojwk/src/main.rs", "rank": 94, "score": 47288.132690189894 }, { "content": "fn main() {\n\n\tlet args: Vec<String> = std::env::args().collect();\n\n\n\n match args.len() {\n\n \t1 => println!(\"Try to pass some arguments...\"),\n\n \t2 => {\n\n \t\tmatch args[1].parse() {\n\n \t\t\tOk(1) => guessing_game(),\n\n \t\t\tOk(2) => println!(\"Two\"),\n\n \t\t\tOk(3) | Ok(4) => println!(\"Three\"),\n\n \t\t\tOk(5..=5000) => println!(\"Between 1 and 5000\"),\n\n \t\t\tn => println!(\"You passed {:?}\", n),\n\n \t\t}\n\n \t},\n\n \t_ => println!(\"Too many args\"),\n\n }\n\n}\n\n\n\n\n\n/**\n\n * @see https://doc.rust-lang.org/book/ch02-00-guessing-game-tutorial.html\n\n */\n\nuse rand::Rng;\n\n\n", "file_path": "playground/rust/src/main.rs", "rank": 95, "score": 47288.132690189894 }, { "content": "fn guessing_game() {\n\n println!(\"--------Guess the number--------\");\n\n println!(\"Please input your guess:🙏\");\n\n\n\n let 
secret_number = rand::thread_rng().gen_range(1, 1001);\n\n\n\n println!(\"The secret number is: {}\", secret_number);\n\n\n\n let mut guess = String::new();\n\n\n\n std::io::stdin()\n\n .read_line(&mut guess)\n\n .expect(\"Failed to read line\");\n\n\n\n let guess: u32 = guess\n\n .trim()\n\n .parse()\n\n .expect(\"You have to type a number.\");\n\n\n\n println!(\"You guessed: {}\", guess);\n\n\n\n match guess.cmp(&secret_number) {\n\n std::cmp::Ordering::Less => println!(\"Too small!\"),\n\n std::cmp::Ordering::Greater => println!(\"Too big!\"),\n\n std::cmp::Ordering::Equal => println!(\"Just right! :D\"),\n\n }\n\n}", "file_path": "playground/rust/src/main.rs", "rank": 96, "score": 46697.83461411332 }, { "content": "\n\n println!(\"---------UNECNRYPTED---------\");\n\n println!(\"{}\", serialized_message);\n\n println!(\"---------UNECNRYPTED---------\");\n\n }\n\n \n\n // 2. Encrypted\n\n {\n\n let jonas_secret = x25519_dalek::StaticSecret::new(rand_core::OsRng);\n\n let cecilie_secret = x25519_dalek::StaticSecret::new(rand_core::OsRng);\n\n let _jonas_public = x25519_dalek::PublicKey::from(&jonas_secret);\n\n let cecilie_public = x25519_dalek::PublicKey::from(&cecilie_secret);\n\n\n\n std::fs::create_dir(\".didchat\").unwrap_or_default();\n\n\n\n let mut _file = std::fs::File::create(\".didchat/key.private\").unwrap();\n\n _file.write_all(&jonas_secret.to_bytes()).unwrap();\n\n \n\n let shared_secret = jonas_secret.diffie_hellman(&cecilie_public);\n\n\n", "file_path": "playground/didchat/src/main.rs", "rank": 97, "score": 36821.27566896432 }, { "content": "use std::io::Write;\n\n\n", "file_path": "playground/didchat/src/main.rs", "rank": 98, "score": 36815.89529564693 }, { "content": " let message = didcomm_rs::Message::new()\n\n .from(jonas)\n\n .to(cecilie.clone())\n\n .timed(Some(3600))\n\n .body(args_as_bytes)\n\n .as_jwe(&didcomm_rs::crypto::CryptoAlgorithm::XC20P);\n\n\n\n let serialized_message = message\n\n .seal(shared_secret.as_bytes())\n\n .unwrap();\n\n\n\n println!(\"---------ENCRYPTED---------\");\n\n println!(\"{:?}\", serialized_message);\n\n println!(\"---------ENCRYPTED---------\");\n\n }\n\n}\n", "file_path": "playground/didchat/src/main.rs", "rank": 99, "score": 36812.48872650509 } ]
Rust
src/libos/src/fs/file_ops/ioctl/mod.rs
qzheng527/ngo
635ce9ef2427fe1b602b40ec89aa3530b167169d
use super::*;
use util::mem_util::from_user;

pub use self::builtin::*;
pub use self::non_builtin::{NonBuiltinIoctlCmd, StructuredIoctlArgType, StructuredIoctlNum};

#[macro_use]
mod macros;
mod builtin;
mod non_builtin;

impl_ioctl_nums_and_cmds! {
    TCGETS => (0x5401, mut KernelTermios),
    TCSETS => (0x5402, KernelTermios),
    TIOCGWINSZ => (0x5413, mut WinSize),
    TIOCSWINSZ => (0x5414, WinSize),
    FIONBIO => (0x5421, i32),
    TIOCNOTTY => (0x5422, ()),
    FIONREAD => (0x541B, mut i32),
    FIONCLEX => (0x5450, ()),
    FIOCLEX => (0x5451, ()),
    SIOCGIFNAME => (0x8910, mut IfReq),
    SIOCGIFCONF => (0x8912, mut IfConf),
    SIOCGIFFLAGS => (0x8913, mut IfReq),
    SIOCGIFADDR => (0x8915, mut IfReq),
    SIOCGIFDSTADDR => (0x8917, mut IfReq),
    SIOCGIFBRDADDR => (0x8919, mut IfReq),
    SIOCGIFNETMASK => (0x891B, mut IfReq),
    SIOCGIFMTU => (0x8921, mut IfReq),
    SIOCGIFHWADDR => (0x8927, mut IfReq),
    SIOCGIFINDEX => (0x8933, mut IfReq),
    SIOCGIFPFLAGS => (0x8935, mut IfReq),
    SIOCGIFTXQLEN => (0x8942, mut IfReq),
    SIOCGIFMAP => (0x8970, mut IfReq),
}

impl<'a> IoctlRawCmd<'a> {
    pub fn to_safe_ioctlcmd(&self) -> Result<Box<dyn IoctlCmd>> {
        Ok(match self {
            IoctlRawCmd::TCGETS(_) => Box::new(TcGets::new(())),
            IoctlRawCmd::TCSETS(termios_ref) => {
                let termios = **termios_ref;
                Box::new(TcSets::new(termios))
            }
            IoctlRawCmd::TIOCGWINSZ(_) => Box::new(GetWinSize::new(())),
            IoctlRawCmd::TIOCSWINSZ(winsize_ref) => {
                let winsize = **winsize_ref;
                Box::new(SetWinSize::new(winsize))
            }
            IoctlRawCmd::NonBuiltin(inner) => {
                let nonbuiltin_cmd =
                    unsafe { NonBuiltinIoctlCmd::new(*inner.cmd_num(), inner.arg_ptr() as _)? };
                Box::new(nonbuiltin_cmd)
            }
            IoctlRawCmd::FIONBIO(non_blocking) => Box::new(SetNonBlocking::new(**non_blocking)),
            IoctlRawCmd::FIONREAD(_) => Box::new(GetReadBufLen::new(())),
            IoctlRawCmd::FIONCLEX(_) => Box::new(SetCloseOnExec::new(false)),
            IoctlRawCmd::FIOCLEX(_) => Box::new(SetCloseOnExec::new(true)),
            IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => {
                if !ifconf_mut.ifc_buf.is_null() {
                    if ifconf_mut.ifc_len < 0 {
                        return_errno!(EINVAL, "invalid ifc_len");
                    }
                    from_user::check_array(ifconf_mut.ifc_buf, ifconf_mut.ifc_len as usize)?;
                }
                Box::new(GetIfConf::new(ifconf_mut))
            }
            IoctlRawCmd::SIOCGIFFLAGS(req)
            | IoctlRawCmd::SIOCGIFNAME(req)
            | IoctlRawCmd::SIOCGIFADDR(req)
            | IoctlRawCmd::SIOCGIFDSTADDR(req)
            | IoctlRawCmd::SIOCGIFBRDADDR(req)
            | IoctlRawCmd::SIOCGIFNETMASK(req)
            | IoctlRawCmd::SIOCGIFMTU(req)
            | IoctlRawCmd::SIOCGIFHWADDR(req)
            | IoctlRawCmd::SIOCGIFINDEX(req)
            | IoctlRawCmd::SIOCGIFPFLAGS(req)
            | IoctlRawCmd::SIOCGIFTXQLEN(req)
            | IoctlRawCmd::SIOCGIFMAP(req) => {
                Box::new(GetIfReqWithRawCmd::new(self.cmd_num(), **req))
            }
            _ => {
                return_errno!(EINVAL, "unsupported cmd");
            }
        })
    }

    pub fn copy_output_from_safe(&mut self, cmd: &dyn IoctlCmd) {
        match self {
            IoctlRawCmd::TCGETS(termios_mut) => {
                let cmd = cmd.downcast_ref::<TcGets>().unwrap();
                **termios_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::TIOCGWINSZ(winsize_mut) => {
                let cmd = cmd.downcast_ref::<GetWinSize>().unwrap();
                **winsize_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::FIONREAD(len_mut) => {
                let cmd = cmd.downcast_ref::<GetReadBufLen>().unwrap();
                **len_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => {
                let cmd = cmd.downcast_ref::<GetIfConf>().unwrap();
                ifconf_mut.ifc_len = cmd.len() as i32;
                if !ifconf_mut.ifc_buf.is_null() {
                    let mut raw_buf = unsafe {
                        std::slice::from_raw_parts_mut(
                            ifconf_mut.ifc_buf as _,
                            ifconf_mut.ifc_len as _,
                        )
                    };
                    raw_buf.copy_from_slice(cmd.as_slice().unwrap());
                }
            }
            IoctlRawCmd::SIOCGIFNAME(ifreq_mut)
            | IoctlRawCmd::SIOCGIFFLAGS(ifreq_mut)
            | IoctlRawCmd::SIOCGIFADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFDSTADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFBRDADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFNETMASK(ifreq_mut)
            | IoctlRawCmd::SIOCGIFMTU(ifreq_mut)
            | IoctlRawCmd::SIOCGIFHWADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFINDEX(ifreq_mut)
            | IoctlRawCmd::SIOCGIFPFLAGS(ifreq_mut)
            | IoctlRawCmd::SIOCGIFTXQLEN(ifreq_mut)
            | IoctlRawCmd::SIOCGIFMAP(ifreq_mut) => {
                let cmd = cmd.downcast_ref::<GetIfReqWithRawCmd>().unwrap();
                **ifreq_mut = *cmd.output().unwrap();
            }
            _ => {}
        }
    }
}

pub fn do_ioctl(fd: FileDesc, raw_cmd: &mut IoctlRawCmd) -> Result<i32> {
    debug!("ioctl: fd: {}, cmd: {:?}", fd, raw_cmd);
    let current = current!();
    let file_ref = current.file(fd)?;
    let mut cmd = raw_cmd.to_safe_ioctlcmd()?;
    if cmd.is::<SetCloseOnExec>() {
        let is_close_on_exec = cmd.downcast_ref::<SetCloseOnExec>().unwrap().input();
        let mut file_table = current.files().lock().unwrap();
        let entry = file_table.get_entry_mut(fd)?;
        entry.set_close_on_spawn(*is_close_on_exec);
        return Ok(0);
    }
    file_ref.ioctl(cmd.as_mut())?;
    raw_cmd.copy_output_from_safe(cmd.as_ref());
    Ok(0)
}

extern "C" {
    pub fn occlum_ocall_ioctl(
        ret: *mut i32,
        fd: c_int,
        request: c_int,
        arg: *mut c_void,
        len: size_t,
    ) -> sgx_status_t;
}
use super::*;
use util::mem_util::from_user;

pub use self::builtin::*;
pub use self::non_builtin::{NonBuiltinIoctlCmd, StructuredIoctlArgType, StructuredIoctlNum};

#[macro_use]
mod macros;
mod builtin;
mod non_builtin;

impl_ioctl_nums_and_cmds! {
    TCGETS => (0x5401, mut KernelTermios),
    TCSETS => (0x5402, KernelTermios),
    TIOCGWINSZ => (0x5413, mut WinSize),
    TIOCSWINSZ => (0x5414, WinSize),
    FIONBIO => (0x5421, i32),
    TIOCNOTTY => (0x5422, ()),
    FIONREAD => (0x541B, mut i32),
    FIONCLEX => (0x5450, ()),
    FIOCLEX => (0x5451, ()),
    SIOCGIFNAME => (0x8910, mut IfReq),
    SIOCGIFCONF => (0x8912, mut IfConf),
    SIOCGIFFLAGS => (0x8913, mut IfReq),
    SIOCGIFADDR => (0x8915, mut IfReq),
    SIOCGIFDSTADDR => (0x8917, mut IfReq),
    SIOCGIFBRDADDR => (0x8919, mut IfReq),
    SIOCGIFNETMASK => (0x891B, mut IfReq),
    SIOCGIFMTU => (0x8921, mut IfReq),
    SIOCGIFHWADDR => (0x8927, mut IfReq),
    SIOCGIFINDEX => (0x8933, mut IfReq),
    SIOCGIFPFLAGS => (0x8935, mut IfReq),
    SIOCGIFTXQLEN => (0x8942, mut IfReq),
    SIOCGIFMAP => (0x8970, mut IfReq),
}

impl<'a> IoctlRawCmd<'a> {
    pub fn to_safe_ioctlcmd(&self) -> Result<Box<dyn IoctlCmd>> {
        Ok(match self {
            IoctlRawCmd::TCGETS(_) => Box::new(TcGets::new(())),
            IoctlRawCmd::TCSETS(termios_ref) => {
                let termios = **termios_ref;
                Box::new(TcSets::new(termios))
            }
            IoctlRawCmd::TIOCGWINSZ(_) => Box::new(GetWinSize::new(())),
            IoctlRawCmd::TIOCSWINSZ(winsize_ref) => {
                let winsize = **winsize_ref;
                Box::new(SetWinSize::new(winsize))
            }
            IoctlRawCmd::NonBuiltin(inner) => {
                let nonbuiltin_cmd =
                    unsafe { NonBuiltinIoctlCmd::new(*inner.cmd_num(), inner.arg_ptr() as _)? };
                Box::new(nonbuiltin_cmd)
            }
            IoctlRawCmd::FIONBIO(non_blocking) => Box::new(SetNonBlocking::new(**non_blocking)),
            IoctlRawCmd::FIONREAD(_) => Box::new(GetReadBufLen::new(())),
            IoctlRawCmd::FIONCLEX(_) => Box::new(SetCloseOnExec::new(false)),
            IoctlRawCmd::FIOCLEX(_) => Box::new(SetCloseOnExec::new(true)),
            IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => {
                if !ifconf_mut.ifc_buf.is_null() {
                    if ifconf_mut.ifc_len < 0 {
                        return_errno!(EINVAL, "invalid ifc_len");
                    }
                    from_user::check_array(ifconf_mut.ifc_buf, ifconf_mut.ifc_len as usize)?;
                }
                Box::new(GetIfConf::new(ifconf_mut))
            }
            IoctlRawCmd::SIOCGIFFLAGS(req)
            | IoctlRawCmd::SIOCGIFNAME(req)
            | IoctlRawCmd::SIOCGIFADDR(req)
            | IoctlRawCmd::SIOCGIFDSTADDR(req)
            | IoctlRawCmd::SIOCGIFPFLAGS(req)
            | IoctlRawCmd::SIOCGIFTXQLEN(req)
            | IoctlRawCmd::SIOCGIFMAP(req) => {
                Box::new(GetIfReqWithRawCmd::new(self.cmd_num(), **req))
            }
            _ => {
                return_errno!(EINVAL, "unsupported cmd");
            }
        })
    }

    pub fn copy_output_from_safe(&mut self, cmd: &dyn IoctlCmd) {
        match self {
            IoctlRawCmd::TCGETS(termios_mut) => {
                let cmd = cmd.downcast_ref::<TcGets>().unwrap();
                **termios_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::TIOCGWINSZ(winsize_mut) => {
                let cmd = cmd.downcast_ref::<GetWinSize>().unwrap();
                **winsize_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::FIONREAD(len_mut) => {
                let cmd = cmd.downcast_ref::<GetReadBufLen>().unwrap();
                **len_mut = *cmd.output().unwrap();
            }
            IoctlRawCmd::SIOCGIFCONF(ifconf_mut) => {
                let cmd = cmd.downcast_ref::<GetIfConf>().unwrap();
                ifconf_mut.ifc_len = cmd.len() as i32;
                if !ifconf_mut.ifc_buf.is_null() {
                    let mut raw_buf = unsafe {
                        std::slice::from_raw_parts_mut(
                            ifconf_mut.ifc_buf as _,
                            ifconf_mut.ifc_len as _,
                        )
                    };
                    raw_buf.copy_from_slice(cmd.as_slice().unwrap());
                }
            }
            IoctlRawCmd::SIOCGIFNAME(ifreq_mut)
            | IoctlRawCmd::SIOCGIFFLAGS(ifreq_mut)
            | IoctlRawCmd::SIOCGIFADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFDSTADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFBRDADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFNETMASK(ifreq_mut)
            | IoctlRawCmd::SIOCGIFMTU(ifreq_mut)
            | IoctlRawCmd::SIOCGIFHWADDR(ifreq_mut)
            | IoctlRawCmd::SIOCGIFINDEX(ifreq_mut)
            | IoctlRawCmd::SIOCGIFPFLAGS(ifreq_mut)
            | IoctlRawCmd::SIOCGIFTXQLEN(ifreq_mut)
            | IoctlRawCmd::SIOCGIFMAP(ifreq_mut) => {
                let cmd = cmd.downcast_ref::<GetIfReqWithRawCmd>().unwrap();
                **ifreq_mut = *cmd.output().unwrap();
            }
            _ => {}
        }
    }
}

pub fn do_ioctl(fd: FileDesc, raw_cmd: &mut IoctlRawCmd) -> Result<i32> {
    debug!("ioctl: fd: {}, cmd: {:?}", fd, raw_cmd);
    let current = current!();
    let file_ref = current.file(fd)?;
    let mut cmd = raw_cmd.to_safe_ioctlcmd()?;
    if cmd.is::<SetCloseOnExec>() {
        let is_close_on_exec = cmd.downcast_ref::<SetCloseOnExec>().unwrap().input();
        let mut file_table = current.files().lock().unwrap();
        let entry = file_table.get_entry_mut(fd)?;
        entry.set_close_on_spawn(*is_close_on_exec);
        return Ok(0);
    }
    file_ref.ioctl(cmd.as_mut())?;
    raw_cmd.copy_output_from_safe(cmd.as_ref());
    Ok(0)
}

extern "C" {
    pub fn occlum_ocall_ioctl(
        ret: *mut i32,
        fd: c_int,
        request: c_int,
        arg: *mut c_void,
        len: size_t,
    ) -> sgx_status_t;
}
| IoctlRawCmd::SIOCGIFBRDADDR(req) | IoctlRawCmd::SIOCGIFNETMASK(req) | IoctlRawCmd::SIOCGIFMTU(req) | IoctlRawCmd::SIOCGIFHWADDR(req) | IoctlRawCmd::SIOCGIFINDEX(req)
function_block-random_span
[ { "content": "// TODO: rename this to do_poll after the old version is removed\n\npub fn do_poll_new(poll_fds: &[PollFd], mut timeout: Option<&mut Duration>) -> Result<usize> {\n\n debug!(\"poll: poll_fds: {:?}, timeout: {:?}\", poll_fds, timeout);\n\n\n\n // Always clear the revents fields first\n\n for poll_fd in poll_fds {\n\n poll_fd.revents.set(IoEvents::empty());\n\n }\n\n\n\n // Map poll_fds to FileRef's\n\n let thread = current!();\n\n let files: Vec<FileRef> = poll_fds\n\n .iter()\n\n .filter_map(|poll_fd| {\n\n let file = thread.file(poll_fd.fd).ok();\n\n\n\n // Mark an invalid fd by outputting an IoEvents::NVAL event\n\n if file.is_none() {\n\n poll_fd.revents.set(IoEvents::NVAL);\n\n }\n\n\n", "file_path": "src/libos/src/net.deprecated/io_multiplexing/poll_new/mod.rs", "rank": 1, "score": 271979.2481076424 }, { "content": "pub fn do_brk(addr: usize) -> Result<usize> {\n\n debug!(\"brk: addr: {:#x}\", addr);\n\n current!().vm().brk(addr)\n\n}\n\n\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 2, "score": 257278.17545106323 }, { "content": "pub fn do_poll(pollfds: &mut [PollEvent], timeout: *mut timeval_t) -> Result<usize> {\n\n let mut libos_ready_num = 0;\n\n let mut host_ready_num = 0;\n\n let mut notified = 0;\n\n let current = current!();\n\n\n\n // The pollfd of the host file\n\n let mut host_pollfds: Vec<PollEvent> = Vec::new();\n\n // The indices in pollfds of host file\n\n let mut index_host_pollfds: Vec<usize> = Vec::new();\n\n // Vec<usize>: The indices in pollfds which may be more than one for the same file\n\n // PollEvent: the merged pollfd of FileDesc\n\n let mut libos_pollfds: HashMap<FileDesc, (PollEvent, Vec<usize>)> = HashMap::new();\n\n\n\n for (i, pollfd) in pollfds.iter_mut().enumerate() {\n\n // Ignore negative fds\n\n if (pollfd.fd() as i32) < 0 {\n\n continue;\n\n }\n\n\n", "file_path": "src/libos/src/net.deprecated/io_multiplexing/poll.rs", "rank": 3, "score": 251631.5237030327 }, { "content": "pub fn do_munmap(addr: usize, size: usize) -> Result<()> {\n\n debug!(\"munmap: addr: {:#x}, size: {:#x}\", addr, size);\n\n let current = current!();\n\n current!().vm().munmap(addr, size)\n\n}\n\n\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 4, "score": 249772.963568219 }, { "content": "/// Do futex wake\n\npub fn futex_wake(futex_addr: *const i32, max_count: usize) -> Result<usize> {\n\n futex_wake_bitset(futex_addr, max_count, FUTEX_BITSET_MATCH_ANY)\n\n}\n\n\n", "file_path": "src/libos/src/process/do_futex.rs", "rank": 5, "score": 249570.75568002803 }, { "content": "pub fn do_exit(status: i32) {\n\n let term_status = TermStatus::Exited(status as u8);\n\n exit_thread(term_status);\n\n}\n\n\n", "file_path": "src/libos/src/process/do_exit.rs", "rank": 6, "score": 244380.52500672874 }, { "content": "pub fn do_arch_prctl(code: ArchPrctlCode, addr: *mut usize) -> Result<()> {\n\n debug!(\"do_arch_prctl: code: {:?}, addr: {:?}\", code, addr);\n\n match code {\n\n ArchPrctlCode::ARCH_SET_FS => {\n\n CURRENT_CONTEXT.with(|context| {\n\n context.borrow_mut().fs_base = addr as _;\n\n });\n\n }\n\n ArchPrctlCode::ARCH_GET_FS => unsafe {\n\n CURRENT_CONTEXT.with(|context| {\n\n *addr = context.borrow_mut().fs_base as _;\n\n });\n\n },\n\n ArchPrctlCode::ARCH_SET_GS | ArchPrctlCode::ARCH_GET_GS => {\n\n return_errno!(EINVAL, \"GS cannot be accessed from the user space\");\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/process/do_arch_prctl.rs", "rank": 7, "score": 239567.94737204822 }, { "content": "pub fn do_getdents(fd: FileDesc, buf: &mut 
[u8]) -> Result<usize> {\n\n getdents_common::<LinuxDirent>(fd, buf)\n\n}\n\n\n", "file_path": "src/libos/src/fs/file_ops/getdents.rs", "rank": 8, "score": 235235.13140852004 }, { "content": "pub fn do_getdents64(fd: FileDesc, buf: &mut [u8]) -> Result<usize> {\n\n getdents_common::<LinuxDirent64>(fd, buf)\n\n}\n\n\n", "file_path": "src/libos/src/fs/file_ops/getdents.rs", "rank": 9, "score": 235235.13140852004 }, { "content": "/// Do futex wake with bitset\n\npub fn futex_wake_bitset(futex_addr: *const i32, max_count: usize, bitset: u32) -> Result<usize> {\n\n debug!(\n\n \"futex_wake_bitset addr: {:#x}, max_count: {}, bitset: {:#x}\",\n\n futex_addr as usize, max_count, bitset\n\n );\n\n\n\n // Get and lock the futex bucket\n\n let futex_key = FutexKey::new(futex_addr);\n\n let (_, futex_bucket_ref) = FUTEX_BUCKETS.get_bucket(futex_key);\n\n let mut futex_bucket = futex_bucket_ref.lock().unwrap();\n\n\n\n // Dequeue and wake up the items in the bucket\n\n let count = futex_bucket.dequeue_and_wake_items(futex_key, max_count, bitset);\n\n Ok(count)\n\n}\n\n\n", "file_path": "src/libos/src/process/do_futex.rs", "rank": 10, "score": 233579.23707499655 }, { "content": "pub fn do_mprotect(addr: usize, size: usize, perms: VMPerms) -> Result<()> {\n\n debug!(\n\n \"mprotect: addr: {:#x}, size: {:#x}, perms: {:?}\",\n\n addr, size, perms\n\n );\n\n current!().vm().mprotect(addr, size, perms)\n\n}\n\n\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 11, "score": 233231.07913739394 }, { "content": "pub fn do_mremap(\n\n old_addr: usize,\n\n old_size: usize,\n\n new_size: usize,\n\n flags: MRemapFlags,\n\n) -> Result<usize> {\n\n debug!(\n\n \"mremap: old_addr: {:#x}, old_size: {:#x}, new_size: {:#x}, flags: {:?}\",\n\n old_addr, old_size, new_size, flags\n\n );\n\n current!().vm().mremap(old_addr, old_size, new_size, flags)\n\n}\n\n\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 12, "score": 232202.60788475795 }, { "content": "pub fn do_mmap(\n\n addr: usize,\n\n size: usize,\n\n perms: VMPerms,\n\n flags: MMapFlags,\n\n fd: FileDesc,\n\n offset: usize,\n\n) -> Result<usize> {\n\n if flags.contains(MMapFlags::MAP_ANONYMOUS) {\n\n debug!(\n\n \"mmap: addr: {:#x}, size: {:#x}, perms: {:?}, flags: {:?}\",\n\n addr, size, perms, flags,\n\n );\n\n } else {\n\n debug!(\n\n \"mmap: addr: {:#x}, size: {:#x}, perms: {:?}, flags: {:?}, fd: {:?}, offset: {:?}\",\n\n addr, size, perms, flags, fd, offset\n\n );\n\n }\n\n\n\n current!().vm().mmap(addr, size, perms, flags, fd, offset)\n\n}\n\n\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 13, "score": 232202.60788475795 }, { "content": "pub fn do_readlinkat(fs_path: &FsPath, buf: &mut [u8]) -> Result<usize> {\n\n debug!(\"readlinkat: fs_path: {:?}\", fs_path);\n\n\n\n let file_path = {\n\n let inode = {\n\n let current = current!();\n\n let fs = current.fs().read().unwrap();\n\n fs.lookup_inode_no_follow(fs_path)?\n\n };\n\n if inode.metadata()?.type_ != FileType::SymLink {\n\n return_errno!(EINVAL, \"not a symbolic link\");\n\n }\n\n let mut content = vec![0u8; PATH_MAX];\n\n let len = inode.read_at(0, &mut content)?;\n\n let path =\n\n std::str::from_utf8(&content[..len]).map_err(|_| errno!(EINVAL, \"invalid symlink\"))?;\n\n String::from(path)\n\n };\n\n let len = file_path.len().min(buf.len());\n\n buf[0..len].copy_from_slice(&file_path.as_bytes()[0..len]);\n\n Ok(len)\n\n}\n\n\n", "file_path": "src/libos/src/fs/file_ops/symlink.rs", "rank": 14, "score": 232138.50204788632 }, { "content": "pub fn do_set_robust_list(list_head_ptr: *mut 
RobustListHead, len: usize) -> Result<()> {\n\n debug!(\n\n \"set_robust_list: list_head_ptr: {:?}, len: {}\",\n\n list_head_ptr, len\n\n );\n\n if std::mem::size_of::<RobustListHead>() != len {\n\n return_errno!(EINVAL, \"invalid size for RobustListHead\");\n\n }\n\n // We do not check if the pointer is a valid user space pointer, deferring\n\n // it in waking the robust list. If the pointer is invalid, we just stop\n\n // waking the robust list.\n\n let robust_list = NonNull::new(list_head_ptr);\n\n let current = current!();\n\n current.set_robust_list(robust_list);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/process/do_robust_list.rs", "rank": 15, "score": 230605.21399418445 }, { "content": "pub fn do_msync(addr: usize, size: usize, flags: MSyncFlags) -> Result<()> {\n\n debug!(\n\n \"msync: addr: {:#x}, size: {:#x}, flags: {:?}\",\n\n addr, size, flags\n\n );\n\n if flags.contains(MSyncFlags::MS_INVALIDATE) {\n\n return_errno!(EINVAL, \"not support MS_INVALIDATE\");\n\n }\n\n if flags.contains(MSyncFlags::MS_ASYNC) {\n\n warn!(\"not support MS_ASYNC\");\n\n }\n\n current!().vm().msync(addr, size)\n\n}\n\n\n\npub const PAGE_SIZE: usize = 4096;\n", "file_path": "src/libos/src/vm/mod.rs", "rank": 16, "score": 230006.30209153754 }, { "content": "/// Spawn a new process and execute it in a new host thread.\n\npub fn do_spawn(\n\n elf_path: &str,\n\n argv: &[CString],\n\n envp: &[CString],\n\n file_actions: &[FileAction],\n\n spawn_attributes: Option<SpawnAttr>,\n\n current_ref: &ThreadRef,\n\n) -> Result<pid_t> {\n\n let exec_now = true;\n\n do_spawn_common(\n\n elf_path,\n\n argv,\n\n envp,\n\n file_actions,\n\n spawn_attributes,\n\n None,\n\n None,\n\n current_ref,\n\n exec_now,\n\n )\n\n}\n\n\n", "file_path": "src/libos/src/process/do_spawn/mod.rs", "rank": 17, "score": 228729.17280100848 }, { "content": "pub fn init() {\n\n unsafe {\n\n let status = sgx::sgx_interrupt_init(interrupt_entrypoint);\n\n assert!(status == sgx_status_t::SGX_SUCCESS);\n\n }\n\n}\n\n\n\nextern \"C\" fn interrupt_entrypoint(sgx_interrupt_info: *mut sgx_interrupt_info_t) -> i32 {\n\n let sgx_interrupt_info = unsafe { &mut *sgx_interrupt_info };\n\n\n\n // Update the current CPU context\n\n let mut curr_context_ptr = context_switch::current_context_ptr();\n\n let curr_context = unsafe { curr_context_ptr.as_mut() };\n\n // Save CPU's floating-point registers at the time when the exception occurs.\n\n // Note that we do this at the earliest possible time in hope that\n\n // the floating-point registers have not been tainted by the LibOS.\n\n curr_context.fp_regs.save();\n\n // Save CPU's general-purpose registers\n\n curr_context.gp_regs = GpRegs::from(&sgx_interrupt_info.cpu_context);\n\n\n", "file_path": "src/libos/src/entry/interrupt/mod.rs", "rank": 18, "score": 228729.17280100848 }, { "content": "/// Spawn a new process but execute it later.\n\npub fn do_spawn_root(\n\n elf_path: &str,\n\n argv: &[CString],\n\n envp: &[CString],\n\n file_actions: &[FileAction],\n\n spawn_attributes: Option<SpawnAttr>,\n\n host_stdio_fds: &HostStdioFds,\n\n wake_host: *mut i32,\n\n current_ref: &ThreadRef,\n\n) -> Result<pid_t> {\n\n let exec_now = false;\n\n do_spawn_common(\n\n elf_path,\n\n argv,\n\n envp,\n\n file_actions,\n\n spawn_attributes,\n\n Some(host_stdio_fds),\n\n Some(wake_host),\n\n current_ref,\n\n exec_now,\n\n )\n\n}\n\n\n", "file_path": "src/libos/src/process/do_spawn/mod.rs", "rank": 19, "score": 225407.4425409594 }, { "content": "pub fn do_uname(name: &mut utsname_t) -> Result<()> {\n\n 
copy_from_cstr_to_u8_array(&SYSNAME, &mut name.sysname);\n\n copy_from_cstr_to_u8_array(&NODENAME.read().unwrap(), &mut name.nodename);\n\n copy_from_cstr_to_u8_array(&RELEASE, &mut name.release);\n\n copy_from_cstr_to_u8_array(&VERSION, &mut name.version);\n\n copy_from_cstr_to_u8_array(&MACHINE, &mut name.machine);\n\n copy_from_cstr_to_u8_array(&DOMAINNAME, &mut name.domainname);\n\n Ok(())\n\n}\n\n\n\nlazy_static! {\n\n static ref SYSNAME: CString = CString::new(\"Occlum\").unwrap();\n\n static ref NODENAME: RwLock<CString> = RwLock::new(CString::new(\"occlum-node\").unwrap());\n\n static ref RELEASE: CString = CString::new(\"0.1\").unwrap();\n\n static ref VERSION: CString = CString::new(\"0.1\").unwrap();\n\n static ref MACHINE: CString = CString::new(\"x86-64\").unwrap();\n\n static ref DOMAINNAME: CString = CString::new(\"\").unwrap();\n\n}\n\n\n", "file_path": "src/libos/src/misc/uname.rs", "rank": 20, "score": 223594.89322729598 }, { "content": "/// Create a new process for execve which will use same parent, pid, tid\n\npub fn new_process_for_exec(\n\n file_path: &str,\n\n argv: &[CString],\n\n envp: &[CString],\n\n current_ref: &ThreadRef,\n\n reuse_tid: Option<ThreadId>,\n\n parent_process: Option<ProcessRef>,\n\n) -> Result<(ProcessRef, CpuContext)> {\n\n let tid = ThreadId {\n\n tid: current_ref.process().pid() as u32,\n\n };\n\n let (new_process_ref, init_cpu_state) = new_process_common(\n\n file_path,\n\n argv,\n\n envp,\n\n &Vec::new(),\n\n None,\n\n None,\n\n None,\n\n current_ref,\n\n reuse_tid,\n\n parent_process,\n\n )?;\n\n\n\n Ok((new_process_ref, init_cpu_state))\n\n}\n\n\n", "file_path": "src/libos/src/process/do_spawn/mod.rs", "rank": 21, "score": 222233.34391101805 }, { "content": "pub fn disable_current_thread() {\n\n unsafe {\n\n let status = sgx::sgx_interrupt_disable();\n\n assert!(status == sgx_status_t::SGX_SUCCESS);\n\n }\n\n}\n", "file_path": "src/libos/src/entry/interrupt/mod.rs", "rank": 22, "score": 222227.690777751 }, { "content": "pub fn socketpair(\n\n socket_type: SocketType,\n\n flags: FileFlags,\n\n protocol: i32,\n\n) -> Result<(Stream, Stream)> {\n\n if protocol != 0 && protocol != AddressFamily::LOCAL as i32 {\n\n return_errno!(EPROTONOSUPPORT, \"protocol is not supported\");\n\n }\n\n\n\n if socket_type == SocketType::STREAM {\n\n Stream::socketpair(flags)\n\n } else {\n\n return_errno!(ESOCKTNOSUPPORT, \"only stream type is supported\");\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/socket/unix/mod.rs", "rank": 23, "score": 222227.690777751 }, { "content": "pub fn enable_current_thread() {\n\n // Interruptible range\n\n let (addr, size) = {\n\n let thread = current!();\n\n let vm = thread.vm();\n\n let range = vm.get_process_range();\n\n (range.start(), range.size())\n\n };\n\n unsafe {\n\n let status = sgx::sgx_interrupt_enable(addr, size);\n\n assert!(status == sgx_status_t::SGX_SUCCESS);\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/entry/interrupt/mod.rs", "rank": 24, "score": 222227.690777751 }, { "content": "pub fn register_exception_handlers() {\n\n setup_cpuid_info();\n\n // Register handlers whose priorities go from low to high\n\n unsafe {\n\n let is_first = 1;\n\n sgx_register_exception_handler(is_first, exception_entrypoint);\n\n }\n\n}\n\n\n\n#[no_mangle]\n\nextern \"C\" fn exception_entrypoint(sgx_except_info: *mut sgx_exception_info_t) -> i32 {\n\n let sgx_except_info = unsafe { &mut *sgx_except_info };\n\n\n\n // Update the current CPU context\n\n let mut curr_context_ptr = 
context_switch::current_context_ptr();\n\n let curr_context = unsafe { curr_context_ptr.as_mut() };\n\n // Save CPU's floating-point registers at the time when the exception occurs.\n\n // Note that we do this at the earliest possible time in hope that\n\n // the floating-point registers have not been tainted by the LibOS.\n\n curr_context.fp_regs.save();\n", "file_path": "src/libos/src/entry/exception/mod.rs", "rank": 25, "score": 222227.690777751 }, { "content": "pub fn do_gettimeofday() -> timeval_t {\n\n let tv = timeval_t::from(vdso_time::clock_gettime(ClockId::CLOCK_REALTIME).unwrap());\n\n tv.validate()\n\n .expect(\"gettimeofday returned invalid timeval_t\");\n\n tv\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\n#[allow(non_camel_case_types)]\n\npub struct timespec_t {\n\n sec: time_t,\n\n nsec: i64,\n\n}\n\n\n\nimpl From<Duration> for timespec_t {\n\n fn from(duration: Duration) -> timespec_t {\n\n let sec = duration.as_secs() as time_t;\n\n let nsec = duration.subsec_nanos() as i64;\n\n debug_assert!(sec >= 0); // nsec >= 0 always holds\n", "file_path": "src/libos/src/time/mod.rs", "rank": 26, "score": 221167.01678037574 }, { "content": "pub fn do_exit_group(status: i32) -> Result<isize> {\n\n if is_vforked_child_process() {\n\n let current = current!();\n\n let mut curr_user_ctxt = CURRENT_CONTEXT.with(|context| context.as_ptr());\n\n return vfork_return_to_parent(curr_user_ctxt as *mut _, &current);\n\n } else {\n\n let term_status = TermStatus::Exited(status as u8);\n\n let current = current!();\n\n current.process().force_exit(term_status);\n\n exit_thread(term_status);\n\n\n\n notify_all_threads_to_exit(current.process());\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/process/do_exit.rs", "rank": 27, "score": 220722.36141935014 }, { "content": "pub fn get_random(rand: &mut [u8]) -> Result<()> {\n\n use sgx_types::sgx_status_t;\n\n extern \"C\" {\n\n fn sgx_read_rand(rand_buf: *mut u8, buf_size: usize) -> sgx_status_t;\n\n }\n\n const MAX_TIMES: u32 = 50;\n\n\n\n if rand.is_empty() {\n\n return Ok(());\n\n }\n\n // sgx_read_rand() may fail because of HW failure of RDRAND instruction,\n\n // add retries to get the random number.\n\n for _ in 0..MAX_TIMES {\n\n let status = unsafe { sgx_read_rand(rand.as_mut_ptr(), rand.len()) };\n\n match status {\n\n sgx_status_t::SGX_SUCCESS => {\n\n return Ok(());\n\n }\n\n sgx_status_t::SGX_ERROR_INVALID_PARAMETER => {\n\n panic!(\"invalid argument to get random number from SGX\");\n\n }\n\n _ => {}\n\n }\n\n }\n\n Err(errno!(EAGAIN, \"failed to get random number from SGX\"))\n\n}\n", "file_path": "src/libos/src/misc/random.rs", "rank": 28, "score": 220663.01533036 }, { "content": "#[cfg(feature = \"sgx\")]\n\nfn libc_errno() -> i32 {\n\n libc::errno()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use test::Bencher;\n\n\n\n use super::*;\n\n use crate::tests::Runtime;\n\n\n\n #[bench]\n\n fn write_first_page(b: &mut Bencher) {\n\n let path = \"write_first_page.data\";\n\n let file = {\n\n let path = path.to_string();\n\n let flags = libc::O_WRONLY | libc::O_CREAT | libc::O_TRUNC;\n\n let mode = libc::S_IRUSR | libc::S_IWUSR;\n\n AsyncFile::<Runtime>::open(path.clone(), flags, mode).unwrap()\n\n };\n", "file_path": "src/libos/crates/async-file/src/file/mod.rs", "rank": 29, "score": 220170.09740101622 }, { "content": "#[proc_macro_attribute]\n\npub fn inherit_methods(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let attr = {\n\n let attr_tokens = 
syn::parse_macro_input!(attr as AttributeArgs);\n\n match MacroAttr::from_list(&attr_tokens) {\n\n Ok(attr) => attr,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n }\n\n };\n\n let item_impl = syn::parse_macro_input!(item as syn::ItemImpl);\n\n do_inherit_methods(attr, item_impl).into()\n\n}\n\n\n", "file_path": "src/libos/crates/inherit-methods-macro/src/lib.rs", "rank": 30, "score": 219679.88309783503 }, { "content": "//TODO: rewrite this file when a new kind of uds is added\n\npub fn unix_socket(socket_type: SocketType, flags: FileFlags, protocol: i32) -> Result<Stream> {\n\n if protocol != 0 && protocol != AddressFamily::LOCAL as i32 {\n\n return_errno!(EPROTONOSUPPORT, \"protocol is not supported\");\n\n }\n\n\n\n if socket_type == SocketType::STREAM {\n\n Ok(Stream::new(flags))\n\n } else {\n\n return_errno!(ESOCKTNOSUPPORT, \"only stream type is supported\");\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/socket/unix/mod.rs", "rank": 31, "score": 216621.29964830703 }, { "content": "pub fn allow_debug() -> bool {\n\n let self_report = create_report(None, None).expect(\"create a self report should never fail\");\n\n (self_report.body.attributes.flags & SGX_FLAGS_DEBUG) == SGX_FLAGS_DEBUG\n\n}\n", "file_path": "src/libos/src/util/sgx/mod.rs", "rank": 32, "score": 214665.53475711826 }, { "content": "#[cfg(not(feature = \"sgx\"))]\n\nfn get_ifconf_by_host(fd: HostFd, if_conf: &mut IfConf) -> Result<()> {\n\n try_libc!(libc::ioctl(\n\n fd as _,\n\n SIOCGIFCONF as _,\n\n if_conf as *mut IfConf as *mut i32\n\n ));\n\n Ok(())\n\n}\n", "file_path": "src/libos/crates/host-socket/src/ioctl/get_ifconf.rs", "rank": 33, "score": 212573.11301815746 }, { "content": "pub fn align_up(addr: usize, align: usize) -> usize {\n\n debug_assert!(align != 0 && align.is_power_of_two());\n\n align_down(addr + (align - 1), align)\n\n}\n\n\n", "file_path": "src/libos/src/prelude.rs", "rank": 34, "score": 212461.0710595254 }, { "content": "pub fn align_down(addr: usize, align: usize) -> usize {\n\n debug_assert!(align != 0 && align.is_power_of_two());\n\n addr & !(align - 1)\n\n}\n\n\n", "file_path": "src/libos/src/prelude.rs", "rank": 35, "score": 212461.0710595254 }, { "content": "pub fn do_rdtsc() -> (u32, u32) {\n\n extern \"C\" {\n\n fn occlum_ocall_rdtsc(low: *mut u32, high: *mut u32) -> sgx_status_t;\n\n }\n\n let mut low = 0;\n\n let mut high = 0;\n\n let sgx_status = unsafe { occlum_ocall_rdtsc(&mut low, &mut high) };\n\n assert!(sgx_status == sgx_status_t::SGX_SUCCESS);\n\n (low, high)\n\n}\n\n\n\n// For SEFS\n\npub struct OcclumTimeProvider;\n\n\n\nimpl TimeProvider for OcclumTimeProvider {\n\n fn current_time(&self) -> Timespec {\n\n let time = do_clock_gettime(ClockId::CLOCK_REALTIME).expect(\"do_clock_gettime() failed\");\n\n Timespec {\n\n sec: time.sec,\n\n nsec: time.nsec,\n", "file_path": "src/libos/src/time/mod.rs", "rank": 36, "score": 211641.7534924255 }, { "content": "// Generate a random address within [0, range]\n\n// Note: This function doesn't gurantee alignment\n\npub fn get_randomize_offset(range: usize) -> usize {\n\n if cfg!(debug_assertions) {\n\n return range;\n\n }\n\n\n\n use crate::misc::get_random;\n\n let mut random_buf: [u8; 8] = [0u8; 8]; // same length as usize\n\n get_random(&mut random_buf).expect(\"failed to get random number\");\n\n let random_num: usize = u64::from_le_bytes(random_buf) as usize;\n\n random_num % range\n\n}\n", "file_path": "src/libos/src/vm/vm_util.rs", "rank": 37, "score": 208821.18695309377 }, { "content": "pub fn 
do_thread_getcpuclock() -> Result<timespec_t> {\n\n extern \"C\" {\n\n fn occlum_ocall_thread_getcpuclock(ret: *mut c_int, tp: *mut timespec_t) -> sgx_status_t;\n\n }\n\n\n\n let mut tv: timespec_t = Default::default();\n\n try_libc!({\n\n let mut retval: i32 = 0;\n\n let status = occlum_ocall_thread_getcpuclock(&mut retval, &mut tv as *mut timespec_t);\n\n assert!(status == sgx_status_t::SGX_SUCCESS);\n\n retval\n\n });\n\n tv.validate()?;\n\n Ok(tv)\n\n}\n\n\n", "file_path": "src/libos/src/time/mod.rs", "rank": 38, "score": 208462.0017292171 }, { "content": "// Returns whether an item inside `impl XXX { ... }` is a method without code block.\n\nfn is_method_missing_fn_block(impl_item: &mut ImplItem) -> Option<&mut ImplItemMethod> {\n\n // We only care about method items.\n\n let impl_item_method = if let ImplItem::Method(method) = impl_item {\n\n method\n\n } else {\n\n return None;\n\n };\n\n // We only care about methods without a code block.\n\n if !impl_item_method.block.is_empty() {\n\n return None;\n\n }\n\n Some(impl_item_method)\n\n}\n\n\n", "file_path": "src/libos/crates/inherit-methods-macro/src/lib.rs", "rank": 39, "score": 208116.73036133277 }, { "content": "pub fn do_get_priority(which: PrioWhich, who: i32) -> Result<NiceValue> {\n\n debug!(\"get_priority: which: {:?}, who: {}\", which, who);\n\n\n\n let processes = get_processes(which, who)?;\n\n let prio = {\n\n let mut prio = NiceValue::max_value();\n\n for process in processes.iter() {\n\n let main_thread = process\n\n .main_thread()\n\n .ok_or_else(|| errno!(ESRCH, \"invalid pid\"))?;\n\n let nice_value = main_thread.nice().read().unwrap();\n\n // Returns the highest priority enjoyed by the processes\n\n if *nice_value < prio {\n\n prio = *nice_value;\n\n }\n\n }\n\n prio\n\n };\n\n Ok(prio)\n\n}\n\n\n", "file_path": "src/libos/src/sched/do_priority.rs", "rank": 40, "score": 207257.72351356316 }, { "content": "pub fn do_getrlimit(resource: resource_t, old_limit: &mut rlimit_t) -> Result<()> {\n\n do_prlimit(0 as pid_t, resource, None, Some(old_limit))\n\n}\n\n\n", "file_path": "src/libos/src/misc/rlimit.rs", "rank": 41, "score": 204500.3201754754 }, { "content": "pub fn do_set_tid_address(tidptr: *mut pid_t) -> Result<pid_t> {\n\n debug!(\"set_tid_address: tidptr: {:?}\", tidptr);\n\n let clear_ctid = NonNull::new(tidptr);\n\n let current = current!();\n\n current.set_clear_ctid(clear_ctid);\n\n Ok(current.tid())\n\n}\n", "file_path": "src/libos/src/process/do_set_tid_address.rs", "rank": 42, "score": 204346.95716614294 }, { "content": "pub fn do_get_robust_list(tid: pid_t) -> Result<*mut RobustListHead> {\n\n debug!(\"get_robust_list: tid: {}\", tid);\n\n let thread = if tid == 0 {\n\n current!()\n\n } else {\n\n super::table::get_thread(tid)?\n\n };\n\n let robust_list_ptr = thread\n\n .robust_list()\n\n .map(|robust_list| robust_list.as_ptr())\n\n .unwrap_or(std::ptr::null_mut());\n\n Ok(robust_list_ptr)\n\n}\n\n\n\n/// This struct is same as Linux's robust_list\n", "file_path": "src/libos/src/process/do_robust_list.rs", "rank": 43, "score": 201939.47028106748 }, { "content": "pub fn do_getrandom(rand_buf: &mut [u8], flags: RandFlags) -> Result<()> {\n\n debug!(\"getrandom: flags: {:?}\", flags);\n\n if flags.contains(RandFlags::GRND_NONBLOCK) {\n\n get_random(rand_buf)\n\n } else {\n\n get_random_blocking(rand_buf)\n\n }\n\n}\n\n\n\nbitflags! 
{\n\n pub struct RandFlags: u32 {\n\n /// Don't block and return EAGAIN instead\n\n const GRND_NONBLOCK = 0x0001;\n\n /// No effect\n\n const GRND_RANDOM = 0x0002;\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/misc/random.rs", "rank": 44, "score": 201901.40725104595 }, { "content": "pub fn do_recvmsg(fd: c_int, msg_mut_ptr: *mut msghdr_mut, flags_c: c_int) -> Result<isize> {\n\n debug!(\n\n \"recvmsg: fd: {}, msg: {:?}, flags: 0x{:x}\",\n\n fd, msg_mut_ptr, flags_c\n\n );\n\n\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let msg_mut_c = {\n\n from_user::check_mut_ptr(msg_mut_ptr)?;\n\n let msg_mut_c = unsafe { &mut *msg_mut_ptr };\n\n msg_mut_c.check_member_ptrs()?;\n\n msg_mut_c\n\n };\n\n let mut msg_mut = unsafe { MsgHdrMut::from_c(msg_mut_c)? };\n\n\n\n let flags = RecvFlags::from_bits_truncate(flags_c);\n\n\n\n socket\n\n .recvmsg(&mut msg_mut, flags)\n\n .map(|bytes_recvd| bytes_recvd as isize)\n\n } else if let Ok(socket) = file_ref.as_unix_socket() {\n\n return_errno!(EBADF, \"does not support unix socket\")\n\n } else {\n\n return_errno!(EBADF, \"not a socket\")\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 45, "score": 200665.9749728712 }, { "content": "pub fn do_set_priority(which: PrioWhich, who: i32, prio: NiceValue) -> Result<()> {\n\n debug!(\n\n \"set_priority: which: {:?}, who: {}, prio: {:?}\",\n\n which, who, prio\n\n );\n\n\n\n let processes = get_processes(which, who)?;\n\n for process in processes.iter() {\n\n let main_thread = process\n\n .main_thread()\n\n .ok_or_else(|| errno!(ESRCH, \"invalid pid\"))?;\n\n *main_thread.nice().write().unwrap() = prio;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/sched/do_priority.rs", "rank": 46, "score": 200219.210856579 }, { "content": "fn new_msghdr(iovecs_ptr: *mut libc::iovec, iovecs_len: usize) -> libc::msghdr {\n\n use std::mem::MaybeUninit;\n\n // Safety. 
Setting all fields to zeros is a valid state for msghdr.\n\n let mut msghdr: libc::msghdr = unsafe { MaybeUninit::zeroed().assume_init() };\n\n msghdr.msg_iov = iovecs_ptr;\n\n msghdr.msg_iovlen = iovecs_len as _;\n\n // We do want to leave all other fields as zeros\n\n msghdr\n\n}\n", "file_path": "src/libos/crates/host-socket/src/stream/states/connected/mod.rs", "rank": 47, "score": 198795.0781443751 }, { "content": "fn getsockopt_by_host(fd: HostFd, level: i32, optname: i32, optval: &mut [u8]) -> Result<u32> {\n\n let max_optlen = optval.len() as u32;\n\n let mut optlen = max_optlen;\n\n try_libc!(do_getsockopt(\n\n fd as _,\n\n level as _,\n\n optname as _,\n\n optval.as_mut_ptr() as _,\n\n &mut optlen as *mut u32\n\n ));\n\n // Defence Iago attack\n\n if optlen > max_optlen {\n\n return_errno!(EINVAL, \"host returns a invalid optlen\");\n\n }\n\n Ok(optlen)\n\n}\n", "file_path": "src/libos/crates/host-socket/src/sockopt/get.rs", "rank": 48, "score": 198050.3439423916 }, { "content": "pub fn get_pgrp_number(pgid: pid_t) -> usize {\n\n PROCESSGRP_TABLE.lock().unwrap().len()\n\n}\n\n\n", "file_path": "src/libos/src/process/table.rs", "rank": 49, "score": 197799.5190432664 }, { "content": "/// Wakeup one robust futex owned by the thread\n\npub fn wake_robust_futex(futex_addr: *const i32, tid: pid_t) -> Result<()> {\n\n let futex_val = {\n\n check_ptr(futex_addr)?;\n\n unsafe { AtomicU32::from_mut(&mut *(futex_addr as *mut u32)) }\n\n };\n\n let mut old_val = futex_val.load(Ordering::SeqCst);\n\n loop {\n\n // This futex may held by another thread, do nothing\n\n if old_val & FUTEX_TID_MASK != tid {\n\n break;\n\n }\n\n let new_val = (old_val & FUTEX_WAITERS) | FUTEX_OWNER_DIED;\n\n if let Err(cur_val) =\n\n futex_val.compare_exchange(old_val, new_val, Ordering::SeqCst, Ordering::SeqCst)\n\n {\n\n // The futex value has changed, let's retry with current value\n\n old_val = cur_val;\n\n continue;\n\n }\n\n // Wakeup one waiter\n\n if futex_val.load(Ordering::SeqCst) & FUTEX_WAITERS != 0 {\n\n debug!(\"wake robust futex addr: {:?}\", futex_addr);\n\n super::do_futex::futex_wake(futex_addr, 1)?;\n\n }\n\n break;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/libos/src/process/do_robust_list.rs", "rank": 50, "score": 197045.8771471858 }, { "content": "pub fn align_down(addr: usize, align: usize) -> usize {\n\n debug_assert!(align.is_power_of_two());\n\n addr & !(align - 1)\n\n}\n\n\n", "file_path": "src/libos/crates/sgx-untrusted-alloc/src/untrusted_allocator/vm_util.rs", "rank": 51, "score": 195924.35981627242 }, { "content": "pub fn align_up(addr: usize, align: usize) -> usize {\n\n debug_assert!(align.is_power_of_two());\n\n align_down(addr + (align - 1), align)\n\n}\n", "file_path": "src/libos/crates/sgx-untrusted-alloc/src/untrusted_allocator/vm_util.rs", "rank": 52, "score": 195924.35981627242 }, { "content": "pub fn get_self_target() -> Result<sgx_target_info_t> {\n\n let mut self_target = sgx_target_info_t::default();\n\n let sgx_status = unsafe { sgx_self_target(&mut self_target) };\n\n match sgx_status {\n\n sgx_status_t::SGX_SUCCESS => Ok(self_target),\n\n _ => return_errno!(EINVAL, \"unexpected SGX error\"),\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/util/sgx/sgx_report.rs", "rank": 53, "score": 194882.59461335553 }, { "content": "/// Get a pointer to the current `CpuContext` that is being used by the\n\n/// on-going `switch_to_user` on the current vCPU.\n\npub fn current_context_ptr() -> NonNull<CpuContext> {\n\n let ptr = unsafe { __current_context_ptr() };\n\n 
NonNull::new(ptr).unwrap()\n\n}\n\n\n\nextern \"C\" {\n\n // C functions\n\n #[allow(improper_ctypes)]\n\n fn _switch_to_user(user_context: *mut CpuContext, fault: *mut Fault);\n\n\n\n // Assembly functions\n\n fn __switch_to_kernel() -> !;\n\n #[allow(improper_ctypes)]\n\n fn __current_context_ptr() -> *mut CpuContext;\n\n fn __current_fault_ptr() -> *mut Fault;\n\n}\n", "file_path": "src/libos/src/entry/context_switch/mod.rs", "rank": 54, "score": 194410.64769110666 }, { "content": "/// API to initialize the DevFS\n\npub fn init_devfs() -> Result<Arc<MountFS>> {\n\n let devfs = DevFS::new();\n\n let dev_null = Arc::new(DevNull) as _;\n\n devfs.add(\"null\", dev_null)?;\n\n let dev_zero = Arc::new(DevZero) as _;\n\n devfs.add(\"zero\", dev_zero)?;\n\n let dev_random = Arc::new(DevRandom) as _;\n\n devfs.add(\"random\", Arc::clone(&dev_random))?;\n\n devfs.add(\"urandom\", Arc::clone(&dev_random))?;\n\n devfs.add(\"arandom\", Arc::clone(&dev_random))?;\n\n let dev_sgx = Arc::new(DevSgx) as _;\n\n devfs.add(\"sgx\", dev_sgx)?;\n\n let dev_shm = Arc::new(DevShm) as _;\n\n devfs.add(\"shm\", dev_shm)?;\n\n let dev_fd = Arc::new(DevFd) as _;\n\n devfs.add(\"fd\", dev_fd);\n\n let mountable_devfs = MountFS::new(devfs);\n\n // Mount the ramfs at '/shm'\n\n let ramfs = RamFS::new();\n\n mount_fs_at(\n\n ramfs,\n\n &mountable_devfs.root_inode(),\n\n &Path::new(\"/shm\"),\n\n true,\n\n )?;\n\n // TODO: Add stdio(stdin, stdout, stderr) into DevFS\n\n Ok(mountable_devfs)\n\n}\n", "file_path": "src/libos/src/fs/dev_fs/mod.rs", "rank": 55, "score": 191814.1943999111 }, { "content": "pub fn do_prctl(cmd: PrctlCmd) -> Result<isize> {\n\n debug!(\"prctl: {:?}\", cmd);\n\n\n\n let current = current!();\n\n match cmd {\n\n PrctlCmd::PR_SET_NAME(name) => {\n\n current.set_name(name);\n\n }\n\n PrctlCmd::PR_GET_NAME(c_buf) => {\n\n let name = current.name();\n\n c_buf.copy_from_slice(name.as_slice());\n\n }\n\n PrctlCmd::PR_SET_TIMERSLACK(nanoseconds) => {\n\n return_errno!(\n\n EINVAL,\n\n \"Setting timer slack for different libos process is not supported\"\n\n );\n\n }\n\n PrctlCmd::PR_GET_TIMERSLACK(()) => {\n\n let nanoseconds = (*TIMERSLACK).to_u32();\n\n return Ok(nanoseconds as isize);\n\n }\n\n _ => return_errno!(EINVAL, \"Prctl command not supported\"),\n\n }\n\n\n\n Ok(0)\n\n}\n", "file_path": "src/libos/src/process/prctl/mod.rs", "rank": 56, "score": 190116.37837468367 }, { "content": "pub fn do_clock_gettime(clockid: ClockId) -> Result<timespec_t> {\n\n // TODO: support CLOCK_PROCESS_CPUTIME_ID and CLOCK_THREAD_CPUTIME_ID.\n\n if clockid == ClockId::CLOCK_PROCESS_CPUTIME_ID || clockid == ClockId::CLOCK_THREAD_CPUTIME_ID {\n\n return_errno!(\n\n EINVAL,\n\n \"Not support CLOCK_PROCESS_CPUTIME_ID or CLOCK_THREAD_CPUTIME_ID\"\n\n );\n\n }\n\n let tv = timespec_t::from(vdso_time::clock_gettime(clockid).unwrap());\n\n tv.validate()\n\n .expect(\"clock_gettime returned invalid timespec\");\n\n Ok(tv)\n\n}\n\n\n", "file_path": "src/libos/src/time/mod.rs", "rank": 57, "score": 190116.37837468367 }, { "content": "pub fn do_clock_getres(clockid: ClockId) -> Result<timespec_t> {\n\n let res = timespec_t::from(vdso_time::clock_getres(clockid).unwrap());\n\n let validate_resolution = |res: &timespec_t| -> Result<()> {\n\n // The resolution can be ranged from 1 nanosecond to a few milliseconds\n\n if res.sec == 0 && res.nsec > 0 && res.nsec < 1_000_000_000 {\n\n Ok(())\n\n } else {\n\n return_errno!(EINVAL, \"invalid value for resolution\");\n\n }\n\n };\n\n // do sanity check\n\n 
validate_resolution(&res).expect(\"clock_getres returned invalid resolution\");\n\n Ok(res)\n\n}\n\n\n\nconst TIMER_ABSTIME: i32 = 0x01;\n\n\n\npub async fn do_nanosleep(req: &timespec_t, rem: Option<&mut timespec_t>) -> Result<isize> {\n\n do_clock_nanosleep(ClockId::CLOCK_REALTIME, 0, req, rem).await\n\n}\n", "file_path": "src/libos/src/time/mod.rs", "rank": 58, "score": 190116.37837468367 }, { "content": "// Add a code block of method forwarding for the method item.\n\nfn add_fn_block(impl_item_method: &mut ImplItemMethod, field: &Expr) {\n\n let fn_sig = &impl_item_method.sig;\n\n let fn_name = &fn_sig.ident;\n\n let fn_arg_tokens = {\n\n // Extract all argument idents (except self) from the signature\n\n let fn_arg_idents: Vec<&Ident> = fn_sig\n\n .inputs\n\n .iter()\n\n .filter_map(|fn_arg| match fn_arg {\n\n FnArg::Receiver(_) => None,\n\n FnArg::Typed(pat_type) => Some(pat_type),\n\n })\n\n .filter_map(|pat_type| match &*pat_type.pat {\n\n Pat::Ident(pat_ident) => Some(&pat_ident.ident),\n\n _ => None,\n\n })\n\n .collect();\n\n\n\n // Combine all arguments into a comma-separated token stream\n\n let mut fn_arg_tokens = TokenStream::new();\n", "file_path": "src/libos/crates/inherit-methods-macro/src/lib.rs", "rank": 59, "score": 188697.97492349995 }, { "content": "fn to_blocks(size_in_bytes: usize) -> usize {\n\n align_up(size_in_bytes, BLOCK_SIZE) / BLOCK_SIZE\n\n}\n\n\n\nmod runtime {\n\n use io_uring_callback::IoUring;\n\n use sgx_disk::IoUringProvider;\n\n\n\n pub struct IoUringRuntime;\n\n\n\n impl IoUringProvider for IoUringRuntime {\n\n fn io_uring() -> &'static IoUring {\n\n &*crate::io_uring::SINGLETON\n\n }\n\n }\n\n}\n", "file_path": "src/libos/src/fs/builtin_disk.rs", "rank": 60, "score": 188081.80281183837 }, { "content": "pub fn mpx_bndcl(bndreg: MpxReg, addr: usize) {\n\n match bndreg {\n\n MpxReg::BND0 => unsafe { __mpx_bndcl0(addr) },\n\n MpxReg::BND1 => unsafe { __mpx_bndcl1(addr) },\n\n MpxReg::BND2 => unsafe { __mpx_bndcl2(addr) },\n\n MpxReg::BND3 => unsafe { __mpx_bndcl3(addr) },\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/util/mpx_util.rs", "rank": 61, "score": 187890.06641499553 }, { "content": "pub fn mpx_bndcu(bndreg: MpxReg, addr: usize) {\n\n match bndreg {\n\n MpxReg::BND0 => unsafe { __mpx_bndcu0(addr) },\n\n MpxReg::BND1 => unsafe { __mpx_bndcu1(addr) },\n\n MpxReg::BND2 => unsafe { __mpx_bndcu2(addr) },\n\n MpxReg::BND3 => unsafe { __mpx_bndcu3(addr) },\n\n }\n\n}\n\n\n\nextern \"C\" {\n\n // See mpx_util.h\n\n fn __mpx_enable() -> i32;\n\n fn __mpx_bndmk0(base: usize, size: usize);\n\n fn __mpx_bndmk1(base: usize, size: usize);\n\n fn __mpx_bndmk2(base: usize, size: usize);\n\n fn __mpx_bndmk3(base: usize, size: usize);\n\n fn __mpx_bndcl0(x: usize);\n\n fn __mpx_bndcl1(x: usize);\n\n fn __mpx_bndcl2(x: usize);\n\n fn __mpx_bndcl3(x: usize);\n\n fn __mpx_bndcu0(x: usize);\n\n fn __mpx_bndcu1(x: usize);\n\n fn __mpx_bndcu2(x: usize);\n\n fn __mpx_bndcu3(x: usize);\n\n}\n", "file_path": "src/libos/src/util/mpx_util.rs", "rank": 62, "score": 187890.06641499553 }, { "content": "#[test]\n\nfn use_inherited_methods() {\n\n let dummy = DummyObject::new();\n\n assert!(dummy.object_id() == 0);\n\n assert!(&dummy.name() == \"\");\n\n\n\n let new_name = \"this is dummy\";\n\n dummy.set_name(new_name.to_string());\n\n assert!(&dummy.name() == new_name);\n\n}\n", "file_path": "src/libos/crates/inherit-methods-macro/tests/inheritance.rs", "rank": 63, "score": 186110.10089906462 }, { "content": "fn do_inherit_methods(attr: MacroAttr, mut item_impl: ItemImpl) -> 
TokenStream {\n\n // Parse the field to which we will forward method calls\n\n let field: Expr = syn::parse_str(&attr.from).unwrap();\n\n\n\n // Transform this impl item by adding method forwarding code to inherited methods.\n\n for impl_item in &mut item_impl.items {\n\n let impl_item_method = match is_method_missing_fn_block(impl_item) {\n\n Some(method) => method,\n\n None => continue,\n\n };\n\n add_fn_block(impl_item_method, &field);\n\n }\n\n item_impl.into_token_stream()\n\n}\n\n\n", "file_path": "src/libos/crates/inherit-methods-macro/src/lib.rs", "rank": 64, "score": 185026.7111724507 }, { "content": "pub fn mpx_bndmk(bndreg: MpxReg, base: usize, size: usize) -> Result<()> {\n\n /* Check whether the upper bound overflows the max of 64-bit */\n\n if base.checked_add(size).is_none() {\n\n return_errno!(ERANGE, \"Upper bound overflows\");\n\n }\n\n\n\n match bndreg {\n\n MpxReg::BND0 => unsafe { __mpx_bndmk0(base, size) },\n\n MpxReg::BND1 => unsafe { __mpx_bndmk1(base, size) },\n\n MpxReg::BND2 => unsafe { __mpx_bndmk2(base, size) },\n\n MpxReg::BND3 => unsafe { __mpx_bndmk3(base, size) },\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/util/mpx_util.rs", "rank": 65, "score": 184759.50484169088 }, { "content": "#[test]\n\nfn use_inherited_methods() {\n\n // As long as this code compiles, we are sure that the implementation of\n\n // inherited methods are generated. If the code runs successfully, we\n\n // can be sure that the generated implementation is correct.\n\n\n\n let mut stack = Stack::new();\n\n assert!(stack.len() == 0);\n\n stack.push(1);\n\n stack.push(2);\n\n stack.push(3);\n\n assert!(stack.len() == 3);\n\n assert!(stack.pop() == Some(3));\n\n assert!(stack.pop() == Some(2));\n\n assert!(stack.pop() == Some(1));\n\n assert!(stack.len() == 0);\n\n}\n", "file_path": "src/libos/crates/inherit-methods-macro/tests/new_type.rs", "rank": 66, "score": 183172.07910969225 }, { "content": "pub fn do_ftruncate(fd: FileDesc, len: usize) -> Result<()> {\n\n debug!(\"ftruncate: fd: {}, len: {}\", fd, len);\n\n let file_ref = current!().file(fd)?;\n\n if let Some(inode_file) = file_ref.as_inode_file() {\n\n if !inode_file.access_mode().writable() {\n\n return_errno!(EBADF, \"File is not opened for writing\");\n\n }\n\n inode_file.inode().resize(len)?;\n\n Ok(())\n\n } else if let Some(disk_file) = file_ref.as_disk_file() {\n\n warn!(\"disk_file does not support ftruncate\");\n\n Ok(())\n\n } else {\n\n return_errno!(EBADF, \"not supported\");\n\n }\n\n}\n", "file_path": "src/libos/src/fs/file_ops/truncate.rs", "rank": 67, "score": 181520.76717304136 }, { "content": "fn rust_occlum_pal_kill(pid: i32, sig: i32) -> Result<(), i32> {\n\n let ret = unsafe { occlum_pal_kill(pid, sig) };\n\n\n\n if ret == 0 {\n\n return Ok(());\n\n } else {\n\n return Err(ret);\n\n }\n\n}\n", "file_path": "src/exec/src/server.rs", "rank": 68, "score": 180800.06165336468 }, { "content": "/// Get or set resource limits.\n\n///\n\n/// The man page suggests that this system call works on a per-process basis\n\n/// and the input argument pid can only be process ID, not thread ID. This\n\n/// (unnecessary) restriction is lifted by our implementation. 
Nevertheless,\n\n/// since the rlimits object is shared between threads in a process, the\n\n/// semantic of limiting resource usage on a per-process basisi is preserved.\n\n///\n\n/// Limitation: Current implementation only takes effect on child processes.\n\npub fn do_prlimit(\n\n pid: pid_t,\n\n resource: resource_t,\n\n new_limit: Option<&rlimit_t>,\n\n old_limit: Option<&mut rlimit_t>,\n\n) -> Result<()> {\n\n let process = if pid == 0 {\n\n current!()\n\n } else {\n\n process::table::get_thread(pid).cause_err(|_| errno!(ESRCH, \"invalid pid\"))?\n\n };\n\n let mut rlimits = process.rlimits().lock().unwrap();\n\n if let Some(old_limit) = old_limit {\n\n *old_limit = *rlimits.get(resource)\n\n }\n\n if let Some(new_limit) = new_limit {\n\n // Privilege is not granted for setting hard limit\n\n if new_limit.get_max() != u64::max_value() {\n\n return_errno!(EPERM, \"setting hard limit is not permitted\")\n\n }\n", "file_path": "src/libos/src/misc/rlimit.rs", "rank": 69, "score": 180448.61443048145 }, { "content": "pub fn debug() {\n\n println!(\"process table = {:#?}\", PROCESS_TABLE.lock().unwrap());\n\n println!(\"thread table = {:#?}\", THREAD_TABLE.lock().unwrap());\n\n //println!(\"idle = {:#?}\", *super::IDLE);\n\n}\n\n\n\nlazy_static! {\n\n static ref PROCESS_TABLE: SgxMutex<Table<ProcessRef>> =\n\n { SgxMutex::new(Table::<ProcessRef>::with_capacity(8)) };\n\n static ref THREAD_TABLE: SgxMutex<Table<ThreadRef>> =\n\n { SgxMutex::new(Table::<ThreadRef>::with_capacity(8)) };\n\n static ref PROCESSGRP_TABLE: SgxMutex<Table<ProcessGrpRef>> =\n\n { SgxMutex::new(Table::<ProcessGrpRef>::with_capacity(4)) };\n\n static ref PROCESSES_STATUS: Arc<(SgxMutex<bool>, SgxCondvar)> =\n\n Arc::new((SgxMutex::new(false), SgxCondvar::new()));\n\n}\n\n\n", "file_path": "src/libos/src/process/table.rs", "rank": 70, "score": 180448.61443048145 }, { "content": "pub fn init() {\n\n *BOOT_TIME_STAMP;\n\n}\n\n\n", "file_path": "src/libos/src/time/up_time.rs", "rank": 71, "score": 180448.61443048145 }, { "content": "fn write_zeros(pfs_file: &mut PfsFile, begin: usize, end: usize) {\n\n debug_assert!(begin <= end);\n\n\n\n const ZEROS: [u8; BLOCK_SIZE] = [0; BLOCK_SIZE];\n\n\n\n pfs_file.seek(SeekFrom::Start(begin as u64)).unwrap();\n\n let mut remain = end - begin;\n\n while remain > 0 {\n\n let buf_len = remain.min(ZEROS.len());\n\n pfs_file.write(&ZEROS[0..buf_len]).unwrap();\n\n remain -= buf_len;\n\n }\n\n}\n", "file_path": "src/libos/crates/sgx-disk/src/pfs_disk/open_options.rs", "rank": 72, "score": 179694.23883059836 }, { "content": "pub fn do_truncate(fs_path: &FsPath, len: usize) -> Result<()> {\n\n debug!(\"truncate: path: {:?}, len: {}\", fs_path, len);\n\n let inode = {\n\n let current = current!();\n\n let fs = current.fs().read().unwrap();\n\n fs.lookup_inode(fs_path)?\n\n };\n\n inode.resize(len)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/fs/file_ops/truncate.rs", "rank": 73, "score": 179020.2186865486 }, { "content": "pub fn do_socketpair(\n\n domain: c_int,\n\n socket_type: c_int,\n\n protocol: c_int,\n\n sv: *mut c_int,\n\n) -> Result<isize> {\n\n let mut sock_pair = unsafe {\n\n from_user::check_mut_array(sv, 2)?;\n\n std::slice::from_raw_parts_mut(sv as *mut u32, 2)\n\n };\n\n\n\n let file_flags = FileFlags::from_bits_truncate(socket_type);\n\n let close_on_spawn = file_flags.contains(FileFlags::SOCK_CLOEXEC);\n\n let sock_type = SocketType::try_from(socket_type & (!file_flags.bits()))?;\n\n\n\n let domain = AddressFamily::try_from(domain as u16)?;\n\n if (domain == 
AddressFamily::LOCAL) {\n\n let (client_socket, server_socket) = socketpair(sock_type, file_flags, protocol as i32)?;\n\n\n\n let current = current!();\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 74, "score": 178130.60825177463 }, { "content": "pub fn do_getsockopt(\n\n fd: c_int,\n\n level: c_int,\n\n optname: c_int,\n\n optval: *mut c_void,\n\n optlen: *mut libc::socklen_t,\n\n) -> Result<isize> {\n\n debug!(\n\n \"getsockopt: fd: {}, level: {}, optname: {}, optval: {:?}, optlen: {:?}\",\n\n fd, level, optname, optval, optlen\n\n );\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n let socket = file_ref.as_host_socket()?;\n\n\n\n let ret = try_libc!(libc::ocall::getsockopt(\n\n socket.raw_host_fd() as i32,\n\n level,\n\n optname,\n\n optval,\n\n optlen\n\n ));\n\n Ok(ret as isize)\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 75, "score": 178130.60825177463 }, { "content": "/// Do futex requeue\n\npub fn futex_requeue(\n\n futex_addr: *const i32,\n\n max_nwakes: usize,\n\n max_nrequeues: usize,\n\n futex_new_addr: *const i32,\n\n) -> Result<usize> {\n\n if futex_new_addr == futex_addr {\n\n return futex_wake(futex_addr, max_nwakes);\n\n }\n\n let futex_key = FutexKey::new(futex_addr);\n\n let futex_new_key = FutexKey::new(futex_new_addr);\n\n let (bucket_idx, futex_bucket_ref) = FUTEX_BUCKETS.get_bucket(futex_key);\n\n let (new_bucket_idx, futex_new_bucket_ref) = FUTEX_BUCKETS.get_bucket(futex_new_key);\n\n let nwakes = {\n\n if bucket_idx != new_bucket_idx {\n\n let (mut futex_bucket, mut futex_new_bucket) = {\n\n if bucket_idx < new_bucket_idx {\n\n let mut futex_bucket = futex_bucket_ref.lock().unwrap();\n\n let mut futex_new_bucket = futex_new_bucket_ref.lock().unwrap();\n\n (futex_bucket, futex_new_bucket)\n", "file_path": "src/libos/src/process/do_futex.rs", "rank": 76, "score": 178130.60825177463 }, { "content": "pub fn do_accept(\n\n fd: c_int,\n\n addr: *mut libc::sockaddr,\n\n addr_len: *mut libc::socklen_t,\n\n) -> Result<isize> {\n\n do_accept4(fd, addr, addr_len, 0)\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 77, "score": 178130.60825177463 }, { "content": "pub fn do_connect(\n\n fd: c_int,\n\n addr: *const libc::sockaddr,\n\n addr_len: libc::socklen_t,\n\n) -> Result<isize> {\n\n // For SOCK_DGRAM sockets not initiated in connection-mode,\n\n // if address is a null address for the protocol,\n\n // the socket's peer address shall be reset.\n\n let addr_set: bool = !addr.is_null();\n\n if addr_set {\n\n from_user::check_array(addr as *const u8, addr_len as usize)?;\n\n }\n\n\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let addr_option = if addr_set {\n\n Some(unsafe { SockAddr::try_from_raw(addr, addr_len)? })\n\n } else {\n\n None\n\n };\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 78, "score": 178130.60825177463 }, { "content": "/// Deliver a queued signal for the current thread, respecting the thread's\n\n/// signal mask.\n\n///\n\n/// The delivery of a signal means two things: 1) dequeuing the signal from\n\n/// the per-thread or per-process signal queue, and 2) handling the signal\n\n/// according to the signal disposition.\n\n///\n\n/// When handling a signal, one of the three actions below will be done:\n\n///\n\n/// 1. Ignore the signal. This is the easy part.\n\n///\n\n/// 2. Terminate the process if the signal is fatal. This is called \"force exit\".\n\n///\n\n/// 3. 
Call a user-registered signal handler. In this case, the current CPU context\n\n/// will be modified so that the user-registered signal handler will be called\n\n/// upon returning to the user space when the current syscall is finished.\n\n///\n\n/// **Requirement.** This must be called only once during the execution of a\n\n/// syscall and at a very late stage.\n\n///\n\n/// **Post-condition.** The temporary signal mask of the current thread is cleared.\n\n///\n\n/// **Interaction with force_signal.** If force_signal is called during a syscall,\n\n/// then deliver_signal won't deliver any signals.\n\npub fn deliver_signal() {\n\n let thread = current!();\n\n\n\n if thread.process().is_forced_to_exit() {\n\n return;\n\n }\n\n\n\n if !forced_signal_flag::get() {\n\n do_deliver_signal(&thread);\n\n } else {\n\n forced_signal_flag::reset();\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/signal/do_sigreturn.rs", "rank": 79, "score": 178130.60825177463 }, { "content": "pub fn mount_fs_at(\n\n fs: Arc<dyn FileSystem>,\n\n parent_inode: &Arc<dyn INode>,\n\n path: &Path,\n\n follow_symlink: bool,\n\n) -> Result<()> {\n\n let path = path\n\n .to_str()\n\n .ok_or_else(|| errno!(EINVAL, \"invalid path\"))?;\n\n let mount_dir = if follow_symlink {\n\n parent_inode.lookup_follow(path, MAX_SYMLINKS)?\n\n } else {\n\n let (dir_path, file_name) = split_path(path);\n\n parent_inode\n\n .lookup_follow(dir_path, MAX_SYMLINKS)?\n\n .lookup(file_name)?\n\n };\n\n mount_dir.downcast_ref::<MNode>().unwrap().mount(fs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/fs/rootfs.rs", "rank": 80, "score": 178130.60825177463 }, { "content": "pub fn do_select(\n\n nfds: c_int,\n\n readfds: *mut libc::fd_set,\n\n writefds: *mut libc::fd_set,\n\n exceptfds: *mut libc::fd_set,\n\n timeout: *mut timeval_t,\n\n) -> Result<isize> {\n\n let nfds = {\n\n let soft_rlimit_nofile = current!()\n\n .rlimits()\n\n .lock()\n\n .unwrap()\n\n .get(resource_t::RLIMIT_NOFILE)\n\n .get_cur();\n\n if nfds < 0 || nfds > libc::FD_SETSIZE as i32 || nfds as u64 > soft_rlimit_nofile {\n\n return_errno!(\n\n EINVAL,\n\n \"nfds is negative or exceeds the resource limit or FD_SETSIZE\"\n\n );\n\n }\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 81, "score": 178130.60825177463 }, { "content": "pub fn do_recvfrom(\n\n fd: c_int,\n\n base: *mut c_void,\n\n len: size_t,\n\n flags: c_int,\n\n addr: *mut libc::sockaddr,\n\n addr_len: *mut libc::socklen_t,\n\n) -> Result<isize> {\n\n if addr.is_null() ^ addr_len.is_null() {\n\n return_errno!(EINVAL, \"addr and ddr_len should be both null\");\n\n }\n\n\n\n from_user::check_array(base as *mut u8, len)?;\n\n let mut buf = unsafe { std::slice::from_raw_parts_mut(base as *mut u8, len as usize) };\n\n\n\n // MSG_CTRUNC is a return flag but linux allows it to be set on input flags.\n\n // We just ignore it.\n\n let recv_flags = RecvFlags::from_bits(flags & !(MsgHdrFlags::MSG_CTRUNC.bits()))\n\n .ok_or_else(|| errno!(EINVAL, \"invalid flags\"))?;\n\n\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 82, "score": 178130.60825177463 }, { "content": "pub fn main_loop(\n\n current: ThreadRef,\n\n init_cpu_state: CpuContext,\n\n) -> impl Future<Output = ()> + Send {\n\n // FIXME: this is only a temp solution; we should not mark the entire task Send.\n\n unsafe { mark_send::mark_send(__main_loop(current, init_cpu_state)) }\n\n}\n\n\n\nasync fn __main_loop(current: ThreadRef, init_cpu_state: CpuContext) {\n\n unsafe {\n\n crate::process::current::set(current.clone());\n\n 
}\n\n CURRENT_CONTEXT.with(|context| {\n\n *context.borrow_mut() = init_cpu_state;\n\n });\n\n\n\n let thread_id = current.tid();\n\n let task_id = async_rt::task::current::get().tid().0;\n\n debug!(\"Thread #{} is executed as task #{}\", thread_id, task_id);\n\n\n", "file_path": "src/libos/src/entry/thread.rs", "rank": 83, "score": 178130.60825177463 }, { "content": "pub fn do_accept4(\n\n fd: c_int,\n\n addr: *mut libc::sockaddr,\n\n addr_len: *mut libc::socklen_t,\n\n flags: c_int,\n\n) -> Result<isize> {\n\n let addr_set: bool = !addr.is_null();\n\n if addr_set {\n\n from_user::check_ptr(addr_len)?;\n\n from_user::check_mut_array(addr as *mut u8, unsafe { *addr_len } as usize)?;\n\n }\n\n\n\n let file_flags = FileFlags::from_bits(flags).ok_or_else(|| errno!(EINVAL, \"invalid flags\"))?;\n\n let close_on_spawn = file_flags.contains(FileFlags::SOCK_CLOEXEC);\n\n\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let (new_socket_file, sock_addr_option) = socket.accept(file_flags)?;\n\n let new_file_ref: Arc<dyn File> = Arc::new(new_socket_file);\n\n let new_fd = current!().add_file(new_file_ref, close_on_spawn);\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 84, "score": 178130.60825177463 }, { "content": "pub fn do_getsockname(\n\n fd: c_int,\n\n addr: *mut libc::sockaddr,\n\n addr_len: *mut libc::socklen_t,\n\n) -> Result<isize> {\n\n let addr_set: bool = !addr.is_null();\n\n if addr_set {\n\n from_user::check_ptr(addr_len)?;\n\n from_user::check_mut_array(addr as *mut u8, unsafe { *addr_len } as usize)?;\n\n } else {\n\n return Ok(0);\n\n }\n\n\n\n if unsafe { *addr_len } < std::mem::size_of::<libc::sa_family_t>() as u32 {\n\n return_errno!(EINVAL, \"input length is too short\");\n\n }\n\n\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let ret = try_libc!(libc::ocall::getsockname(\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 85, "score": 178130.60825177463 }, { "content": "pub fn do_setsockopt(\n\n fd: c_int,\n\n level: c_int,\n\n optname: c_int,\n\n optval: *const c_void,\n\n optlen: libc::socklen_t,\n\n) -> Result<isize> {\n\n debug!(\n\n \"setsockopt: fd: {}, level: {}, optname: {}, optval: {:?}, optlen: {:?}\",\n\n fd, level, optname, optval, optlen\n\n );\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let ret = try_libc!(libc::ocall::setsockopt(\n\n socket.raw_host_fd() as i32,\n\n level,\n\n optname,\n\n optval,\n\n optlen\n\n ));\n\n Ok(ret as isize)\n\n } else if let Ok(unix_socket) = file_ref.as_unix_socket() {\n\n warn!(\"setsockopt for unix socket is unimplemented\");\n\n Ok(0)\n\n } else {\n\n return_errno!(EBADF, \"not a socket\")\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 86, "score": 178130.60825177463 }, { "content": "pub fn do_sendto(\n\n fd: c_int,\n\n base: *const c_void,\n\n len: size_t,\n\n flags: c_int,\n\n addr: *const libc::sockaddr,\n\n addr_len: libc::socklen_t,\n\n) -> Result<isize> {\n\n if len == 0 {\n\n return Ok(0);\n\n }\n\n\n\n if addr.is_null() ^ (addr_len == 0) {\n\n return_errno!(EINVAL, \"addr and ddr_len should be both null\");\n\n }\n\n\n\n from_user::check_array(base as *const u8, len)?;\n\n let buf = unsafe { std::slice::from_raw_parts(base as *const u8, len as usize) };\n\n\n\n let addr_set: bool = !addr.is_null();\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", 
"rank": 87, "score": 178130.60825177463 }, { "content": "pub fn do_getpeername(\n\n fd: c_int,\n\n addr: *mut libc::sockaddr,\n\n addr_len: *mut libc::socklen_t,\n\n) -> Result<isize> {\n\n let addr_set: bool = !addr.is_null();\n\n if addr_set {\n\n from_user::check_ptr(addr_len)?;\n\n from_user::check_mut_array(addr as *mut u8, unsafe { *addr_len } as usize)?;\n\n } else {\n\n return Ok(0);\n\n }\n\n\n\n let file_ref = current!().file(fd as FileDesc)?;\n\n if let Ok(socket) = file_ref.as_host_socket() {\n\n let ret = try_libc!(libc::ocall::getpeername(\n\n socket.raw_host_fd() as i32,\n\n addr,\n\n addr_len\n\n ));\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 88, "score": 178130.60825177463 }, { "content": "pub fn do_rt_sigprocmask(\n\n op_and_set: Option<(MaskOp, &sigset_t)>,\n\n oldset: Option<&mut sigset_t>,\n\n) -> Result<()> {\n\n debug!(\n\n \"do_rt_sigprocmask: op_and_set: {:?}, oldset: {:?}\",\n\n op_and_set.map(|(op, set)| (op, SigSet::from_c(*set))),\n\n oldset\n\n );\n\n\n\n let thread = current!();\n\n let old_sig_mask = thread.sig_mask();\n\n if let Some(oldset) = oldset {\n\n *oldset = old_sig_mask.to_c();\n\n }\n\n if let Some((op, &set)) = op_and_set {\n\n let set = SigSet::from_c(set);\n\n let new_sig_mask = match op {\n\n MaskOp::Block => old_sig_mask | set,\n\n MaskOp::Unblock => old_sig_mask & !set,\n", "file_path": "src/libos/src/signal/do_sigprocmask.rs", "rank": 89, "score": 178130.60825177463 }, { "content": "pub fn do_faccessat(\n\n fs_path: &FsPath,\n\n mode: AccessibilityCheckMode,\n\n flags: AccessibilityCheckFlags,\n\n) -> Result<()> {\n\n debug!(\n\n \"faccessat: fs_path: {:?}, mode: {:?}, flags: {:?}\",\n\n fs_path, mode, flags\n\n );\n\n\n\n let inode = {\n\n let current = current!();\n\n let fs = current.fs().read().unwrap();\n\n if flags.contains(AccessibilityCheckFlags::AT_SYMLINK_NOFOLLOW) {\n\n fs.lookup_inode_no_follow(fs_path)?\n\n } else {\n\n fs.lookup_inode(fs_path)?\n\n }\n\n };\n\n\n", "file_path": "src/libos/src/fs/file_ops/access.rs", "rank": 90, "score": 175913.84269228135 }, { "content": "/// Exit this thread if it has been forced to exit.\n\n///\n\n/// A thread may be forced to exit for two reasons: 1) a fatal signal; 2)\n\n/// exit_group syscall.\n\npub fn handle_force_exit() {\n\n if current!().process().is_forced_to_exit() {\n\n exit_thread(current!().process().term_status().unwrap());\n\n }\n\n}\n\n\n", "file_path": "src/libos/src/process/do_exit.rs", "rank": 91, "score": 175913.84269228135 }, { "content": "pub fn umount_nonroot_fs(\n\n root: &Arc<dyn INode>,\n\n abs_path: &str,\n\n follow_symlink: bool,\n\n) -> Result<()> {\n\n let mount_dir = if follow_symlink {\n\n root.lookup_follow(abs_path, MAX_SYMLINKS)?\n\n } else {\n\n let (dir_path, file_name) = split_path(abs_path);\n\n root.lookup_follow(dir_path, MAX_SYMLINKS)?\n\n .lookup(file_name)?\n\n };\n\n\n\n mount_dir.downcast_ref::<MNode>().unwrap().umount()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/libos/src/fs/rootfs.rs", "rank": 92, "score": 175913.84269228135 }, { "content": "pub fn do_sched_yield() {\n\n extern \"C\" {\n\n fn occlum_ocall_sched_yield() -> sgx_status_t;\n\n }\n\n unsafe {\n\n let status = occlum_ocall_sched_yield();\n\n assert!(status == sgx_status_t::SGX_SUCCESS);\n\n }\n\n}\n", "file_path": "src/libos/src/sched/do_sched_yield.rs", "rank": 93, "score": 175913.84269228135 }, { "content": "pub fn do_epoll_ctl(\n\n epfd: c_int,\n\n op: c_int,\n\n fd: c_int,\n\n event_ptr: *const libc::epoll_event,\n\n) -> Result<isize> {\n\n 
debug!(\"epoll_ctl: epfd: {}, op: {:?}, fd: {}\", epfd, op, fd);\n\n\n\n let get_c_event = |event_ptr| -> Result<&libc::epoll_event> {\n\n from_user::check_ptr(event_ptr)?;\n\n Ok(unsafe { &*event_ptr })\n\n };\n\n\n\n let fd = fd as FileDesc;\n\n let ctl_cmd = match op {\n\n libc::EPOLL_CTL_ADD => {\n\n let c_event = get_c_event(event_ptr)?;\n\n let event = EpollEvent::from_c(c_event);\n\n let flags = EpollFlags::from_c(c_event);\n\n EpollCtl::Add(fd, event, flags)\n", "file_path": "src/libos/src/net.deprecated/syscalls.rs", "rank": 94, "score": 175913.84269228135 }, { "content": "pub fn do_mount(\n\n source: &str,\n\n target: &str,\n\n flags: MountFlags,\n\n options: MountOptions,\n\n) -> Result<()> {\n\n debug!(\n\n \"mount: source: {}, target: {}, flags: {:?}, options: {:?}\",\n\n source, target, flags, options\n\n );\n\n\n\n let target = if target == \"/\" {\n\n return_errno!(EPERM, \"can not mount on root\");\n\n } else {\n\n let fs_path = FsPath::try_from(target)?;\n\n let thread = current!();\n\n let fs = thread.fs().read().unwrap();\n\n PathBuf::from(fs.convert_fspath_to_abs(&fs_path)?)\n\n };\n\n\n", "file_path": "src/libos/src/fs/fs_ops/mount.rs", "rank": 95, "score": 175913.84269228135 }, { "content": "/// Performs a serializing operation on all load-from-memory instructions\n\n/// that were issued prior to this instruction.\n\n///\n\n/// Guarantees that every load instruction that precedes, in program order,\n\n/// is globally visible before any load instruction which follows the fence in program order.\n\npub fn lfence() {\n\n cfg_if::cfg_if! {\n\n if #[cfg(target_arch = \"x86_64\")] {\n\n unsafe { core::arch::x86_64::_mm_lfence() }\n\n } else if #[cfg(target_arch = \"x86\")] {\n\n unsafe { core::arch::x86::_mm_lfence() }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/libos/crates/vdso-time/src/sys.rs", "rank": 96, "score": 175913.84269228135 }, { "content": "pub fn shutdown() {\n\n EXECUTOR.shutdown()\n\n}\n\n\n\nlazy_static! {\n\n pub(crate) static ref EXECUTOR: Executor = {\n\n let parallelism = CONFIG.parallelism();\n\n Executor::new(parallelism).unwrap()\n\n };\n\n}\n\n\n\npub(crate) struct Executor {\n\n parallelism: u32,\n\n running_vcpu_num: AtomicU32,\n\n next_thread_id: AtomicU32,\n\n is_shutdown: AtomicBool,\n\n parks: Arc<Parks>,\n\n scheduler: Box<dyn Scheduler>,\n\n}\n\n\n", "file_path": "src/libos/crates/async-rt/src/executor.rs", "rank": 97, "score": 175913.84269228135 } ]
Rust
src/rugl.rs
micahscopes/rugl
bb7fefb08c7d648f41630fe515c0bb128d95de69
/*! An ergonomic macro for creating themetic stateless WebGL applications! # Syntax ```ignore rugl_main! { vertex: " precision mediump float; attribute vec2 position; void main() { gl_Position = vec4(position, 0, 1); } "; fragment: " precision mediump float; uniform vec3 color; void main() { gl_FragColor = color; } "; attributes: { position: [ [-1, 0], [0, -1], [1, 1] ] } uniforms: { color: [1, 0, 0, 1] }, count: 3 } */ use std::borrow::Cow; use crate::webgl::{Attribute, Uniform, WebGlContext}; #[derive(Debug)] pub struct Rugl<'a> { pub inner: RuglInner<'a>, pub context: WebGlContext, } impl Rugl<'_> { pub fn step(&mut self) -> Result<(), String> { self.context.clear_with_color([1.0, 1.0, 1.0, 1.0]); for attribute in self.inner.get_attributes() { self.context.enable_attribute(attribute.get_name())?; } self.context.draw_triangles(*self.inner.get_count()); Ok(()) } } #[derive(Debug)] pub struct RuglInner<'a> { pub vertex: Cow<'a, str>, pub fragment: Cow<'a, str>, pub attributes: Vec<Attribute>, pub uniforms: Vec<Uniform>, pub count: i32, } impl<'a> RuglInner<'a> { pub fn get_vertex_shader(&self) -> &str { &self.vertex } pub fn get_fragment_shader(&self) -> &str { &self.fragment } pub fn get_attributes(&mut self) -> &Vec<Attribute> { &self.attributes } pub fn get_mut_attributes(&mut self) -> &mut Vec<Attribute> { &mut self.attributes } pub fn get_uniforms(&self) -> &Vec<Uniform> { &self.uniforms } pub fn get_mut_uniforms(&mut self) -> &mut Vec<Uniform> { &mut self.uniforms } pub fn get_count(&self) -> &i32 { &self.count } } #[macro_export] macro_rules! rugl_inner { ( $( $i:ident: { $($tokens:tt)* } ),* ) => {{ #[inline] fn build_inner<'a>() -> Result<(RuglInner<'a>, WebGlContext), JsValue> { use std::borrow::Cow; let mut context = WebGlContext::new("canvas")?; let mut inner = RuglInner { $($i: rugl_type!($i: $($tokens)*),)* }; let vertex = context.compile_shader( ShaderType::Vertex( inner.get_vertex_shader(), std::marker::PhantomData ) )?; let fragment = context.compile_shader( ShaderType::Fragment( inner.get_fragment_shader(), std::marker::PhantomData ) )?; context.link_and_add_program(&[vertex, fragment])?; context.use_program()?; let count = inner.get_count().clone(); for attribute in inner.get_mut_attributes() { let mut attr_data = Vec::new(); for layer in attribute.get_qualifiers() { attr_data.extend_from_slice(&layer.to_vec()); } context.create_buffer_with_data(attribute.get_name(), &attr_data[..], count)?; context.bind_buffer_with_name(attribute.get_name())?; context.enable_attribute(attribute.get_name())?; } for uniform in inner.get_mut_uniforms() { context.create_uniform(uniform.get_name(), uniform.inner())?; context.bind_uniform(uniform.get_name())?; } Ok((inner, context)) } match build_inner() { Ok((inner, context)) => Ok(Rugl { inner, context }), Err(err) => { log!("There was an error! {}", err.as_string().unwrap()); Err("There was a problem!!!".to_owned()) } } }} } #[doc(hidden)] #[macro_export] macro_rules! rugl_type { (vertex: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (fragment: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (attributes: $($tokens:tt)+) => { parse_ident!(@attribute $($tokens)*) }; (uniforms: $($tokens:tt)+) => { parse_ident!(@uniform $($tokens)*) }; (count: $expr:expr) => { $expr } } #[doc(hidden)] #[macro_export] macro_rules! 
parse_ident { (@attribute $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Attribute::from((stringify!($id).to_owned(), determine_bracket_replace!($($tokens)*)) )),*] }; (@uniform $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Uniform::from((stringify!($id).to_owned(), UniformInner::from(determine_bracket_replace!($($tokens)*))) )),*] }; } #[doc(hidden)] #[macro_export] macro_rules! determine_bracket_replace { ($([$($tokens:tt)*]),*) => { [ $( ($($tokens)*) ),* ] }; ($($tokens:tt)*) => { [ $($tokens)* ] } }
/*! An ergonomic macro for creating themetic stateless WebGL applications! # Syntax ```ignore rugl_main! { vertex: " precision mediump float; attribute vec2 position; void main() { gl_Position = vec4(position, 0, 1); } "; fragment: " precision mediump float; uniform vec3 color; void main() { gl_FragColor = color; } "; attributes: { position: [ [-1, 0], [0, -1], [1, 1] ] } uniforms: { color: [1, 0, 0, 1] }, count: 3 } */ use std::borrow::Cow; use crate::webgl::{Attribute, Uniform, WebGlContext}; #[derive(Debug)] pub struct Rugl<'a> { pub inner: RuglInner<'a>, pub context: WebGlContext, } impl Rugl<'_> { pub fn step(&mut self) -> Result<(), String> { self.context.clear_with_color([1.0, 1.0, 1.0, 1.0]); for attribute in self.inner.get_attributes() { self.context.enable_attribute(attribute.get_name())?; } self.context.draw_triangles(*self.inn
rm(uniform.get_name())?; } Ok((inner, context)) } match build_inner() { Ok((inner, context)) => Ok(Rugl { inner, context }), Err(err) => { log!("There was an error! {}", err.as_string().unwrap()); Err("There was a problem!!!".to_owned()) } } }} } #[doc(hidden)] #[macro_export] macro_rules! rugl_type { (vertex: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (fragment: $($tokens:tt)+) => { Cow::Borrowed($($tokens)*) }; (attributes: $($tokens:tt)+) => { parse_ident!(@attribute $($tokens)*) }; (uniforms: $($tokens:tt)+) => { parse_ident!(@uniform $($tokens)*) }; (count: $expr:expr) => { $expr } } #[doc(hidden)] #[macro_export] macro_rules! parse_ident { (@attribute $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Attribute::from((stringify!($id).to_owned(), determine_bracket_replace!($($tokens)*)) )),*] }; (@uniform $($id:ident: [$($tokens:tt)*]),+ $(,)* ) => { vec![$( Uniform::from((stringify!($id).to_owned(), UniformInner::from(determine_bracket_replace!($($tokens)*))) )),*] }; } #[doc(hidden)] #[macro_export] macro_rules! determine_bracket_replace { ($([$($tokens:tt)*]),*) => { [ $( ($($tokens)*) ),* ] }; ($($tokens:tt)*) => { [ $($tokens)* ] } }
er.get_count()); Ok(()) } } #[derive(Debug)] pub struct RuglInner<'a> { pub vertex: Cow<'a, str>, pub fragment: Cow<'a, str>, pub attributes: Vec<Attribute>, pub uniforms: Vec<Uniform>, pub count: i32, } impl<'a> RuglInner<'a> { pub fn get_vertex_shader(&self) -> &str { &self.vertex } pub fn get_fragment_shader(&self) -> &str { &self.fragment } pub fn get_attributes(&mut self) -> &Vec<Attribute> { &self.attributes } pub fn get_mut_attributes(&mut self) -> &mut Vec<Attribute> { &mut self.attributes } pub fn get_uniforms(&self) -> &Vec<Uniform> { &self.uniforms } pub fn get_mut_uniforms(&mut self) -> &mut Vec<Uniform> { &mut self.uniforms } pub fn get_count(&self) -> &i32 { &self.count } } #[macro_export] macro_rules! rugl_inner { ( $( $i:ident: { $($tokens:tt)* } ),* ) => {{ #[inline] fn build_inner<'a>() -> Result<(RuglInner<'a>, WebGlContext), JsValue> { use std::borrow::Cow; let mut context = WebGlContext::new("canvas")?; let mut inner = RuglInner { $($i: rugl_type!($i: $($tokens)*),)* }; let vertex = context.compile_shader( ShaderType::Vertex( inner.get_vertex_shader(), std::marker::PhantomData ) )?; let fragment = context.compile_shader( ShaderType::Fragment( inner.get_fragment_shader(), std::marker::PhantomData ) )?; context.link_and_add_program(&[vertex, fragment])?; context.use_program()?; let count = inner.get_count().clone(); for attribute in inner.get_mut_attributes() { let mut attr_data = Vec::new(); for layer in attribute.get_qualifiers() { attr_data.extend_from_slice(&layer.to_vec()); } context.create_buffer_with_data(attribute.get_name(), &attr_data[..], count)?; context.bind_buffer_with_name(attribute.get_name())?; context.enable_attribute(attribute.get_name())?; } for uniform in inner.get_mut_uniforms() { context.create_uniform(uniform.get_name(), uniform.inner())?; context.bind_unifo
random
[ { "content": "fn main() {\n\n let dest_path = Path::new(\"pkg\");\n\n\n\n if !dest_path.exists() {\n\n fs::create_dir(dest_path).expect(\"Unable to create directory\");\n\n }\n\n\n\n let test = Command::new(\"wasm-bindgen\")\n\n .args(&[\n\n \"target/wasm32-unknown-unknown/debug/rugl_test.wasm\",\n\n \"--out-dir\",\n\n dest_path.to_str().unwrap(),\n\n ])\n\n .output()\n\n .expect(\"Unable to build wasm\");\n\n\n\n println!(\"{:?}\", test);\n\n}\n", "file_path": "build.rs", "rank": 0, "score": 63754.54601095006 }, { "content": "pub trait FromSlice {\n\n fn from_slice(memory: &mut JsValue, data: Self) -> JsArray;\n\n}\n\n\n\nmacro_rules! from_slice {\n\n ($type:ty, $id:ident) => {\n\n impl FromSlice for $type {\n\n fn from_slice(memory: &mut JsValue, data: $type) -> JsArray {\n\n let data_size = data.len();\n\n let data_pointer = data.as_ptr() as u32;\n\n let data_normilization = size_of_val(&data[0]) as u32;\n\n let data_location = data_pointer / data_normilization;\n\n\n\n // Check to see if our memory was resized before we use it\n\n // TODO: Handle error case\n\n let memory_buffer = wasm_bindgen::memory()\n\n .dyn_into::<WebAssembly::Memory>()\n\n .unwrap()\n\n .buffer();\n\n\n", "file_path": "src/webgl/mod.rs", "rank": 1, "score": 41797.50042305457 }, { "content": "/*!\n\nThe Attribute class holds a vector of the inner types and offers many coersions of other types.\n\n */\n\n\n\nuse super::qualifier::Qualifier;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Attribute {\n\n name: String,\n\n data: Vec<Qualifier>,\n\n}\n\n\n\nimpl Attribute {\n\n pub fn get_name(&self) -> &String {\n\n &self.name\n\n }\n\n\n\n pub fn get_qualifiers(&self) -> &Vec<Qualifier> {\n\n &self.data\n\n }\n", "file_path": "src/webgl/attribute.rs", "rank": 2, "score": 39309.459993728706 }, { "content": "}\n\n\n\n// Array impls\n\n__impl_from_repeat_vec!(i32, Attribute, Qualifier::Float);\n\n__impl_from_repeat_vec!(f32, Attribute, Qualifier::Float);\n\n__impl_from_repeat_vec!((i32, i32), Attribute, Qualifier::Vec2);\n\n__impl_from_repeat_vec!((i32, i32, i32), Attribute, Qualifier::Vec3);\n\n__impl_from_repeat_vec!((f32, f32), Attribute, Qualifier::Vec2f);\n\n__impl_from_repeat_vec!((f32, f32, f32), Attribute, Qualifier::Vec3f);\n\n__impl_from_repeat_vec!((f32, f32, f32, f32), Attribute, Qualifier::Vec4f);\n", "file_path": "src/webgl/attribute.rs", "rank": 3, "score": 39308.052502599276 }, { "content": "}\n\n\n\nimpl Uniform {\n\n pub fn get_name(&self) -> &String {\n\n &self.name\n\n }\n\n\n\n pub fn inner(&self) -> UniformInner {\n\n self.data\n\n }\n\n}\n\n\n\nimpl From<(String, UniformInner)> for Uniform {\n\n fn from(uniform: (String, UniformInner)) -> Self {\n\n Uniform {\n\n name: uniform.0,\n\n data: UniformInner::from(uniform.1),\n\n }\n\n }\n\n}\n", "file_path": "src/webgl/uniform.rs", "rank": 4, "score": 39137.68657855969 }, { "content": "\n\nimpl From<i32> for UniformInner {\n\n fn from(item: i32) -> Self {\n\n UniformInner::Uniform1i(item)\n\n }\n\n}\n\n\n\nimpl From<f32> for UniformInner {\n\n fn from(item: f32) -> Self {\n\n UniformInner::Uniform1f(item)\n\n }\n\n}\n\n\n\nimpl From<[i32; 2]> for UniformInner {\n\n fn from(item: [i32; 2]) -> Self {\n\n UniformInner::Uniform2i(item[0], item[1])\n\n }\n\n}\n\n\n\nimpl From<[i32; 3]> for UniformInner {\n", "file_path": "src/webgl/uniform.rs", "rank": 5, "score": 39134.12829494035 }, { "content": " fn from(item: [i32; 3]) -> Self {\n\n UniformInner::Uniform3i(item[0], item[1], item[2])\n\n }\n\n}\n\n\n\nimpl From<[f32; 2]> for UniformInner {\n\n fn 
from(item: [f32; 2]) -> Self {\n\n UniformInner::Uniform2f(item[0], item[1])\n\n }\n\n}\n\n\n\nimpl From<[f32; 3]> for UniformInner {\n\n fn from(item: [f32; 3]) -> Self {\n\n UniformInner::Uniform3f(item[0], item[1], item[2])\n\n }\n\n}\n\n\n\nimpl From<[f32; 4]> for UniformInner {\n\n fn from(item: [f32; 4]) -> Self {\n\n UniformInner::Uniform4f(item[0], item[1], item[2], item[3])\n\n }\n\n}\n", "file_path": "src/webgl/uniform.rs", "rank": 6, "score": 39133.63187226112 }, { "content": "/*!\n\nThe Uniform class holds a vector of the inner types and offers many coersions of other types.\n\n */\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum UniformInner {\n\n Uniform1i(i32),\n\n Uniform1f(f32),\n\n Uniform2i(i32, i32),\n\n Uniform2f(f32, f32),\n\n Uniform3i(i32, i32, i32),\n\n Uniform3f(f32, f32, f32),\n\n Uniform4i(i32, i32, i32, i32),\n\n Uniform4f(f32, f32, f32, f32),\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Uniform {\n\n name: String,\n\n data: UniformInner,\n", "file_path": "src/webgl/uniform.rs", "rank": 7, "score": 39130.45570762649 }, { "content": "/*!\n\nMacros used for various utilities, this is the ugly file.\n\n!*/\n\n\n\n// A macro to provide `println!(..)`-style syntax for `console.log` logging.\n\n#[macro_export]\n\nmacro_rules! log {\n\n ( $( $t:tt )* ) => {\n\n web_sys::console::log_1(&format!( $( $t )* ).into());\n\n }\n\n}\n\n\n\n// TODO: Remove once const generics gets implemented\n\n#[doc(hidden)]\n\nmacro_rules! __impl_from_for_type_vec {\n\n ($type:ty, $num:expr, $impl_type:ty, $impl_subtype:tt::$impl_subtype_variant:tt) => {\n\n impl From<(String, [$type; $num])> for $impl_type {\n\n fn from(items: (String, [$type; $num])) -> Self {\n\n let mut v: Vec<Qualifier> = Vec::new();\n\n for item in &items.1 {\n", "file_path": "src/macros.rs", "rank": 8, "score": 22995.520673130242 }, { "content": " let variant = Qualifier::from(*item);\n\n v.push(variant)\n\n }\n\n\n\n Self {\n\n name: items.0,\n\n data: v,\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\nmacro_rules! 
__impl_from_repeat_vec {\n\n ($type:ty, $impl_type:ty, $impl_subtype:tt::$impl_subtype_variant:tt) => {\n\n __impl_from_for_type_vec!($type, 1, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 2, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 3, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 4, $impl_type, $impl_subtype::$impl_subtype_variant);\n", "file_path": "src/macros.rs", "rank": 9, "score": 22990.77384310705 }, { "content": " __impl_from_for_type_vec!($type, 5, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 6, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 7, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 8, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 9, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 10, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 11, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 12, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 13, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 14, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 15, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n __impl_from_for_type_vec!($type, 16, $impl_type, $impl_subtype::$impl_subtype_variant);\n\n };\n\n}\n", "file_path": "src/macros.rs", "rank": 10, "score": 22988.62089543308 }, { "content": " WebGlRenderingContext::FLOAT,\n\n false,\n\n 0,\n\n 0,\n\n );\n\n self.context.enable_vertex_attrib_array(0);\n\n Ok(())\n\n }\n\n None => Err(String::from(\"Attribute does not exist!\")),\n\n }\n\n }\n\n\n\n pub fn create_uniform<'a, T: Copy + Into<Cow<'a, str>>>(\n\n &mut self,\n\n name: T,\n\n uniform: UniformInner,\n\n ) -> Result<(), String> {\n\n let location = self\n\n .context\n\n .get_uniform_location(&self.program.as_ref().unwrap(), name.into().as_ref());\n", "file_path": "src/webgl/mod.rs", "rank": 11, "score": 17726.963690128745 }, { "content": " None => Err(String::from(\"Attribute does not exist!\")),\n\n }\n\n }\n\n\n\n /// Clear and set background color\n\n pub fn clear_with_color(&self, color: [f32; 4]) {\n\n self.context\n\n .clear_color(color[0], color[1], color[2], color[3]);\n\n self.context.clear(WebGlRenderingContext::COLOR_BUFFER_BIT);\n\n }\n\n\n\n pub fn enable_attribute<'a, T: Into<Cow<'a, str>>>(&self, name: T) -> Result<(), String> {\n\n let attribute = self.attributes.get(&name.into().into_owned());\n\n match attribute {\n\n Some(attribute) => {\n\n let (_, location) = attribute.get_data().get_attribute();\n\n\n\n self.context.vertex_attrib_pointer_with_i32(\n\n *location,\n\n *attribute.get_count() as _,\n", "file_path": "src/webgl/mod.rs", "rank": 12, "score": 17725.394834536877 }, { "content": "/// WebGl Program\n\nuse crate::webgl::Shader;\n\nuse web_sys::{WebGlProgram, WebGlRenderingContext};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Program {\n\n internal: Option<WebGlProgram>,\n\n}\n\n\n\nimpl Program {\n\n pub fn empty() -> Self {\n\n Program { internal: None }\n\n }\n\n\n\n pub fn new<'a, T: IntoIterator<Item = &'a Shader>>(\n\n context: &WebGlRenderingContext,\n\n shaders: T,\n\n ) -> Result<Program, String> {\n\n // Create a 
webgl program\n\n let program = context.create_program().ok_or_else(|| {\n", "file_path": "src/webgl/program.rs", "rank": 13, "score": 17724.81907154883 }, { "content": "/*!\n\nThe Qualifer class is a single data-type used by an Attribute or Uniform\n\n */\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Qualifier {\n\n Int(i32),\n\n Float(f32),\n\n Vec2(i32, i32),\n\n Vec3(i32, i32, i32),\n\n Vec2f(f32, f32),\n\n Vec3f(f32, f32, f32),\n\n Vec4f(f32, f32, f32, f32),\n\n}\n\n\n\nimpl Qualifier {\n\n pub fn to_vec(&self) -> Vec<f32> {\n\n match self {\n\n Qualifier::Int(inner) => vec![*inner as f32],\n\n Qualifier::Float(inner) => vec![*inner],\n", "file_path": "src/webgl/qualifier.rs", "rank": 14, "score": 17724.08477156168 }, { "content": "/// WebGlBuffer wrapper\n\nuse web_sys::{WebGlBuffer, WebGlUniformLocation};\n\n\n\nuse crate::webgl::JsArray;\n\nuse crate::webgl::UniformInner;\n\n\n\n#[derive(Debug)]\n\npub enum BufferInternal {\n\n Attribute(JsArray, u32),\n\n Uniform(UniformInner, WebGlUniformLocation),\n\n}\n\n\n\nimpl BufferInternal {\n\n pub fn get_attribute(&self) -> (&JsArray, &u32) {\n\n match self {\n\n BufferInternal::Attribute(data, location) => (data, location),\n\n BufferInternal::Uniform(_, _) => panic!(\"Not an attribute!\"),\n\n }\n\n }\n\n\n", "file_path": "src/webgl/buffer.rs", "rank": 15, "score": 17723.6706680129 }, { "content": "/// WebGl Shader\n\nuse std::borrow::Cow;\n\nuse std::marker::PhantomData;\n\n\n\nuse web_sys::{WebGlRenderingContext, WebGlShader};\n\n\n\n#[derive(Debug)]\n\npub enum ShaderType<'a, T: Into<Cow<'a, str>>> {\n\n Vertex(T, PhantomData<&'a T>),\n\n Fragment(T, PhantomData<&'a T>),\n\n}\n\n\n\nimpl<'a, T: Into<Cow<'a, str>>> ShaderType<'a, T> {\n\n pub fn into_inner(self) -> String {\n\n match self {\n\n ShaderType::Vertex(inner, _) => inner.into().into_owned(),\n\n ShaderType::Fragment(inner, _) => inner.into().into_owned(),\n\n }\n\n }\n\n\n", "file_path": "src/webgl/shader.rs", "rank": 16, "score": 17723.506923880923 }, { "content": " pub fn into_gl_type(&self) -> u32 {\n\n match self {\n\n ShaderType::Vertex(_, _) => WebGlRenderingContext::VERTEX_SHADER,\n\n ShaderType::Fragment(_, _) => WebGlRenderingContext::FRAGMENT_SHADER,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Shader {\n\n internal: WebGlShader,\n\n}\n\n\n\nimpl Shader {\n\n pub fn new<'a, T: Into<Cow<'a, str>>>(\n\n context: &WebGlRenderingContext,\n\n shader: ShaderType<'a, T>,\n\n ) -> Result<Shader, String> {\n\n let shader_context = context\n\n .create_shader(shader.into_gl_type())\n", "file_path": "src/webgl/shader.rs", "rank": 17, "score": 17723.488927227903 }, { "content": " shaders: Shaders,\n\n ) -> Result<(), String> {\n\n let program = self.link_program(shaders)?;\n\n let _ = std::mem::replace(&mut self.program, program);\n\n Ok(())\n\n }\n\n\n\n /// Use internal program\n\n pub fn use_program(&self) -> Result<(), String> {\n\n match &self.program.as_ref() {\n\n Some(program) => {\n\n self.context.use_program(Some(&program));\n\n Ok(())\n\n }\n\n None => Err(String::from(\"Program has not been setup yet!\")),\n\n }\n\n }\n\n\n\n /// Create a buffer\n\n pub fn create_buffer(&self) -> Result<WebGlBuffer, String> {\n", "file_path": "src/webgl/mod.rs", "rank": 18, "score": 17722.669020849306 }, { "content": " pub fn get_uniform(&self) -> (&UniformInner, &WebGlUniformLocation) {\n\n match self {\n\n BufferInternal::Attribute(_, _) => panic!(\"Not a uniform!\"),\n\n BufferInternal::Uniform(data, location) => (data, location),\n\n }\n\n 
}\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Buffer {\n\n buffer: Option<WebGlBuffer>,\n\n data: BufferInternal,\n\n count: i32,\n\n}\n\n\n\nimpl Buffer {\n\n pub fn new(buffer: Option<WebGlBuffer>, data: BufferInternal, count: i32) -> Self {\n\n Self {\n\n buffer,\n\n data,\n", "file_path": "src/webgl/buffer.rs", "rank": 19, "score": 17722.428460816773 }, { "content": "mod attribute;\n\n/// Webassembly Context\n\nmod buffer;\n\nmod program;\n\nmod qualifier;\n\nmod shader;\n\nmod uniform;\n\n\n\nuse std::borrow::Cow;\n\nuse std::collections::HashMap;\n\nuse std::mem::size_of_val;\n\n\n\npub use attribute::Attribute;\n\npub use buffer::{Buffer, BufferInternal};\n\npub use program::Program;\n\npub use shader::{Shader, ShaderType};\n\npub use uniform::{Uniform, UniformInner};\n\n\n\nuse js_sys::*;\n\nuse wasm_bindgen::prelude::*;\n", "file_path": "src/webgl/mod.rs", "rank": 20, "score": 17722.376419863216 }, { "content": "use wasm_bindgen::JsCast;\n\nuse web_sys::{WebGlBuffer, WebGlRenderingContext};\n\n\n\n#[derive(Debug)]\n\npub struct WebGlContext {\n\n context: WebGlRenderingContext,\n\n _canvas: web_sys::HtmlCanvasElement,\n\n program: Program,\n\n attributes: HashMap<String, Buffer>,\n\n uniforms: HashMap<String, Buffer>,\n\n memory: JsValue,\n\n}\n\n\n\nimpl WebGlContext {\n\n pub fn new<'a, T: Into<&'a str>>(id: T) -> Result<Self, JsValue> {\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let _canvas = document\n\n .get_element_by_id(id.into())\n\n .ok_or_else(|| String::from(\"Unable to get Canvas element!\"))?;\n\n let _canvas: web_sys::HtmlCanvasElement =\n", "file_path": "src/webgl/mod.rs", "rank": 21, "score": 17721.812080145643 }, { "content": " .uniform4f(Some(location), *val1, *val2, *val3, *val4)\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n None => Err(String::from(\"Uniform does not exist!\")),\n\n }\n\n }\n\n\n\n /// Draw triangles\n\n pub fn draw_triangles(&self, count: i32) {\n\n self.context\n\n .draw_arrays(WebGlRenderingContext::TRIANGLES, 0, count);\n\n }\n\n\n\n pub fn context(&self) -> &WebGlRenderingContext {\n\n &self.context\n\n }\n\n}\n", "file_path": "src/webgl/mod.rs", "rank": 22, "score": 17720.321244866216 }, { "content": " let buffer = self\n\n .context\n\n .create_buffer()\n\n .ok_or(\"Unable to create buffer\")?;\n\n Ok(buffer)\n\n }\n\n\n\n pub fn create_buffer_with_data<'a, Name: Into<Cow<'a, str>>, Type: FromSlice>(\n\n &mut self,\n\n name: Name,\n\n data: Type,\n\n count: i32,\n\n ) -> Result<(), String> {\n\n let qualifer_name = name.into();\n\n let buffer = self.create_buffer()?;\n\n let data = FromSlice::from_slice(&mut self.memory, data);\n\n let program = self.program.as_ref().unwrap();\n\n let location = self.context.get_attrib_location(&program, &qualifer_name);\n\n\n\n if location < 0 {\n", "file_path": "src/webgl/mod.rs", "rank": 23, "score": 17719.77470846334 }, { "content": " fn from(item: i32) -> Self {\n\n Qualifier::Int(item)\n\n }\n\n}\n\n\n\nimpl From<[f32; 1]> for Qualifier {\n\n fn from(item: [f32; 1]) -> Self {\n\n Qualifier::Float(item[0])\n\n }\n\n}\n\n\n\nimpl From<f32> for Qualifier {\n\n fn from(item: f32) -> Self {\n\n Qualifier::Float(item)\n\n }\n\n}\n\n\n\nimpl From<(i32, i32)> for Qualifier {\n\n fn from(item: (i32, i32)) -> Self {\n\n Qualifier::Vec2(item.0, item.1)\n", "file_path": "src/webgl/qualifier.rs", "rank": 24, "score": 17719.74625979186 }, { "content": " return Err(String::from(format!(\n\n \"Attribute: {} does not exist!\",\n\n qualifer_name\n\n )));\n\n }\n\n\n\n self.attributes.insert(\n\n 
qualifer_name.into_owned(),\n\n Buffer::new(\n\n Some(buffer),\n\n BufferInternal::Attribute(data, location as _),\n\n count,\n\n ),\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n /// Bind an array to the context\n\n pub fn bind_buffer_with_name<'a, Name: Into<Cow<'a, str>>>(\n", "file_path": "src/webgl/mod.rs", "rank": 25, "score": 17719.109688154713 }, { "content": " .ok_or_else(|| String::from(\"Unable to create shader object\"))?;\n\n\n\n context.shader_source(&shader_context, &shader.into_inner());\n\n context.compile_shader(&shader_context);\n\n\n\n if context\n\n .get_shader_parameter(&shader_context, WebGlRenderingContext::COMPILE_STATUS)\n\n .as_bool()\n\n .unwrap_or(false)\n\n {\n\n Ok(Shader {\n\n internal: shader_context,\n\n })\n\n } else {\n\n Err(context\n\n .get_shader_info_log(&shader_context)\n\n .unwrap_or_else(|| \"Unknown error creating shader\".into()))\n\n }\n\n }\n\n\n\n pub fn as_ref(&self) -> &WebGlShader {\n\n &self.internal\n\n }\n\n}\n", "file_path": "src/webgl/shader.rs", "rank": 26, "score": 17718.675154704575 }, { "content": " &self,\n\n name: Name,\n\n ) -> Result<(), String> {\n\n let attribute = self.attributes.get(&name.into().into_owned());\n\n match attribute {\n\n Some(attribute) => {\n\n self.context.bind_buffer(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n Some(&attribute.get_buffer().unwrap()),\n\n );\n\n\n\n let (data, _) = attribute.get_data().get_attribute();\n\n\n\n self.context.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n data.to_object(),\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n Ok(())\n\n }\n", "file_path": "src/webgl/mod.rs", "rank": 27, "score": 17718.414550165056 }, { "content": "\n\n if !location.is_some() {\n\n return Err(String::from(\"Uniform location not found!\"));\n\n }\n\n\n\n self.uniforms.insert(\n\n name.into().as_ref().to_string(),\n\n Buffer::new(None, BufferInternal::Uniform(uniform, location.unwrap()), 0),\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn bind_uniform<'a, T: Into<Cow<'a, str>>>(&self, name: T) -> Result<(), String> {\n\n let uniform = self.uniforms.get(&name.into().into_owned());\n\n match uniform {\n\n Some(uniform) => {\n\n let (data, location) = uniform.get_data().get_uniform();\n\n\n\n match data {\n", "file_path": "src/webgl/mod.rs", "rank": 28, "score": 17718.348670165436 }, { "content": " UniformInner::Uniform1i(val) => self.context.uniform1i(Some(location), *val),\n\n UniformInner::Uniform1f(val) => self.context.uniform1f(Some(location), *val),\n\n UniformInner::Uniform2i(val1, val2) => {\n\n self.context.uniform2i(Some(location), *val1, *val2)\n\n }\n\n UniformInner::Uniform2f(val1, val2) => {\n\n self.context.uniform2f(Some(location), *val1, *val2)\n\n }\n\n UniformInner::Uniform3i(val1, val2, val3) => {\n\n self.context.uniform3i(Some(location), *val1, *val2, *val3)\n\n }\n\n UniformInner::Uniform3f(val1, val2, val3) => {\n\n self.context.uniform3f(Some(location), *val1, *val2, *val3)\n\n }\n\n UniformInner::Uniform4i(val1, val2, val3, val4) => {\n\n self.context\n\n .uniform4i(Some(location), *val1, *val2, *val3, *val4)\n\n }\n\n UniformInner::Uniform4f(val1, val2, val3, val4) => {\n\n self.context\n", "file_path": "src/webgl/mod.rs", "rank": 29, "score": 17718.33998456547 }, { "content": "\n\n /// Compile shaders\n\n pub fn compile_shader<'a, T: Into<Cow<'a, str>>>(\n\n &self,\n\n shader: ShaderType<'a, T>,\n\n ) -> Result<Shader, String> {\n\n Shader::new(&self.context, shader)\n\n }\n\n\n\n /// Link shaders to program\n\n pub fn link_program<'a, Shaders: 
IntoIterator<Item = &'a Shader>>(\n\n &mut self,\n\n shaders: Shaders,\n\n ) -> Result<Program, String> {\n\n Program::new(&self.context, shaders.into_iter())\n\n }\n\n\n\n /// Link shaders to program, adding the program to our internal hashmap\n\n pub fn link_and_add_program<'a, Shaders: IntoIterator<Item = &'a Shader>>(\n\n &mut self,\n", "file_path": "src/webgl/mod.rs", "rank": 30, "score": 17717.91751043162 }, { "content": " })\n\n } else {\n\n Err(context\n\n .get_program_info_log(&program)\n\n .unwrap_or_else(|| String::from(\"Unknown error creating program object\")))\n\n }\n\n }\n\n\n\n pub fn as_ref(&self) -> Option<&WebGlProgram> {\n\n self.internal.as_ref()\n\n }\n\n}\n", "file_path": "src/webgl/program.rs", "rank": 31, "score": 17717.74226546552 }, { "content": " log!(\"Unable to create program\");\n\n String::from(\"Unable to create shader object\")\n\n })?;\n\n\n\n // Iterate through shaders attaching them to the program\n\n for shader in shaders {\n\n context.attach_shader(&program, shader.as_ref());\n\n }\n\n\n\n // Link program with webgl\n\n context.link_program(&program);\n\n\n\n // See if link failed\n\n if context\n\n .get_program_parameter(&program, WebGlRenderingContext::LINK_STATUS)\n\n .as_bool()\n\n .unwrap_or(false)\n\n {\n\n Ok(Program {\n\n internal: Some(program),\n", "file_path": "src/webgl/program.rs", "rank": 32, "score": 17717.672321548067 }, { "content": " _canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let context = _canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n let memory = wasm_bindgen::memory()\n\n .dyn_into::<WebAssembly::Memory>()?\n\n .buffer();\n\n\n\n Ok(WebGlContext {\n\n context,\n\n _canvas,\n\n program: Program::empty(),\n\n attributes: HashMap::new(),\n\n uniforms: HashMap::new(),\n\n memory,\n\n })\n\n }\n", "file_path": "src/webgl/mod.rs", "rank": 33, "score": 17717.24723760205 }, { "content": " }\n\n}\n\n\n\nimpl From<(i32, i32, i32)> for Qualifier {\n\n fn from(item: (i32, i32, i32)) -> Self {\n\n Qualifier::Vec3(item.0, item.1, item.2)\n\n }\n\n}\n\n\n\nimpl From<(f32, f32)> for Qualifier {\n\n fn from(item: (f32, f32)) -> Self {\n\n Qualifier::Vec2f(item.0, item.1)\n\n }\n\n}\n\n\n\nimpl From<(f32, f32, f32)> for Qualifier {\n\n fn from(item: (f32, f32, f32)) -> Self {\n\n Qualifier::Vec3f(item.0, item.1, item.2)\n\n }\n\n}\n\n\n\nimpl From<(f32, f32, f32, f32)> for Qualifier {\n\n fn from(item: (f32, f32, f32, f32)) -> Self {\n\n Qualifier::Vec4f(item.0, item.1, item.2, item.3)\n\n }\n\n}\n", "file_path": "src/webgl/qualifier.rs", "rank": 34, "score": 17716.636013606047 }, { "content": " Qualifier::Vec2(inner1, inner2) => vec![*inner1 as f32, *inner2 as f32],\n\n Qualifier::Vec3(inner1, inner2, inner3) => {\n\n vec![*inner1 as f32, *inner2 as f32, *inner3 as f32]\n\n }\n\n Qualifier::Vec2f(inner1, inner2) => vec![*inner1, *inner2],\n\n Qualifier::Vec3f(inner1, inner2, inner3) => vec![*inner1, *inner2, *inner3],\n\n Qualifier::Vec4f(inner1, inner2, inner3, inner4) => {\n\n vec![*inner1, *inner2, *inner3, *inner4]\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<[i32; 1]> for Qualifier {\n\n fn from(item: [i32; 1]) -> Self {\n\n Qualifier::Int(item[0])\n\n }\n\n}\n\n\n\nimpl From<i32> for Qualifier {\n", "file_path": "src/webgl/qualifier.rs", "rank": 35, "score": 17715.99473986828 }, { "content": " count,\n\n }\n\n }\n\n\n\n pub fn get_buffer(&self) -> Option<&WebGlBuffer> {\n\n self.buffer.as_ref()\n\n }\n\n\n\n pub fn get_data(&self) -> &BufferInternal {\n\n &self.data\n\n 
}\n\n\n\n pub fn get_count(&self) -> &i32 {\n\n &self.count\n\n }\n\n}\n", "file_path": "src/webgl/buffer.rs", "rank": 36, "score": 17715.988711831684 }, { "content": "\n\n#[derive(Debug)]\n\npub enum JsArray {\n\n Uint8Array(Uint8Array),\n\n Uint16Array(Uint16Array),\n\n Uint32Array(Uint32Array),\n\n Int8Array(Int8Array),\n\n Int16Array(Int16Array),\n\n Int32Array(Int32Array),\n\n Float32Array(Float32Array),\n\n Float64Array(Float64Array),\n\n}\n\n\n\nimpl JsArray {\n\n fn to_object(&self) -> &Object {\n\n match self {\n\n JsArray::Uint8Array(arr) => arr.as_ref(),\n\n JsArray::Uint16Array(arr) => arr.as_ref(),\n\n JsArray::Uint32Array(arr) => arr.as_ref(),\n\n JsArray::Int8Array(arr) => arr.as_ref(),\n\n JsArray::Int16Array(arr) => arr.as_ref(),\n\n JsArray::Int32Array(arr) => arr.as_ref(),\n\n JsArray::Float32Array(arr) => arr.as_ref(),\n\n JsArray::Float64Array(arr) => arr.as_ref(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/webgl/mod.rs", "rank": 37, "score": 17712.210854507008 }, { "content": " // Replace our internal memory\n\n if memory_buffer != *memory {\n\n std::mem::replace(memory, memory_buffer);\n\n }\n\n\n\n // Return js_sys value\n\n JsArray::$id(\n\n $id::new(memory).subarray(data_location, data_location + data_size as u32),\n\n )\n\n }\n\n }\n\n };\n\n}\n\n\n\nfrom_slice!(&[u8], Uint8Array);\n\nfrom_slice!(&[u16], Uint16Array);\n\nfrom_slice!(&[u32], Uint32Array);\n\nfrom_slice!(&[i8], Int8Array);\n\nfrom_slice!(&[i16], Int16Array);\n\nfrom_slice!(&[i32], Int32Array);\n\nfrom_slice!(&[f32], Float32Array);\n\nfrom_slice!(&[f64], Float64Array);\n", "file_path": "src/webgl/mod.rs", "rank": 38, "score": 17710.652764212162 }, { "content": "# rugl\n\n\n\nA clone of [regl](regl.party) a functional abstraction for wegbl.\n\n\n\nThe goal of this crate is to provide a simple Rust macro for stateless WebGL animations with minimal work!\n\n\n\n## Example\n\n```rust\n\nuse rugl::prelude::*;\n\n\n\nrugl! {\n\n vertex: \"\n\n precision mediump float;\n\n attribute vec2 position;\n\n void main() {\n\n gl_Position = vec4(position, 0, 1);\n\n }\n\n \";\n\n fragment: \"\n\n precision mediump float;\n\n uniform vec3 color;\n\n void main() {\n\n gl_FragColor = color;\n\n }\n\n \";\n\n\n\n attributes: {\n\n position: [\n\n [-1, 0],\n\n [0, -1],\n\n [1, 1]\n\n ]\n\n }\n\n\n\n uniforms: {\n\n color: [1, 0, 0, 1]\n\n },\n\n\n\n count: 3\n\n}\n\n```\n\n\n\n## Quickstart\n\n\n\nCreate a new library via cargo\n\n\n\n1. `cargo new --lib rugl_test && cd rugl_test`\n\n\n\nInstall necessary files and toolchain\n\n\n\n2. ` bash <(curl -s https://raw.githubusercontent.com/Thomspoon/rugl/master/install_rugl.sh)`\n\n\n\nReplace the code in your lib.rs with the following:\n\n\n\n```rust\n\nuse rugl::prelude::*;\n\n\n\nrugl!(\n\n vertex: {\n\n \"\n\n attribute vec4 position;\n\n void main() {\n\n gl_Position = position;\n\n }\n\n \"\n\n },\n\n fragment: {\n\n \"\n\n precision mediump float;\n\n uniform vec4 color;\n\n\n\n void main() {\n\n gl_FragColor = color;\n\n }\n\n \"\n\n },\n\n attributes: {\n\n position: [\n\n [-0.7, -0.7, 0.0],\n\n [ 0.7, -0.7, 0.0],\n\n [ 0.0, 0.7, 0.0]\n\n ],\n\n },\n\n uniforms: {\n\n color: [0.0, 0.9, 0.5, 0.3]\n\n },\n\n\n\n count: { 3 }\n\n);\n\n```\n\n\n\nAdd this to your Cargo.toml:\n\n\n\n```rust\n\n[package]\n\n//... 
Other things\n\nbuild = \"build.rs\"\n\n\n\n[lib]\n\ncrate-type = [\"cdylib\"]\n\n\n\n[dependencies]\n\nrugl = { git = \"https://github.com/Thomspoon/rugl.git\" }\n\njs-sys = \"0.3.10\"\n", "file_path": "README.md", "rank": 39, "score": 13504.347567246332 }, { "content": "## TODO\n\n- [ ] Run Clippy\n\n- [ ] Implement Animation Capability\n\n- [ ] Cleanup Code and Publish Crate\n\n- [ ] Spruce Up Macro to Support Non-bracken Syntax\n", "file_path": "README.md", "rank": 40, "score": 13486.706710765495 }, { "content": "wasm-bindgen = \"^0.2\"\n\n\n\n[dependencies.web-sys]\n\nversion = \"0.3.10\"\n\nfeatures = [\n\n \"console\",\n\n \"Document\",\n\n \"Element\",\n\n \"Window\",\n\n \"WebGlBuffer\",\n\n \"WebGlProgram\",\n\n \"WebGlShader\",\n\n \"WebGlRenderingContext\",\n\n \"WebGlUniformLocation\",\n\n \"WebGpuShaderStage\",\n\n \"HtmlCanvasElement\"\n\n]\n\n```\n\n\n\n3. Build your crate\n\n\n\n`cargo +nightly build --target wasm32-unknown-unknown`\n\n\n\n4. Install npm modules\n\n\n\n`npm install`\n\n\n\n5. Serve your crate\n\n\n\n`npm run serve`\n\n\n\n6. Go to http://localhost:8080, and you should see the photo below:\n\n\n\n<img alt=\"\" src=\"screenshot.png\" height=\"600\" width = \"600\" />\n\n\n", "file_path": "README.md", "rank": 41, "score": 13484.484368887119 }, { "content": "import('./pkg/rugl_test')\n\n .catch(console.error)\n\n\n\nvar can = document.getElementById(\"canvas\");\n\n\n\nfunction resizeCanvas() {\n\n can.style.width = window.innerWidth + \"px\";\n\n setTimeout(function() {\n\n can.style.height = window.innerHeight + \"px\";\n\n }, 0);\n\n};\n\n\n\n// Webkit/Blink will fire this on load, but Gecko doesn't.\n\nwindow.onresize = resizeCanvas;\n\n\n\n// So we fire it manually...\n", "file_path": "index.js", "rank": 42, "score": 13482.394829119887 }, { "content": "const path = require('path');\n\nconst HtmlWebpackPlugin = require('html-webpack-plugin');\n\nconst webpack = require('webpack');\n\nconst WasmPackPlugin = require(\"@wasm-tool/wasm-pack-plugin\");\n\n\n\nmodule.exports = {\n\n entry: './index.js',\n\n output: {\n\n path: path.resolve(__dirname, 'dist'),\n\n filename: 'index.js',\n\n },\n\n plugins: [\n\n new HtmlWebpackPlugin({\n\n template: 'index.html'\n\n }),\n\n new WasmPackPlugin({\n\n crateDirectory: path.resolve(__dirname, \".\")\n\n }),\n\n // Have this example work in Edge which doesn't ship `TextEncoder` or\n\n // `TextDecoder` at this time.\n\n new webpack.ProvidePlugin({\n\n TextDecoder: ['text-encoding', 'TextDecoder'],\n\n TextEncoder: ['text-encoding', 'TextEncoder']\n\n })\n\n ],\n\n mode: 'development'\n", "file_path": "webpack.config.js", "rank": 43, "score": 12953.096885991949 }, { "content": "mod rugl;\n\n#[macro_use]\n\nmod macros;\n\nmod webgl;\n\n\n\n#[macro_export]\n\nmacro_rules! rugl_main {\n\n ($($tt:tt)*) => {\n\n use wasm_bindgen::*;\n\n\n\n #[wasm_bindgen(start)]\n\n pub fn start() -> Result<(), JsValue> {\n\n let _ = rugl_inner!($($tt)*)\n\n .unwrap()\n\n .step()?;\n\n\n\n Ok(())\n\n }\n\n\n\n }\n", "file_path": "src/lib.rs", "rank": 49, "score": 12.921250527839089 }, { "content": "}\n\n\n\n#[macro_use]\n\npub mod prelude {\n\n pub use super::*;\n\n\n\n pub use crate::rugl::{Rugl, RuglInner};\n\n pub use crate::webgl::*;\n\n pub use wasm_bindgen::prelude::*;\n\n pub use rugl_main as rugl;\n\n}", "file_path": "src/lib.rs", "rank": 50, "score": 12.653618321627143 }, { "content": "use std::fs;\n\nuse std::path::Path;\n\nuse std::process::Command;\n\n\n", "file_path": "build.rs", "rank": 57, "score": 2.9929926732083616 } ]
Rust
yamux/examples/throughput_test.rs
kingwel-xie/tentacle
9efe228ee6de3577a4ac2967f1055e00ebb32a11
use bytesize::ByteSize; use futures::prelude::*; use log::{info, warn}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::{TcpListener, TcpStream}, time::delay_for, }; use tokio_yamux::stream::StreamHandle; use tokio_yamux::{config::Config, session::Session}; fn main() { env_logger::init(); if std::env::args().nth(1) == Some("server".to_string()) { info!("Starting server ......"); run_server(); } else { info!("Starting client ......"); run_client(); } } const STR: &str = "fakeu1234567890cmxcmmmmmmmmmsssmssmsmsmxcmcmcnxzlllslsllcccccsannmxmxmxmxmxmxmxmxmmsssjjkzoso."; const LEN: usize = STR.len(); static REQC: AtomicUsize = AtomicUsize::new(0); static RESPC: AtomicUsize = AtomicUsize::new(0); use std::{ str, sync::atomic::{AtomicUsize, Ordering}, time::Duration, }; fn reqc_incr() -> usize { REQC.fetch_add(1, Ordering::Relaxed) } fn reqc() -> usize { REQC.swap(0, Ordering::SeqCst) } fn respc_incr() -> usize { RESPC.fetch_add(1, Ordering::Relaxed) } fn respc() -> usize { RESPC.swap(0, Ordering::SeqCst) } async fn show_metric() { let secs = 10; loop { delay_for(Duration::from_millis(1000 * secs)).await; let reqc = reqc(); let respc = respc(); info!( "{} secs req {}, resp {}; {} req/s, {}/s; {} resp/s {}/s", secs, reqc, respc, reqc as f64 / secs as f64, ByteSize::b(((reqc * LEN) as f64 / secs as f64) as u64).to_string_as(true), respc as f64 / secs as f64, ByteSize::b(((respc * LEN) as f64 / secs as f64) as u64).to_string_as(true), ); } } fn run_server() { let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.spawn(show_metric()); rt.block_on(async move { let mut listener = TcpListener::bind("127.0.0.1:12345").await.unwrap(); while let Ok((socket, _)) = listener.accept().await { info!("accepted a socket: {:?}", socket.peer_addr()); let mut session = Session::new_server(socket, Config::default()); tokio::spawn(async move { while let Some(Ok(mut stream)) = session.next().await { info!("Server accept a stream from client: id={}", stream.id()); tokio::spawn(async move { let mut data = [0u8; LEN]; stream.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); loop { stream.write_all(STR.as_bytes()).await.unwrap(); respc_incr(); stream.read_exact(&mut data).await.unwrap(); reqc_incr(); assert_eq!(data.as_ref(), STR.as_bytes()); } }); } }); } }); } fn run_client() { let num = std::env::args() .nth(1) .and_then(|s| s.parse::<usize>().ok()) .unwrap_or(2); let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.block_on(async move { let socket = TcpStream::connect("127.0.0.1:12345").await.unwrap(); let sa = socket.peer_addr().unwrap(); info!("[client] connected to server: {:?}", sa); let mut session = Session::new_client(socket, Config::default()); let streams = (0..num) .into_iter() .map(|_| session.open_stream().unwrap()) .collect::<Vec<_>>(); tokio::spawn(async move { loop { match session.next().await { Some(res) => warn!("res: {:?}", res), None => break, } } warn!("{:?} broken", sa); }); let f = |mut s: StreamHandle| { tokio::spawn(async move { s.write_all(STR.as_bytes()).await.unwrap(); let mut data = [0u8; LEN]; loop { s.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); respc_incr(); s.write_all(STR.as_bytes()).await.unwrap(); reqc_incr(); } }) }; for stream in streams { f(stream); } show_metric().await; }); }
use bytesize::ByteSize; use futures::prelude::*; use log::{info, warn}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::{TcpListener, TcpStream}, time::delay_for, }; use tokio_yamux::stream::StreamHandle; use tokio_yamux::{config::Config, session::Session}; fn main() { env_logger::init(); if std::env::args().nth(1) == Some("server".to_string()) { info!("Starting server ......"); run_server(); } else { info!("Starting
reqc_incr(); assert_eq!(data.as_ref(), STR.as_bytes()); } }); } }); } }); } fn run_client() { let num = std::env::args() .nth(1) .and_then(|s| s.parse::<usize>().ok()) .unwrap_or(2); let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.block_on(async move { let socket = TcpStream::connect("127.0.0.1:12345").await.unwrap(); let sa = socket.peer_addr().unwrap(); info!("[client] connected to server: {:?}", sa); let mut session = Session::new_client(socket, Config::default()); let streams = (0..num) .into_iter() .map(|_| session.open_stream().unwrap()) .collect::<Vec<_>>(); tokio::spawn(async move { loop { match session.next().await { Some(res) => warn!("res: {:?}", res), None => break, } } warn!("{:?} broken", sa); }); let f = |mut s: StreamHandle| { tokio::spawn(async move { s.write_all(STR.as_bytes()).await.unwrap(); let mut data = [0u8; LEN]; loop { s.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); respc_incr(); s.write_all(STR.as_bytes()).await.unwrap(); reqc_incr(); } }) }; for stream in streams { f(stream); } show_metric().await; }); }
client ......"); run_client(); } } const STR: &str = "fakeu1234567890cmxcmmmmmmmmmsssmssmsmsmxcmcmcnxzlllslsllcccccsannmxmxmxmxmxmxmxmxmmsssjjkzoso."; const LEN: usize = STR.len(); static REQC: AtomicUsize = AtomicUsize::new(0); static RESPC: AtomicUsize = AtomicUsize::new(0); use std::{ str, sync::atomic::{AtomicUsize, Ordering}, time::Duration, }; fn reqc_incr() -> usize { REQC.fetch_add(1, Ordering::Relaxed) } fn reqc() -> usize { REQC.swap(0, Ordering::SeqCst) } fn respc_incr() -> usize { RESPC.fetch_add(1, Ordering::Relaxed) } fn respc() -> usize { RESPC.swap(0, Ordering::SeqCst) } async fn show_metric() { let secs = 10; loop { delay_for(Duration::from_millis(1000 * secs)).await; let reqc = reqc(); let respc = respc(); info!( "{} secs req {}, resp {}; {} req/s, {}/s; {} resp/s {}/s", secs, reqc, respc, reqc as f64 / secs as f64, ByteSize::b(((reqc * LEN) as f64 / secs as f64) as u64).to_string_as(true), respc as f64 / secs as f64, ByteSize::b(((respc * LEN) as f64 / secs as f64) as u64).to_string_as(true), ); } } fn run_server() { let mut rt = tokio::runtime::Runtime::new().unwrap(); rt.spawn(show_metric()); rt.block_on(async move { let mut listener = TcpListener::bind("127.0.0.1:12345").await.unwrap(); while let Ok((socket, _)) = listener.accept().await { info!("accepted a socket: {:?}", socket.peer_addr()); let mut session = Session::new_server(socket, Config::default()); tokio::spawn(async move { while let Some(Ok(mut stream)) = session.next().await { info!("Server accept a stream from client: id={}", stream.id()); tokio::spawn(async move { let mut data = [0u8; LEN]; stream.read_exact(&mut data).await.unwrap(); assert_eq!(data.as_ref(), STR.as_bytes()); loop { stream.write_all(STR.as_bytes()).await.unwrap(); respc_incr(); stream.read_exact(&mut data).await.unwrap();
random
[ { "content": "fn main() {\n\n init();\n\n\n\n let cycles = std::env::args()\n\n .nth(1)\n\n .and_then(|number| number.parse().ok())\n\n .unwrap_or(100);\n\n\n\n let check_point = std::env::args()\n\n .nth(2)\n\n .and_then(|number| number.parse().ok())\n\n .unwrap_or(10);\n\n\n\n let mut bench = Bench::default().cycles(cycles).estimated_point(check_point);\n\n\n\n let mb = (0..1024 * 1024 * 10)\n\n .map(|_| rand::random::<u8>())\n\n .collect::<Vec<_>>();\n\n let kb = (0..1024 * 10)\n\n .map(|_| rand::random::<u8>())\n", "file_path": "bench/src/main.rs", "rank": 0, "score": 169959.31987503302 }, { "content": "fn server() {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async {\n\n let mut service = create_server();\n\n service\n\n .listen(\"/ip4/127.0.0.1/tcp/1337\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 1, "score": 162449.25326694868 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n info!(\"Starting server ......\");\n\n server();\n\n } else {\n\n info!(\"Starting client ......\");\n\n client();\n\n }\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 2, "score": 162206.329305616 }, { "content": "#[allow(clippy::inconsistent_digit_grouping)]\n\nfn main() {\n\n if let Ok(v) = env::var(\"DEP_OPENSSL_VERSION_NUMBER\") {\n\n let version = u64::from_str_radix(&v, 16).unwrap();\n\n\n\n if version >= 0x1_01_00_00_0 {\n\n println!(\"cargo:rustc-cfg=ossl110\");\n\n }\n\n }\n\n}\n", "file_path": "secio/build.rs", "rank": 3, "score": 162206.329305616 }, { "content": "fn main() {\n\n env_logger::init();\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n rt.block_on(async move {\n\n let (meta, _) = create_meta(1.into());\n\n let mut service = create(false, meta, ());\n\n let listen_addr = service\n\n .listen(\"/ip4/127.0.0.1/tcp/8900\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n println!(\"listen_addr: {}\", listen_addr);\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n } else {\n", "file_path": "examples/block_send.rs", "rank": 4, "score": 159153.3733037777 }, { "content": "fn server() {\n\n let key = SecioKeyPair::secp256k1_generated();\n\n let config = Config::new(key);\n\n\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async move {\n\n let mut listener = TcpListener::bind(\"127.0.0.1:1337\").await.unwrap();\n\n\n\n while let Ok((socket, _)) = listener.accept().await {\n\n let config = config.clone();\n\n tokio::spawn(async move {\n\n let (mut handle, _, _) = config.handshake(socket).await.unwrap();\n\n let mut data = [0u8; 11];\n\n handle.read_exact(&mut data).await.unwrap();\n\n info!(\"receive: {:?}\", BytesMut::from(&data[..]));\n\n handle.write_all(&data).await.unwrap();\n\n });\n\n }\n\n });\n\n}\n\n\n", "file_path": "secio/examples/secio_simple.rs", "rank": 5, "score": 156529.51362138987 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n info!(\"Starting server ......\");\n\n server();\n\n } else {\n\n info!(\"Starting client ......\");\n\n client();\n\n }\n\n}\n\n\n", "file_path": "secio/examples/secio_simple.rs", "rank": 6, "score": 156291.06646793277 }, { "content": "fn main() {\n\n env_logger::init();\n\n let 
callback = IdentifyCallback {\n\n local_listen_addrs: Vec::new(),\n\n };\n\n let protocol = MetaBuilder::default()\n\n .id(1.into())\n\n .service_handle(move || {\n\n ProtocolHandle::Callback(Box::new(\n\n IdentifyProtocol::new(callback).global_ip_only(false),\n\n ))\n\n })\n\n .build();\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n debug!(\"Starting server ......\");\n\n let mut service = ServiceBuilder::default()\n\n .insert_protocol(protocol)\n\n .key_pair(SecioKeyPair::secp256k1_generated())\n\n .forever(true)\n", "file_path": "protocols/identify/examples/id.rs", "rank": 7, "score": 156291.06646793277 }, { "content": "fn main() {\n\n env_logger::init();\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n debug!(\"Starting server ......\");\n\n let (sender, mut receiver) = channel(256);\n\n let protocol = create_meta(\n\n 1.into(),\n\n Duration::from_secs(5),\n\n Duration::from_secs(15),\n\n sender,\n\n );\n\n let mut service = ServiceBuilder::default()\n\n .insert_protocol(protocol)\n\n .key_pair(SecioKeyPair::secp256k1_generated())\n\n .forever(true)\n\n .build(SimpleHandler {});\n\n rt.spawn(async move {\n\n loop {\n\n match receiver.next().await {\n", "file_path": "protocols/ping/examples/ping.rs", "rank": 8, "score": 156291.06646793277 }, { "content": "fn main() {\n\n env_logger::init();\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n info!(\"Starting server ......\");\n\n run_server();\n\n } else {\n\n info!(\"Starting client ......\");\n\n run_client();\n\n }\n\n}\n\n\n", "file_path": "yamux/examples/yamux_simple.rs", "rank": 9, "score": 156291.06646793277 }, { "content": "fn main() {\n\n env_logger::init();\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n if std::env::args().nth(1) == Some(\"server\".to_string()) {\n\n rt.block_on(async move {\n\n let meta = create_meta(1.into());\n\n let mut service = create(true, meta, SHandle);\n\n let listen_addr = service\n\n .listen(\"/ip4/127.0.0.1/tcp/8900\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n info!(\"listen_addr: {}\", listen_addr);\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n } else {\n", "file_path": "examples/heavy_task_schedule.rs", "rank": 10, "score": 156291.06646793277 }, { "content": "fn main() {\n\n env_logger::init();\n\n let meta = create_meta(1.into(), 1400);\n\n let mut service = ServiceBuilder::default()\n\n .insert_protocol(meta)\n\n .forever(true)\n\n .build(SHandle {});\n\n\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let first_arg = std::env::args().nth(1).unwrap();\n\n if first_arg == \"server\" {\n\n debug!(\"Starting server ......\");\n\n rt.block_on(async move {\n\n service\n\n .listen(\"/ip4/127.0.0.1/tcp/1337\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n", "file_path": "protocols/discovery/examples/disc.rs", "rank": 12, "score": 156291.06646793277 }, { "content": "fn run_server() {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async move {\n\n let mut listener = TcpListener::bind(\"127.0.0.1:12345\").await.unwrap();\n\n\n\n while let Ok((socket, _)) = listener.accept().await {\n\n info!(\"accepted a socket: {:?}\", socket.peer_addr());\n\n let mut session = Session::new_server(socket, Config::default());\n\n tokio::spawn(async move {\n\n while let Some(Ok(mut 
stream)) = session.next().await {\n\n info!(\"Server accept a stream from client: id={}\", stream.id());\n\n tokio::spawn(async move {\n\n let mut data = [0u8; 3];\n\n stream.read_exact(&mut data).await.unwrap();\n\n info!(\"[server] read data: {:?}\", data);\n\n\n\n info!(\"[server] send 'def' to remote\");\n\n stream.write_all(b\"def\").await.unwrap();\n\n\n\n let mut data = [0u8; 2];\n\n stream.read_exact(&mut data).await.unwrap();\n\n info!(\"[server] read again: {:?}\", data);\n\n });\n\n }\n\n });\n\n }\n\n });\n\n}\n\n\n", "file_path": "yamux/examples/yamux_simple.rs", "rank": 14, "score": 117634.39159012778 }, { "content": "pub fn init() {\n\n // init secio two peers\n\n START_SECIO.call_once(|| {\n\n let (meta, _receiver) = create_meta(ProtocolId::new(1));\n\n let (addr_sender, addr_receiver) = channel::oneshot::channel::<Multiaddr>();\n\n let mut service = create(true, meta, ());\n\n let control = service.control().clone();\n\n thread::spawn(move || {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n rt.block_on(async move {\n\n let listen_addr = service\n\n .listen(\"/ip4/127.0.0.1/tcp/0\".parse().unwrap())\n\n .await\n\n .unwrap();\n\n let _res = addr_sender.send(listen_addr);\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n", "file_path": "bench/src/main.rs", "rank": 15, "score": 117511.19824271399 }, { "content": "fn create_server() -> Service<SHandle> {\n\n ServiceBuilder::default()\n\n .insert_protocol(create_meta(0.into()))\n\n .insert_protocol(create_meta(1.into()))\n\n .key_pair(SecioKeyPair::secp256k1_generated())\n\n .build(SHandle)\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 16, "score": 110048.8972757359 }, { "content": "/// Get current used memory(bytes)\n\nfn current_used_memory() -> Option<f64> {\n\n let sys = System::new();\n\n match sys.memory() {\n\n Ok(mem) => Some((mem.total.as_u64() - mem.free.as_u64()) as f64),\n\n Err(_) => None,\n\n }\n\n}\n\n\n", "file_path": "tests/test_kill.rs", "rank": 17, "score": 107675.88698365053 }, { "content": "/// Get current used cpu(all cores) average usage ratio\n\nfn current_used_cpu() -> Option<f32> {\n\n let sys = System::new();\n\n match sys.cpu_load_aggregate() {\n\n Ok(cpu) => {\n\n thread::sleep(Duration::from_secs(1));\n\n cpu.done().ok().map(|cpu| cpu.user)\n\n }\n\n Err(_) => None,\n\n }\n\n}\n\n\n", "file_path": "tests/test_kill.rs", "rank": 18, "score": 107675.74162581764 }, { "content": "fn no_secio_and_send_data(data: &[u8]) {\n\n unsafe {\n\n NO_SECIO_CONTROL.as_mut().map(|control| {\n\n control.filter_broadcast(TargetSession::All, 1.into(), Bytes::from(data.to_owned()))\n\n });\n\n\n\n if let Some(rev) = NO_SECIO_RECV.as_ref() {\n\n assert_eq!(\n\n rev.recv(),\n\n Ok(Notify::Message(bytes::Bytes::from(data.to_owned())))\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 19, "score": 107599.22284656069 }, { "content": "fn secio_and_send_data(data: &[u8]) {\n\n unsafe {\n\n SECIO_CONTROL.as_mut().map(|control| {\n\n control.filter_broadcast(\n\n TargetSession::All,\n\n ProtocolId::new(1),\n\n Bytes::from(data.to_owned()),\n\n )\n\n });\n\n if let Some(rev) = SECIO_RECV.as_ref() {\n\n assert_eq!(\n\n rev.recv(),\n\n Ok(Notify::Message(bytes::Bytes::from(data.to_owned())))\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 20, "score": 107599.22284656069 }, { "content": "fn create_meta(id: ProtocolId) -> (ProtocolMeta, crossbeam_channel::Receiver<Notify>) {\n\n let (sender, receiver) = 
crossbeam_channel::bounded(1);\n\n\n\n let meta = MetaBuilder::new()\n\n .id(id)\n\n .codec(|| {\n\n Box::new(\n\n Builder::new()\n\n .max_frame_length(1024 * 1024 * 20)\n\n .new_codec(),\n\n )\n\n })\n\n .service_handle(move || {\n\n if id == ProtocolId::default() {\n\n ProtocolHandle::Neither\n\n } else {\n\n let handle = Box::new(PHandle {\n\n connected_count: 0,\n\n sender,\n\n });\n\n ProtocolHandle::Callback(handle)\n\n }\n\n })\n\n .build();\n\n\n\n (meta, receiver)\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 21, "score": 89304.56617174087 }, { "content": "pub fn create<F>(secio: bool, meta: ProtocolMeta, shandle: F) -> Service<F>\n\nwhere\n\n F: ServiceHandle + Unpin,\n\n{\n\n let builder = ServiceBuilder::default()\n\n .insert_protocol(meta)\n\n .forever(true);\n\n\n\n if secio {\n\n builder\n\n .key_pair(SecioKeyPair::secp256k1_generated())\n\n .build(shandle)\n\n } else {\n\n builder.build(shandle)\n\n }\n\n}\n\n\n", "file_path": "bench/src/main.rs", "rank": 22, "score": 83747.12474291548 }, { "content": "fn client() {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async {\n\n let mut service = create_client();\n\n service\n\n .dial(\n\n \"/dns4/localhost/tcp/1337\".parse().unwrap(),\n\n TargetProtocol::All,\n\n )\n\n .await\n\n .unwrap();\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n}\n", "file_path": "examples/simple.rs", "rank": 23, "score": 78497.97258142455 }, { "content": "#[test]\n\nfn same_receiver() {\n\n let (mut txa1, _) = mpsc::channel::<i32>(1);\n\n let txa2 = txa1.clone();\n\n\n\n let (mut txb1, _) = mpsc::channel::<i32>(1);\n\n let txb2 = txb1.clone();\n\n\n\n assert!(txa1.same_receiver(&txa2));\n\n assert!(txb1.same_receiver(&txb2));\n\n assert!(!txa1.same_receiver(&txb1));\n\n\n\n txa1.disconnect();\n\n txb1.close_channel();\n\n\n\n assert!(!txa1.same_receiver(&txa2));\n\n assert!(txb1.same_receiver(&txb2));\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 24, "score": 75559.91951864847 }, { "content": "fn create_shandle(\n\n secio: bool,\n\n empty: bool,\n\n) -> (\n\n Box<dyn ServiceHandle + Send>,\n\n crossbeam_channel::Receiver<ServiceErrorType>,\n\n) {\n\n // NOTE: channel size must large, otherwise send will failed.\n\n let (sender, receiver) = crossbeam_channel::unbounded();\n\n\n\n if empty {\n\n (Box::new(EmptySHandle { sender, secio }), receiver)\n\n } else {\n\n (\n\n Box::new(SHandle {\n\n sender,\n\n secio,\n\n session_id: 0.into(),\n\n kind: SessionType::Inbound,\n\n }),\n\n receiver,\n\n )\n\n }\n\n}\n\n\n", "file_path": "tests/test_dial.rs", "rank": 25, "score": 75559.91951864847 }, { "content": "#[test]\n\nfn test_before_with_no_secio() {\n\n test_before_handle(false)\n\n}\n", "file_path": "tests/test_before_function.rs", "rank": 26, "score": 75559.91951864847 }, { "content": "#[test]\n\nfn sequence() {\n\n let (tx, rx) = mpsc::channel(1);\n\n\n\n let amt = 20;\n\n let t = thread::spawn(move || block_on(send_sequence(amt, tx)));\n\n let list: Vec<_> = block_on(rx.collect());\n\n let mut list = list.into_iter();\n\n for i in (1..=amt).rev() {\n\n assert_eq!(list.next().map(|item| item.1), Some(i));\n\n }\n\n assert_eq!(list.next(), None);\n\n\n\n t.join().unwrap();\n\n}\n\n\n\nasync fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {\n\n for x in 0..n {\n\n sender.send(n - x).await.unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/channel/tests/channel.rs", "rank": 27, "score": 75559.91951864847 }, { "content": "#[test]\n\nfn test_before_with_secio() 
{\n\n test_before_handle(true)\n\n}\n\n\n", "file_path": "tests/test_before_function.rs", "rank": 28, "score": 75559.91951864847 }, { "content": "fn client() {\n\n let key = SecioKeyPair::secp256k1_generated();\n\n let config = Config::new(key);\n\n\n\n let data = b\"hello world\";\n\n\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async move {\n\n let stream = TcpStream::connect(\"127.0.0.1:1337\").await.unwrap();\n\n let (mut handle, _, _) = config.handshake(stream).await.unwrap();\n\n match handle.write_all(data).await {\n\n Ok(_) => info!(\"send all\"),\n\n Err(e) => info!(\"err: {:?}\", e),\n\n }\n\n let mut data = [0u8; 11];\n\n handle.read_exact(&mut data).await.unwrap();\n\n info!(\"receive: {:?}\", BytesMut::from(&data[..]));\n\n });\n\n}\n", "file_path": "secio/examples/secio_simple.rs", "rank": 29, "score": 75559.91951864847 }, { "content": "#[test]\n\nfn test_close_with_secio() {\n\n test(true, false)\n\n}\n\n\n", "file_path": "tests/test_close.rs", "rank": 30, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_priority_with_secio() {\n\n test_priority(true)\n\n}\n\n\n", "file_path": "tests/test_priority.rs", "rank": 31, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_disconnect_with_no_secio() {\n\n test_disconnect(false);\n\n}\n", "file_path": "tests/test_disconnect.rs", "rank": 32, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn try_send_2() {\n\n let (mut tx, rx) = mpsc::channel(0);\n\n let mut rx = block_on_stream(rx).map(|item| item.1);\n\n\n\n tx.try_send(\"hello\").unwrap();\n\n\n\n let (readytx, readyrx) = oneshot::channel::<()>();\n\n\n\n let th = thread::spawn(move || {\n\n block_on(poll_fn(|cx| {\n\n assert!(tx.poll_ready(cx).is_pending());\n\n Poll::Ready(())\n\n }));\n\n\n\n drop(readytx);\n\n block_on(tx.send(\"goodbye\")).unwrap();\n\n });\n\n\n\n let _ignore = block_on(readyrx);\n\n assert_eq!(rx.next(), Some(\"hello\"));\n\n assert_eq!(rx.next(), Some(\"goodbye\"));\n\n assert_eq!(rx.next(), None);\n\n\n\n th.join().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 33, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn send_recv() {\n\n let (mut tx, rx) = mpsc::channel::<i32>(16);\n\n\n\n block_on(tx.send(1)).unwrap();\n\n drop(tx);\n\n let v: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(v, vec![1]);\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 34, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_disconnect_with_secio() {\n\n test_disconnect(true);\n\n}\n\n\n", "file_path": "tests/test_disconnect.rs", "rank": 35, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn drop_order() {\n\n static DROPS: AtomicUsize = AtomicUsize::new(0);\n\n let (mut tx, rx) = mpsc::channel(1);\n\n\n\n struct A;\n\n\n\n impl Drop for A {\n\n fn drop(&mut self) {\n\n DROPS.fetch_add(1, Ordering::SeqCst);\n\n }\n\n }\n\n\n\n block_on(tx.send(A)).unwrap();\n\n assert_eq!(DROPS.load(Ordering::SeqCst), 0);\n\n drop(rx);\n\n assert_eq!(DROPS.load(Ordering::SeqCst), 1);\n\n assert!(block_on(tx.send(A)).is_err());\n\n assert_eq!(DROPS.load(Ordering::SeqCst), 2);\n\n}\n", "file_path": "src/channel/tests/channel.rs", "rank": 36, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_kill_with_no_secio() {\n\n test_kill(false)\n\n}\n", "file_path": "tests/test_kill.rs", "rank": 37, "score": 74224.33186388739 }, { "content": "fn check_dial_errors(\n\n receiver: crossbeam_channel::Receiver<ServiceErrorType>,\n\n timeout: 
Duration,\n\n expected: usize,\n\n) -> usize {\n\n let now = Instant::now();\n\n for i in 0..expected {\n\n loop {\n\n if receiver.try_recv().is_ok() {\n\n break;\n\n }\n\n std::thread::sleep(Duration::from_millis(100));\n\n if now.elapsed() > timeout {\n\n return i;\n\n }\n\n }\n\n }\n\n expected\n\n}\n\n\n", "file_path": "tests/test_dial.rs", "rank": 38, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn hash_receiver() {\n\n use std::collections::hash_map::DefaultHasher;\n\n use std::hash::Hasher;\n\n\n\n let mut hasher_a1 = DefaultHasher::new();\n\n let mut hasher_a2 = DefaultHasher::new();\n\n let mut hasher_b1 = DefaultHasher::new();\n\n let mut hasher_b2 = DefaultHasher::new();\n\n let (mut txa1, _) = mpsc::channel::<i32>(1);\n\n let txa2 = txa1.clone();\n\n\n\n let (mut txb1, _) = mpsc::channel::<i32>(1);\n\n let txb2 = txb1.clone();\n\n\n\n txa1.hash_receiver(&mut hasher_a1);\n\n let hash_a1 = hasher_a1.finish();\n\n txa2.hash_receiver(&mut hasher_a2);\n\n let hash_a2 = hasher_a2.finish();\n\n txb1.hash_receiver(&mut hasher_b1);\n\n let hash_b1 = hasher_b1.finish();\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 39, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_kill_with_secio() {\n\n test_kill(true)\n\n}\n\n\n", "file_path": "tests/test_kill.rs", "rank": 40, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn send_backpressure() {\n\n let (waker, counter) = new_count_waker();\n\n let mut cx = Context::from_waker(&waker);\n\n\n\n let (mut tx, mut rx) = mpsc::channel(1);\n\n block_on(tx.send(1)).unwrap();\n\n\n\n let mut task = tx.send(2);\n\n assert_eq!(task.poll_unpin(&mut cx), Poll::Pending);\n\n assert_eq!(counter, 0);\n\n\n\n let item = block_on(rx.next().map(|item| item.map(|i| i.1))).unwrap();\n\n assert_eq!(item, 1);\n\n assert_eq!(counter, 1);\n\n assert_eq!(task.poll_unpin(&mut cx), Poll::Ready(Ok(())));\n\n\n\n let item = block_on(rx.next().map(|item| item.map(|i| i.1))).unwrap();\n\n assert_eq!(item, 2);\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 41, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn try_send_1() {\n\n const N: usize = 3000;\n\n let (mut tx, rx) = mpsc::channel(0);\n\n\n\n let t = thread::spawn(move || {\n\n for i in 0..N {\n\n loop {\n\n if tx.try_send(i).is_ok() {\n\n break;\n\n }\n\n }\n\n }\n\n });\n\n\n\n let result: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n for (i, j) in result.into_iter().enumerate() {\n\n assert_eq!(i, j);\n\n }\n\n\n\n t.join().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 42, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_close_with_no_secio() {\n\n test(false, false)\n\n}\n\n\n", "file_path": "tests/test_close.rs", "rank": 43, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_shutdown_with_no_secio() {\n\n test(false, true)\n\n}\n", "file_path": "tests/test_close.rs", "rank": 44, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_fail() {\n\n test_peer_id(true)\n\n}\n\n\n", "file_path": "tests/test_peer_id.rs", "rank": 46, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_priority_with_no_secio() {\n\n test_priority(false)\n\n}\n", "file_path": "tests/test_priority.rs", "rank": 47, "score": 74224.33186388739 }, { "content": "fn run_client() {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n\n\n rt.block_on(async move {\n\n let socket = TcpStream::connect(\"127.0.0.1:12345\").await.unwrap();\n\n info!(\"[client] connected to server: {:?}\", 
socket.peer_addr());\n\n let mut session = Session::new_client(socket, Config::default());\n\n let mut stream = session.open_stream().unwrap();\n\n\n\n tokio::spawn(async move {\n\n loop {\n\n match session.next().await {\n\n Some(_) => (),\n\n None => break,\n\n }\n\n }\n\n });\n\n\n\n info!(\"[client] send 'abc' to remote\");\n\n stream.write_all(b\"abc\").await.unwrap();\n", "file_path": "yamux/examples/yamux_simple.rs", "rank": 48, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_shutdown_with_secio() {\n\n test(true, true)\n\n}\n\n\n", "file_path": "tests/test_close.rs", "rank": 49, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn test_succeed() {\n\n test_peer_id(false)\n\n}\n", "file_path": "tests/test_peer_id.rs", "rank": 50, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn smoke() {\n\n let (mut sender, receiver) = mpsc::channel(1);\n\n\n\n let t = thread::spawn(move || while let Ok(()) = block_on(sender.send(42)) {});\n\n\n\n // `receiver` needs to be dropped for `sender` to stop sending and therefore before the join.\n\n block_on(receiver.take(3).for_each(|_| futures::future::ready(())));\n\n\n\n t.join().unwrap()\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc_close.rs", "rank": 51, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn drop_sender() {\n\n let (tx, mut rx) = mpsc::channel::<u32>(1);\n\n drop(tx);\n\n let f = poll_fn(|cx| rx.poll_next_unpin(cx));\n\n assert_eq!(block_on(f), None)\n\n}\n\n\n", "file_path": "src/channel/tests/channel.rs", "rank": 52, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn drop_rx() {\n\n let (mut tx, rx) = mpsc::channel::<u32>(1);\n\n block_on(tx.send(1)).unwrap();\n\n drop(rx);\n\n assert!(block_on(tx.send(1)).is_err());\n\n}\n\n\n", "file_path": "src/channel/tests/channel.rs", "rank": 53, "score": 74224.33186388739 }, { "content": "#[test]\n\nfn stress_poll_ready() {\n\n const AMT: u32 = 1000;\n\n const NTHREADS: u32 = 8;\n\n\n\n /// Run a stress test using the specified channel capacity.\n\n fn stress(capacity: usize) {\n\n let (tx, rx) = mpsc::channel(capacity);\n\n let mut threads = Vec::new();\n\n for _ in 0..NTHREADS {\n\n let sender = tx.clone();\n\n threads.push(thread::spawn(move || {\n\n block_on(stress_poll_ready_sender(sender, AMT))\n\n }));\n\n }\n\n drop(tx);\n\n\n\n let result: Vec<_> = block_on(rx.collect());\n\n assert_eq!(result.len() as u32, AMT * NTHREADS);\n\n\n\n for thread in threads {\n\n thread.join().unwrap();\n\n }\n\n }\n\n\n\n stress(0);\n\n stress(1);\n\n stress(8);\n\n stress(16);\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 54, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn stress_close_receiver() {\n\n for _ in 0..10000 {\n\n stress_close_receiver_iter();\n\n }\n\n}\n\n\n\nasync fn stress_poll_ready_sender(mut sender: mpsc::Sender<u32>, count: u32) {\n\n for i in (1..=count).rev() {\n\n sender.send(i).await.unwrap();\n\n }\n\n}\n\n\n\n/// Tests that after `poll_ready` indicates capacity a channel can always send without waiting.\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 55, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn send_shared_recv() {\n\n let (mut tx1, rx) = mpsc::channel::<i32>(16);\n\n let mut rx = block_on_stream(rx).map(|item| item.1);\n\n let mut tx2 = tx1.clone();\n\n\n\n block_on(tx1.send(1)).unwrap();\n\n assert_eq!(rx.next(), Some(1));\n\n\n\n block_on(tx2.send(2)).unwrap();\n\n assert_eq!(rx.next(), Some(2));\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 56, "score": 
72967.24595619977 }, { "content": "#[test]\n\nfn try_send_recv() {\n\n let (mut tx, mut rx) = mpsc::channel(1);\n\n tx.try_send(\"hello\").unwrap();\n\n tx.try_send(\"hello\").unwrap();\n\n tx.try_send(\"hello\").unwrap_err(); // should be full\n\n rx.try_next().unwrap();\n\n rx.try_next().unwrap();\n\n rx.try_next().unwrap_err(); // should be empty\n\n tx.try_send(\"hello\").unwrap();\n\n rx.try_next().unwrap();\n\n rx.try_next().unwrap_err(); // should be empty\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 57, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn send_recv_threads() {\n\n let (mut tx, rx) = mpsc::channel::<i32>(16);\n\n\n\n let t = thread::spawn(move || {\n\n block_on(tx.send(1)).unwrap();\n\n });\n\n\n\n let v: Vec<_> = block_on(rx.map(|item| item.1).take(1).collect());\n\n assert_eq!(v, vec![1]);\n\n\n\n t.join().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 58, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn stress_drop_sender() {\n\n fn list() -> impl Stream<Item = i32> {\n\n let (tx, rx) = mpsc::channel(1);\n\n thread::spawn(move || {\n\n block_on(send_one_two_three(tx));\n\n });\n\n rx.map(|item| item.1)\n\n }\n\n\n\n for _ in 0..10000 {\n\n let v: Vec<_> = block_on(list().collect());\n\n assert_eq!(v, vec![1, 2, 3]);\n\n }\n\n}\n\n\n\nasync fn send_one_two_three(mut tx: mpsc::Sender<i32>) {\n\n for i in 1..=3 {\n\n tx.send(i).await.unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 59, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn test_dial_no_notify_with_no_secio() {\n\n test_dial_with_no_notify(false)\n\n}\n", "file_path": "tests/test_dial.rs", "rank": 60, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn stress_shared_unbounded() {\n\n const AMT: u32 = 10000;\n\n const NTHREADS: u32 = 8;\n\n let (tx, rx) = mpsc::unbounded::<i32>();\n\n\n\n let t = thread::spawn(move || {\n\n let result: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(result.len(), (AMT * NTHREADS) as usize);\n\n for item in result {\n\n assert_eq!(item, 1);\n\n }\n\n });\n\n\n\n for _ in 0..NTHREADS {\n\n let tx = tx.clone();\n\n\n\n thread::spawn(move || {\n\n for _ in 0..AMT {\n\n tx.unbounded_send(1).unwrap();\n\n }\n\n });\n\n }\n\n\n\n drop(tx);\n\n\n\n t.join().ok().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 61, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn test_repeated_dial_with_no_secio() {\n\n test_repeated_dial(false)\n\n}\n\n\n", "file_path": "tests/test_dial.rs", "rank": 62, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn try_send_fail() {\n\n let (mut tx, rx) = mpsc::channel(0);\n\n let mut rx = block_on_stream(rx).map(|item| item.1);\n\n\n\n tx.try_send(\"hello\").unwrap();\n\n\n\n // This should fail\n\n assert!(tx.try_send(\"fail\").is_err());\n\n\n\n assert_eq!(rx.next(), Some(\"hello\"));\n\n\n\n tx.try_send(\"goodbye\").unwrap();\n\n drop(tx);\n\n\n\n assert_eq!(rx.next(), Some(\"goodbye\"));\n\n assert_eq!(rx.next(), None);\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 63, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn send_recv() {\n\n let (mut tx, rx) = mpsc::channel::<i32>(16);\n\n tx.try_send(2).unwrap();\n\n tx.try_quick_send(1).unwrap();\n\n tx.try_send(3).unwrap();\n\n tx.try_send(4).unwrap();\n\n tx.try_quick_send(6).unwrap();\n\n tx.try_send(5).unwrap();\n\n\n\n drop(tx);\n\n let v: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(v, vec![1, 6, 
2, 3, 4, 5]);\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc_priority.rs", "rank": 64, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn test_repeated_dial_with_secio() {\n\n test_repeated_dial(true)\n\n}\n\n\n", "file_path": "tests/test_dial.rs", "rank": 65, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn test_dial_no_notify_with_secio() {\n\n test_dial_with_no_notify(true)\n\n}\n\n\n", "file_path": "tests/test_dial.rs", "rank": 66, "score": 72967.24595619977 }, { "content": "#[test]\n\nfn send_recv_no_buffer() {\n\n // Run on a task context\n\n block_on(poll_fn(move |cx| {\n\n let (tx, rx) = mpsc::channel::<i32>(0);\n\n pin_mut!(tx, rx);\n\n\n\n assert!(tx.as_mut().poll_flush(cx).is_ready());\n\n assert!(tx.as_mut().poll_ready(cx).is_ready());\n\n\n\n // Send first message\n\n assert!(tx.as_mut().start_send(1).is_ok());\n\n assert!(tx.as_mut().poll_ready(cx).is_pending());\n\n\n\n // poll_ready said Pending, so no room in buffer, therefore new sends\n\n // should get rejected with is_full.\n\n assert!(tx.as_mut().start_send(0).unwrap_err().is_full());\n\n assert!(tx.as_mut().poll_ready(cx).is_pending());\n\n\n\n // Take the value\n\n assert_eq!(\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 67, "score": 72967.24595619977 }, { "content": "/// Finish the agreement. On success, returns the shared key that both remote agreed upon.\n\npub fn agree(\n\n algorithm: KeyAgreement,\n\n my_private_key: agreement::EphemeralPrivateKey,\n\n other_public_key: &[u8],\n\n) -> Result<Vec<u8>, SecioError> {\n\n agreement::agree_ephemeral(\n\n my_private_key,\n\n &agreement::UnparsedPublicKey::new(algorithm.into(), other_public_key),\n\n SecioError::SecretGenerationFailed,\n\n |key_material| Ok(key_material.to_vec()),\n\n )\n\n}\n", "file_path": "secio/src/exchange.rs", "rank": 68, "score": 72812.92827620247 }, { "content": "#[test]\n\nfn recv_close_gets_none() {\n\n let (mut tx, mut rx) = mpsc::channel::<i32>(10);\n\n\n\n // Run on a task context\n\n block_on(poll_fn(move |cx| {\n\n rx.close();\n\n\n\n assert_eq!(rx.poll_next_unpin(cx), Poll::Ready(None));\n\n match tx.poll_ready(cx) {\n\n Poll::Pending | Poll::Ready(Ok(_)) => panic!(),\n\n Poll::Ready(Err(e)) => assert!(e.is_disconnected()),\n\n };\n\n\n\n Poll::Ready(())\n\n }));\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 69, "score": 71781.93825223838 }, { "content": "fn generate_stream_cipher_and_hmac(\n\n t: CipherType,\n\n _digest: Digest,\n\n mode: CryptoMode,\n\n info: &[u8],\n\n key_size: usize,\n\n iv_size: usize,\n\n) -> (BoxStreamCipher, Option<Hmac>) {\n\n let (iv, rest) = info.split_at(iv_size);\n\n let (cipher_key, _mac_key) = rest.split_at(key_size);\n\n let hmac = match t {\n\n CipherType::ChaCha20Poly1305 | CipherType::Aes128Gcm | CipherType::Aes256Gcm => None,\n\n #[cfg(unix)]\n\n _ => Some(Hmac::from_key(_digest, _mac_key)),\n\n };\n\n let cipher = new_stream(t, cipher_key, iv, mode);\n\n (cipher, hmac)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "secio/src/handshake/procedure.rs", "rank": 70, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn send_recv_threads_no_capacity() {\n\n let (mut tx, rx) = mpsc::channel::<i32>(0);\n\n\n\n let t = thread::spawn(move || {\n\n block_on(tx.send(1)).unwrap();\n\n block_on(tx.send(2)).unwrap();\n\n });\n\n\n\n let v: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(v, vec![1, 2]);\n\n\n\n t.join().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 71, "score": 71781.93825223838 }, { "content": 
"#[test]\n\nfn send_backpressure_multi_senders() {\n\n let (waker, counter) = new_count_waker();\n\n let mut cx = Context::from_waker(&waker);\n\n\n\n let (mut tx1, mut rx) = mpsc::channel(1);\n\n let mut tx2 = tx1.clone();\n\n block_on(tx1.send(1)).unwrap();\n\n\n\n let mut task = tx2.send(2);\n\n assert_eq!(task.poll_unpin(&mut cx), Poll::Pending);\n\n assert_eq!(counter, 0);\n\n\n\n let item = block_on(rx.next().map(|item| item.map(|i| i.1))).unwrap();\n\n assert_eq!(item, 1);\n\n assert_eq!(counter, 1);\n\n assert_eq!(task.poll_unpin(&mut cx), Poll::Ready(Ok(())));\n\n\n\n let item = block_on(rx.next().map(|item| item.map(|i| i.1))).unwrap();\n\n assert_eq!(item, 2);\n\n}\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 72, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn test_block_send_with_secio() {\n\n test_block_send(true, false)\n\n}\n\n\n", "file_path": "tests/test_block_send.rs", "rank": 73, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn send_recv_unbound() {\n\n let (tx, rx) = mpsc::unbounded::<i32>();\n\n tx.unbounded_send(2).unwrap();\n\n tx.quick_unbounded_send(1).unwrap();\n\n tx.unbounded_send(3).unwrap();\n\n tx.unbounded_send(4).unwrap();\n\n tx.quick_unbounded_send(6).unwrap();\n\n tx.unbounded_send(5).unwrap();\n\n\n\n drop(tx);\n\n let v: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(v, vec![1, 6, 2, 3, 4, 5]);\n\n}\n", "file_path": "src/channel/tests/mpsc_priority.rs", "rank": 74, "score": 71781.93825223838 }, { "content": "/// Stress test that after receiver dropped,\n\n/// no messages are lost.\n\nfn stress_close_receiver_iter() {\n\n let (tx, rx) = mpsc::unbounded();\n\n let mut rx = block_on_stream(rx);\n\n let (unwritten_tx, unwritten_rx) = std::sync::mpsc::channel();\n\n let th = thread::spawn(move || {\n\n for i in 1.. {\n\n if tx.unbounded_send(i).is_err() {\n\n unwritten_tx.send(i).expect(\"unwritten_tx\");\n\n return;\n\n }\n\n }\n\n });\n\n\n\n // Read one message to make sure thread effectively started\n\n assert_eq!(Some(1), rx.next().map(|item| item.1));\n\n\n\n rx.close();\n\n\n\n for i in 2.. 
{\n\n match rx.next().map(|item| item.1) {\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 75, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn multiple_senders_disconnect() {\n\n {\n\n let (mut tx1, mut rx) = mpsc::channel(1);\n\n let (tx2, mut tx3, mut tx4) = (tx1.clone(), tx1.clone(), tx1.clone());\n\n\n\n // disconnect, dropping and Sink::poll_close should all close this sender but leave the\n\n // channel open for other senders\n\n tx1.disconnect();\n\n drop(tx2);\n\n block_on(tx3.close()).unwrap();\n\n\n\n assert!(tx1.is_closed());\n\n assert!(tx3.is_closed());\n\n assert!(!tx4.is_closed());\n\n\n\n block_on(tx4.send(5)).unwrap();\n\n assert_eq!(block_on(rx.next().map(|item| item.map(|i| i.1))), Some(5));\n\n\n\n // dropping the final sender will close the channel\n\n drop(tx4);\n", "file_path": "src/channel/tests/mpsc_close.rs", "rank": 76, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn stress_shared_bounded_hard() {\n\n const AMT: u32 = 10000;\n\n const NTHREADS: u32 = 8;\n\n let (tx, rx) = mpsc::channel::<i32>(0);\n\n\n\n let t = thread::spawn(move || {\n\n let result: Vec<_> = block_on(rx.map(|item| item.1).collect());\n\n assert_eq!(result.len(), (AMT * NTHREADS) as usize);\n\n for item in result {\n\n assert_eq!(item, 1);\n\n }\n\n });\n\n\n\n for _ in 0..NTHREADS {\n\n let mut tx = tx.clone();\n\n\n\n thread::spawn(move || {\n\n for _ in 0..AMT {\n\n block_on(tx.send(1)).unwrap();\n\n }\n\n });\n\n }\n\n\n\n drop(tx);\n\n\n\n t.join().unwrap();\n\n}\n\n\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 77, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn test_block_send_with_no_secio() {\n\n test_block_send(false, false)\n\n}\n", "file_path": "tests/test_block_send.rs", "rank": 78, "score": 71781.93825223838 }, { "content": "#[test]\n\nfn tx_close_gets_none() {\n\n let (_, mut rx) = mpsc::channel::<i32>(10);\n\n\n\n // Run on a task context\n\n block_on(poll_fn(move |cx| {\n\n assert_eq!(rx.poll_next_unpin(cx), Poll::Ready(None));\n\n Poll::Ready(())\n\n }));\n\n}\n\n\n\n// #[test]\n\n// fn spawn_sends_items() {\n\n// let core = local_executor::Core::new();\n\n// let stream = unfold(0, |i| Some(ok::<_,u8>((i, i + 1))));\n\n// let rx = mpsc::spawn(stream, &core, 1);\n\n// assert_eq!(core.run(rx.take(4).collect()).unwrap(),\n\n// [0, 1, 2, 3]);\n\n// }\n\n\n\n// #[test]\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 79, "score": 71781.93825223838 }, { "content": "/// Generates a new key pair as part of the exchange.\n\n///\n\n/// Returns the opaque private key and the corresponding public key.\n\npub fn generate_agreement(\n\n algorithm: KeyAgreement,\n\n) -> Result<(agreement::EphemeralPrivateKey, Vec<u8>), SecioError> {\n\n let rng = ring_rand::SystemRandom::new();\n\n\n\n match agreement::EphemeralPrivateKey::generate(algorithm.into(), &rng) {\n\n Ok(tmp_priv_key) => {\n\n let tmp_pub_key = tmp_priv_key\n\n .compute_public_key()\n\n .map_err(|_| SecioError::EphemeralKeyGenerationFailed)?;\n\n Ok((tmp_priv_key, tmp_pub_key.as_ref().to_vec()))\n\n }\n\n Err(_) => {\n\n debug!(\"failed to generate ECDH key\");\n\n Err(SecioError::EphemeralKeyGenerationFailed)\n\n }\n\n }\n\n}\n\n\n", "file_path": "secio/src/exchange.rs", "rank": 80, "score": 71477.34062144138 }, { "content": "#[cfg(feature = \"flatc\")]\n\nfn addr_to_offset<'b>(\n\n fbb: &mut flatbuffers::FlatBufferBuilder<'b>,\n\n addr: &Multiaddr,\n\n) -> flatbuffers::WIPOffset<FbsAddress<'b>> {\n\n let bytes = fbb.create_vector(addr.as_ref());\n\n let mut addr_builder = 
AddressBuilder::new(fbb);\n\n addr_builder.add_bytes(bytes);\n\n addr_builder.finish()\n\n}\n\n\n", "file_path": "protocols/identify/src/protocol.rs", "rank": 81, "score": 71477.34062144138 }, { "content": "fn start_service<F>(\n\n mut service: Service<F>,\n\n listen_addr: Multiaddr,\n\n) -> ::std::thread::JoinHandle<()>\n\nwhere\n\n F: ServiceHandle + Unpin + Send + 'static,\n\n{\n\n thread::spawn(move || {\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n rt.block_on(async move {\n\n service\n\n .dial(listen_addr, TargetProtocol::All)\n\n .await\n\n .unwrap();\n\n\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n })\n\n}\n\n\n", "file_path": "tests/test_close.rs", "rank": 84, "score": 71477.34062144138 }, { "content": "#[test]\n\nfn test_protocol_open_with_secio_session() {\n\n test_protocol_open(true)\n\n}\n\n\n", "file_path": "tests/test_protocol_open.rs", "rank": 85, "score": 70662.43171011293 }, { "content": "#[test]\n\nfn test_protocol_open_with_no_secio_session() {\n\n test_protocol_open(false)\n\n}\n", "file_path": "tests/test_protocol_open.rs", "rank": 86, "score": 70662.43171011293 }, { "content": "#[test]\n\nfn test_session_handle_with_no_secio() {\n\n test_session_handle_open(false)\n\n}\n", "file_path": "tests/test_session_handle_open.rs", "rank": 87, "score": 70662.43171011293 }, { "content": "#[test]\n\nfn test_block_future_task() {\n\n let mut service = create(create_meta(1.into()), ());\n\n\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n rt.block_on(async move {\n\n loop {\n\n if service.next().await.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n}\n", "file_path": "tests/test_block_future_task.rs", "rank": 88, "score": 70662.43171011293 }, { "content": "#[test]\n\nfn multiple_senders_close_channel() {\n\n {\n\n let (mut tx1, mut rx) = mpsc::channel(1);\n\n let mut tx2 = tx1.clone();\n\n\n\n // close_channel should shut down the whole channel\n\n tx1.close_channel();\n\n\n\n assert!(tx1.is_closed());\n\n assert!(tx2.is_closed());\n\n\n\n let err = block_on(tx2.send(5)).unwrap_err();\n\n assert!(err.is_disconnected());\n\n\n\n assert_eq!(block_on(rx.next()), None);\n\n }\n\n\n\n {\n\n let (tx1, mut rx) = mpsc::unbounded();\n\n let mut tx2 = tx1.clone();\n", "file_path": "src/channel/tests/mpsc_close.rs", "rank": 89, "score": 70662.43171011293 }, { "content": "#[test]\n\nfn test_session_handle_with_secio() {\n\n test_session_handle_open(true)\n\n}\n\n\n", "file_path": "tests/test_session_handle_open.rs", "rank": 90, "score": 70662.43171011293 }, { "content": "#[doc(hidden)]\n\n#[cfg(not(unix))]\n\npub fn new_stream(\n\n t: cipher::CipherType,\n\n key: &[u8],\n\n _iv: &[u8],\n\n mode: CryptoMode,\n\n) -> BoxStreamCipher {\n\n Box::new(ring_impl::RingAeadCipher::new(t, key, mode))\n\n}\n\n\n", "file_path": "secio/src/crypto/mod.rs", "rank": 91, "score": 70220.25471375376 }, { "content": "pub fn create_meta(\n\n id: ProtocolId,\n\n interval: Duration,\n\n timeout: Duration,\n\n event_sender: Sender<Event>,\n\n) -> ProtocolMeta {\n\n MetaBuilder::new()\n\n .id(id)\n\n .service_handle(move || {\n\n let handle = Box::new(PingHandler::new(interval, timeout, event_sender));\n\n ProtocolHandle::Callback(handle)\n\n })\n\n .build()\n\n}\n\n\n", "file_path": "protocols/ping/examples/ping.rs", "rank": 92, "score": 70220.25471375376 }, { "content": "#[test]\n\nfn stress_receiver_multi_task_bounded_hard() {\n\n const AMT: usize = 10_000;\n\n const NTHREADS: u32 = 2;\n\n\n\n let (mut tx, rx) = mpsc::channel::<usize>(0);\n\n let 
rx = Arc::new(Mutex::new(Some(rx)));\n\n let n = Arc::new(AtomicUsize::new(0));\n\n\n\n let mut th = vec![];\n\n\n\n for _ in 0..NTHREADS {\n\n let rx = rx.clone();\n\n let n = n.clone();\n\n\n\n let t = thread::spawn(move || {\n\n let mut i = 0;\n\n\n\n loop {\n\n i += 1;\n\n let mut rx_opt = rx.lock().unwrap();\n", "file_path": "src/channel/tests/mpsc.rs", "rank": 95, "score": 69603.3949842489 }, { "content": "#[test]\n\nfn test_block_send_with_secio_session() {\n\n test_block_send(true, true)\n\n}\n\n\n", "file_path": "tests/test_block_send_session.rs", "rank": 96, "score": 69603.3949842489 }, { "content": "#[test]\n\nfn test_block_send_with_no_secio_session() {\n\n test_block_send(false, true)\n\n}\n", "file_path": "tests/test_block_send_session.rs", "rank": 97, "score": 69603.3949842489 }, { "content": "use std::{io, pin::Pin, task::Context};\n\nuse tokio_util::codec::{Decoder, Encoder};\n\n\n\nuse crate::{\n\n context::{ProtocolContext, ProtocolContextMutRef, ServiceContext},\n\n service::{ProtocolEvent, ServiceError, ServiceEvent},\n\n};\n\n\n\n/// Service handle\n\n///\n\n/// #### Note\n\n///\n\n/// All functions on this trait will block the entire server running, do not insert long-time tasks,\n\n/// you can use the futures task instead.\n\n///\n\n/// #### Behavior\n\n///\n\n/// The handle that exists when the Service is created.\n\n///\n\n/// Mainly handle some Service-level errors thrown at runtime, such as listening errors.\n\n///\n\n/// At the same time, the session establishment and disconnection messages will also be perceived here.\n", "file_path": "src/traits.rs", "rank": 99, "score": 15.429068944033045 } ]
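For readers skimming the throughput_test.rs row above: the reported req/s figures come from counters that are bumped with `Relaxed` ordering on the hot path and reset with `swap(0, SeqCst)` when the reporter wakes up. Below is a minimal, self-contained sketch of that pattern (illustrative only, not part of the dataset row; `reqc_take` stands in for the file's `reqc()`).

```rust
// Sketch of the counter pattern in throughput_test.rs: increment with Relaxed
// ordering on the hot path, then swap(0, SeqCst) when the reporter reads it,
// so each reporting interval starts from zero.
use std::sync::atomic::{AtomicUsize, Ordering};

static REQC: AtomicUsize = AtomicUsize::new(0);

fn reqc_incr() -> usize {
    REQC.fetch_add(1, Ordering::Relaxed)
}

fn reqc_take() -> usize {
    // Returns the count accumulated since the last read and resets it atomically.
    REQC.swap(0, Ordering::SeqCst)
}

fn main() {
    for _ in 0..5 {
        reqc_incr();
    }
    assert_eq!(reqc_take(), 5);
    assert_eq!(reqc_take(), 0);
}
```

Because `swap` both reads and zeroes the value atomically, increments landing between two reports are neither lost nor double-counted.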
Rust
app/src/core.rs
IgnusG/garlic
dd8100f035c59952ae2db43cb26c9111907f3711
use mio::tcp::{TcpStream, TcpListener}; use mio::{Poll, Token, Ready, PollOpt, Events}; use std::net; use std::net::SocketAddr; use std::sync::mpsc; use std::thread; use std::thread::{JoinHandle}; use std::io::Write; use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use errors::*; use brunch::{send_message, create_connection, create_udp_connection, send_udp_message, receive_udp_message, receive_message}; use messages::Message; use messages::Message::*; use messages::onion::*; use messages::onion::Onion::*; use messages::auth::*; use messages::auth::Auth::*; use messages::rps::*; use messages::rps::Rps::*; use messages::p2p; use messages::p2p::P2PMessage; use config; static NEXT_TUNNEL_ID: AtomicUsize = ATOMIC_USIZE_INIT; static NEXT_REQUEST_ID: AtomicUsize = ATOMIC_USIZE_INIT; struct Communication { receiver: mpsc::Receiver<Message>, sender: mpsc::Sender<StreamType>, } impl Communication { fn send(&self, message: Message) { self.sender.send(StreamType::API(message)); } fn receive(&self) -> Result<Message> { Ok(self.receiver.recv().chain_err(|| "sender diconnected")?) } } struct AuthSession { session_id: u16, rps_peer: RpsPeer } pub enum StreamType { API(Message), P2P(Message) } fn request_peer(comm: &Communication) -> Result<RpsPeer> { comm.send(Rps(Query(RpsQuery {}))); if let Rps(Peer(rps_peer)) = comm.receive()? { Ok(rps_peer) } else { bail!("protocol breach - expected RpsPeer") } } fn encrypt_for_all_peers(peers: &Vec<AuthSession>, data: Vec<u8>, comm: &Communication) -> Result<Vec<u8>> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers.first().unwrap().session_id, request_id: request_id, cleartext: true, payload: data }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; if peers.len() < 2 { return Ok(data) }; for peer in &peers[1..] { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers[0].session_id, request_id: request_id, cleartext: false, payload: data.clone() }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; }; Ok(data) } struct Connection { udp: Option<net::UdpSocket>, tcp: Option<net::TcpStream> } impl Connection { fn send(&mut self, message: Message) -> Result<()> { if let Some(ref mut conn) = self.tcp { send_message(conn, message); } else if let Some(ref conn) = self.udp { send_udp_message(conn, message); } else { bail!("at least one connection needs to be specified"); } Ok(()) } fn receive(&mut self) -> Result<Message> { if let Some(ref mut conn) = self.tcp { Ok(receive_message(conn)?) } else if let Some(ref conn) = self.udp { Ok(receive_udp_message(conn)?) 
} else { bail!("at least one connection needs to be specified"); } } } fn connect_to_peer(peer: RpsPeer, peers: &Vec<AuthSession>, conf: &config::Config, comm: &Communication) -> Result<AuthSession> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(SessionStart(AuthSessionStart { request_id: request_id, hostkey: peer.hostkey.clone() }))); let conn = if peers.len() == 0 { let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { tcp: Some(create_connection(socket)?), udp: None } } else { let peer = &peers.first().unwrap().rps_peer; let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { udp: Some(create_udp_connection(socket)?), tcp: None } }; if let Auth(SessionHS1(message)) = comm.receive()? { } else { bail!("protocol breach - expected AuthSessionHS1") }; let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; Ok(AuthSession { session_id: 0, rps_peer: peer }) } fn send_over_data(data: OnionTunnelPayload) -> Result<()> { unimplemented!(); } fn start_dialogue(message: &OnionTunnelBuild, conf: &config::Config, comm: &Communication) { trace_labeled_error!( "dialogue encountered a problem", { let mut peers = vec![]; for _ in 0..conf.min_hop_count { let peer = request_peer(comm)?; let auth_session = connect_to_peer(peer, &peers, conf, comm)?; peers.push(auth_session); } let tunnel_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Onion(TunnelReady(OnionTunnelPayload { tunnel_id: tunnel_id, payload: message.hostkey.clone() }))); loop { match comm.receive()? { Onion(TunnelData(message)) => { send_over_data(message); }, Onion(TunnelDestroy(message)) => { break; }, _ => bail!("protocol breach - expected OnionTunnelData or OnionTunnelDestroy") } } }); } fn answer_dialogue(message: &P2PMessage, conf: &config::Config, comm: &Communication) { unimplemented!(); } fn spinup_state_machine(message: Message, conf: config::Config, ty: mpsc::Sender<StreamType>) -> (mpsc::Sender<Message>, JoinHandle<()>) { let (tx, rx) = mpsc::channel(); let handle = thread::spawn(move || { let message = &message; let comm = &Communication { receiver: rx, sender: ty, }; trace_labeled_error!("failed to create state machine", { match *message { Onion(TunnelBuild(ref message)) => start_dialogue(message, &conf, &comm), P2P(ref message) => { match message.message_type { p2p::P2P::Knock => answer_dialogue(message, &conf, &comm), _ => note!("message {} not part of protocol - discarding") } } _ => note!("message {} not part of protocol - discarding") }; }); }); (tx, handle) } pub fn start(rx: &mpsc::Receiver<StreamType>, ty: mpsc::Sender<StreamType>, conf: config::Config) -> Result<()> { loop { status!("Waiting for stream"); }; Ok(()) }
use mio::tcp::{TcpStream, TcpListener}; use mio::{Poll, Token, Ready, PollOpt, Events}; use std::net; use std::net::SocketAddr; use std::sync::mpsc; use std::thread; use std::thread::{JoinHandle}; use std::io::Write; use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use errors::*; use brunch::{send_message, create_connection, create_udp_connection, send_udp_message, receive_udp_message, receive_message}; use messages::Message; use messages::Message::*; use messages::onion::*; use messages::onion::Onion::*; use messages::auth::*; use messages::auth::Auth::*; use messages::rps::*; use messages::rps::Rps::*; use messages::p2p; use messages::p2p::P2PMessage; use config; static NEXT_TUNNEL_ID: AtomicUsize = ATOMIC_USIZE_INIT; static NEXT_REQUEST_ID: AtomicUsize = ATOMIC_USIZE_INIT; struct Communication { receiver: mpsc::Receiver<Message>, sender: mpsc::Sender<StreamType>, } impl Communication { fn send(&self, message: Message) { self.sender.send(StreamType::API(message)); } fn receive(&self) -> Result<Message> { Ok(self.receiver.recv().chain_err(|| "sender diconnected")?) } } struct AuthSession { session_id: u16, rps_peer: RpsPeer } pub enum StreamType { API(Message), P2P(Message) } fn request_peer(comm: &Communication) -> Result<RpsPeer> { comm.send(Rps(Query(RpsQuery {}))); if let Rps(Peer(rps_peer)) = comm.receive()? { Ok(rps_peer) } else { bail!("protocol breach - expected RpsPeer") } } fn encrypt_for_all_peers(peers: &Vec<AuthSession>, data: Vec<u8>, comm: &Communication) -> Result<Vec<u8>> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers.first().unwrap().session_id, request_id: request_id, cleartext: true, payload: data }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; if p
_over_data(data: OnionTunnelPayload) -> Result<()> { unimplemented!(); } fn start_dialogue(message: &OnionTunnelBuild, conf: &config::Config, comm: &Communication) { trace_labeled_error!( "dialogue encountered a problem", { let mut peers = vec![]; for _ in 0..conf.min_hop_count { let peer = request_peer(comm)?; let auth_session = connect_to_peer(peer, &peers, conf, comm)?; peers.push(auth_session); } let tunnel_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Onion(TunnelReady(OnionTunnelPayload { tunnel_id: tunnel_id, payload: message.hostkey.clone() }))); loop { match comm.receive()? { Onion(TunnelData(message)) => { send_over_data(message); }, Onion(TunnelDestroy(message)) => { break; }, _ => bail!("protocol breach - expected OnionTunnelData or OnionTunnelDestroy") } } }); } fn answer_dialogue(message: &P2PMessage, conf: &config::Config, comm: &Communication) { unimplemented!(); } fn spinup_state_machine(message: Message, conf: config::Config, ty: mpsc::Sender<StreamType>) -> (mpsc::Sender<Message>, JoinHandle<()>) { let (tx, rx) = mpsc::channel(); let handle = thread::spawn(move || { let message = &message; let comm = &Communication { receiver: rx, sender: ty, }; trace_labeled_error!("failed to create state machine", { match *message { Onion(TunnelBuild(ref message)) => start_dialogue(message, &conf, &comm), P2P(ref message) => { match message.message_type { p2p::P2P::Knock => answer_dialogue(message, &conf, &comm), _ => note!("message {} not part of protocol - discarding") } } _ => note!("message {} not part of protocol - discarding") }; }); }); (tx, handle) } pub fn start(rx: &mpsc::Receiver<StreamType>, ty: mpsc::Sender<StreamType>, conf: config::Config) -> Result<()> { loop { status!("Waiting for stream"); }; Ok(()) }
eers.len() < 2 { return Ok(data) }; for peer in &peers[1..] { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(CipherEncrypt(AuthCipherCrypt { session_id: peers[0].session_id, request_id: request_id, cleartext: false, payload: data.clone() }))); let data = if let Auth(CipherEncryptResp(message)) = comm.receive()? { message.payload } else { bail!("protocol breach - expected CipherEncryptResp") }; }; Ok(data) } struct Connection { udp: Option<net::UdpSocket>, tcp: Option<net::TcpStream> } impl Connection { fn send(&mut self, message: Message) -> Result<()> { if let Some(ref mut conn) = self.tcp { send_message(conn, message); } else if let Some(ref conn) = self.udp { send_udp_message(conn, message); } else { bail!("at least one connection needs to be specified"); } Ok(()) } fn receive(&mut self) -> Result<Message> { if let Some(ref mut conn) = self.tcp { Ok(receive_message(conn)?) } else if let Some(ref conn) = self.udp { Ok(receive_udp_message(conn)?) } else { bail!("at least one connection needs to be specified"); } } } fn connect_to_peer(peer: RpsPeer, peers: &Vec<AuthSession>, conf: &config::Config, comm: &Communication) -> Result<AuthSession> { let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; comm.send(Auth(SessionStart(AuthSessionStart { request_id: request_id, hostkey: peer.hostkey.clone() }))); let conn = if peers.len() == 0 { let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { tcp: Some(create_connection(socket)?), udp: None } } else { let peer = &peers.first().unwrap().rps_peer; let socket = SocketAddr::new(peer.ip_addr, peer.port); Connection { udp: Some(create_udp_connection(socket)?), tcp: None } }; if let Auth(SessionHS1(message)) = comm.receive()? { } else { bail!("protocol breach - expected AuthSessionHS1") }; let request_id = NEXT_TUNNEL_ID.fetch_add(1, Ordering::SeqCst) as u32; Ok(AuthSession { session_id: 0, rps_peer: peer }) } fn send
random
[ { "content": "pub fn start (conf: config::Config) -> Result<()> {\n\n status!(\"Brunch is served!\");\n\n\n\n let (tx, rx) = mpsc::channel();\n\n let (ty, ry) = mpsc::channel();\n\n\n\n let api_thread_handle = {\n\n let conf = conf.clone();\n\n let tx = tx.clone();\n\n\n\n create_api_channel(conf.api_socket, tx, ry)\n\n };\n\n\n\n let p2p_thread_handle = {\n\n let conf = conf.clone();\n\n\n\n create_p2p_listener(conf.p2p_socket, tx)\n\n };\n\n\n\n let core_result = core::start(&rx, ty, conf).chain_err(|| \"core routine exited too early\");\n\n\n\n api_thread_handle.stop();\n\n p2p_thread_handle.stop();\n\n\n\n core_result\n\n}\n", "file_path": "app/src/brunch.rs", "rank": 4, "score": 104992.1864601677 }, { "content": "#[allow(or_fun_call)]\n\npub fn read_config_file(config_file_path: String) -> Result<Config> {\n\n let config_file = Ini::load_from_file(config_file_path)\n\n .chain_err(|| \"Config file not found\")?;\n\n\n\n let onion_section = config_file.section(Some(\"onion\".to_owned()))\n\n .ok_or(Error::from(\"[onion] section not found in config file\"))?;\n\n\n\n let config = Config {\n\n hostkey_path: read_property(onion_section, \"hostkey\")?,\n\n api_socket: SocketAddr::from_str(&read_property(onion_section, \"api_addr\")?)\n\n .chain_err(|| \"[api_addr] property failed to parse\")?,\n\n p2p_socket: SocketAddr::from_str(&format!(\"0.0.0.0:{}\",\n\n read_property(onion_section, \"p2p_port\")?))\n\n .chain_err(|| \"[p2p_port] property failed to parse\")?,\n\n min_hop_count: read_property(onion_section, \"min_hop_count\")?.parse()\n\n .chain_err(|| \"[min_hop_count] property failed to parse\")?\n\n };\n\n\n\n Ok(config)\n\n}\n", "file_path": "app/src/config.rs", "rank": 5, "score": 99165.0024465321 }, { "content": "pub fn receive_message(stream: &mut net::TcpStream) -> Result<Message> {\n\n let mut buffer = Vec::new();\n\n stream.read_to_end(&mut buffer).chain_err(|| \"reading stream failed\")?;;\n\n Ok(decode_message(&buffer)?)\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 7, "score": 90937.2375512612 }, { "content": "pub fn receive_udp_message(udp_socket: &net::UdpSocket) -> Result<Message> {\n\n let mut buffer = Vec::new();\n\n udp_socket.recv(&mut buffer).chain_err(|| \"reading socket failed\")?;\n\n Ok(decode_message(&buffer)?)\n\n}\n\n\n\n/**\n\n Brunch: Because nothing beats breakfast & lunch like good ol' garlic bread\n\n Connects tcp channels to the core module via the core channel\n\n**/\n", "file_path": "app/src/brunch.rs", "rank": 8, "score": 90064.4667233965 }, { "content": "#[allow(or_fun_call)]\n\npub fn decode_message(bytes: &[u8]) -> Result<Message> {\n\n use self::Message::*;\n\n use messages::auth::Auth::*;\n\n use messages::onion::Onion::*;\n\n use messages::rps::Rps::*;\n\n\n\n // Quick and dirty hack for current message system\n\n let mut deserializer = Deserializer::new(bytes);\n\n let p2p_message = Deserialize::deserialize(&mut deserializer);\n\n\n\n if let Ok(p2p_message) = p2p_message {\n\n return Ok(Message::P2P(p2p_message));\n\n }\n\n\n\n let (length, message_type) = unpack_structure!(\"2H\", &bytes[0..4]);\n\n let length = length as usize;\n\n\n\n if bytes.len() < length {\n\n bail!(\"message length is supposed to be {}, but was {}\", length, bytes.len());\n\n }\n", "file_path": "app/src/messages/mod.rs", "rank": 10, "score": 81615.75586238876 }, { "content": "pub fn encode_message(message: Message) -> Result<Vec<u8>> {\n\n use self::Message::*;\n\n use self::MessageId::*;\n\n use messages::auth::Auth::*;\n\n use messages::onion::Onion::*;\n\n 
use messages::rps::Rps::*;\n\n\n\n // TODO: ↑ + compiler cannot check a correct call to encode which can result in a bail out\n\n let (message_id, message) = match message {\n\n Onion(TunnelReady(message)) => (OnionTunnelReady, message.encode()?),\n\n Onion(TunnelIncomming(message)) => (OnionTunnelIncomming, message.encode()?),\n\n Onion(TunnelDestroy(message)) => (OnionTunnelDestroy, message.encode()?),\n\n Onion(TunnelData(message)) => (OnionTunnelData, message.encode()?),\n\n Onion(Error(message)) => (OnionError, message.encode()?),\n\n\n\n Auth(SessionStart(message)) => (AuthSessionStart, message.encode()?),\n\n Auth(SessionHS1(message)) => (AuthSessionHS1, message.encode()?),\n\n Auth(SessionIncommingHS1(message)) => (AuthSessionIncommingHS1, message.encode()?),\n\n Auth(SessionHS2(message)) => (AuthSessionHS2, message.encode()?),\n\n Auth(SessionIncommingHS2(message)) => (AuthSessionIncommingHS2, message.encode()?),\n", "file_path": "app/src/messages/mod.rs", "rank": 11, "score": 81045.30430620238 }, { "content": "pub fn send_message(stream: &mut net::TcpStream, message: Message) -> Result<()> {\n\n stream.write_all(&encode_message(message)?)\n\n .chain_err(|| \"writing stream failed\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 14, "score": 74428.74800912259 }, { "content": "pub fn send_udp_message(udp_socket: &net::UdpSocket, message: Message) -> Result<()> {\n\n udp_socket.send(&encode_message(message)?).chain_err(|| \"failed to send data on connection\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 15, "score": 74076.8922555588 }, { "content": "#[allow(or_fun_call)]\n\nfn read_property(section: &Properties, property: &'static str) -> Result<String> {\n\n Ok(section.get(property)\n\n .ok_or(Error::from(format!(\"[{}] property not found in config file\", property).to_string()))?\n\n .to_string())\n\n}\n\n\n\n/** Parses the config file and creates an object to be used across the app **/\n", "file_path": "app/src/config.rs", "rank": 16, "score": 73691.1095497525 }, { "content": "fn create_api_channel(socket: SocketAddr, tx: mpsc::Sender<StreamType>, ry: mpsc::Receiver<StreamType>)\n\n -> StoppableHandle<()> {\n\n stoppable_thread::spawn(move |should_die| {\n\n trace_labeled_panic!(\"failed to create API tcp channel\", {\n\n let listener = &TcpListener::bind(&socket).chain_err(|| \"couldn't create tcp listener\")?;\n\n let mut stream = &TcpStream::connect(&socket).chain_err(|| \"couldn't create tcp listener\")?;\n\n\n\n let async_incomming = create_async_channel(listener, Some(stream))?;\n\n note!(format!(\"successfully connected to API socket at {}\", socket));\n\n\n\n while !should_die.get() {\n\n trace_labeled_error!( \"API listener encountered a problem\", {\n\n for stream in async_incomming(listener)? 
{\n\n let mut buffer = Vec::new();\n\n stream?.read_to_end(&mut buffer).chain_err(|| \"reading stream failed\")?;\n\n let message = decode_message(&buffer)?;\n\n\n\n tx.send(StreamType::API(message))\n\n .chain_err(|| \"sending stream to core channel failed\")?;\n\n };\n", "file_path": "app/src/brunch.rs", "rank": 17, "score": 61469.466953647876 }, { "content": "pub fn create_connection(socket: SocketAddr) -> Result<net::TcpStream> {\n\n Ok(net::TcpStream::connect(&socket).chain_err(|| \"couldn't create tcp listener\")?)\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 18, "score": 53029.128200094 }, { "content": "pub fn create_udp_connection(socket: SocketAddr) -> Result<net::UdpSocket> {\n\n Ok(net::UdpSocket::bind(&socket).chain_err(|| \"failed to create udp connection\")?)\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 19, "score": 52093.20136466186 }, { "content": "fn create_p2p_listener(socket: SocketAddr, tx: mpsc::Sender<StreamType>) -> StoppableHandle<()> {\n\n stoppable_thread::spawn(move |should_die| {\n\n trace_labeled_panic!(\"failed to create P2P tcp listener\", {\n\n let listener = &TcpListener::bind(&socket).chain_err(|| \"couldn't create tcp listener\")?;\n\n let async_incomming = create_async_channel(listener, None)?;\n\n\n\n while !should_die.get() {\n\n trace_labeled_error!( \"P2P listener encountered a problem\", {\n\n for stream in async_incomming(listener)? {\n\n let mut buffer = Vec::new();\n\n stream?.read_to_end(&mut buffer).chain_err(|| \"reading stream failed\")?;\n\n let message = decode_message(&buffer)?;\n\n\n\n tx.send(StreamType::API(message))\n\n .chain_err(|| \"sending stream to core channel failed\")?;\n\n };\n\n });\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 20, "score": 51533.060344887104 }, { "content": "pub fn create_async_channel<'a>(listener: &'a TcpListener, stream: Option<&'a TcpStream>) ->\n\n Result<impl Fn(&'a TcpListener) -> Result<Box<impl Iterator<Item=Result<TcpStream>> + 'a>>>\n\n{\n\n let poll = Poll::new().chain_err(|| \"couln't create poll\")?;\n\n\n\n poll.register(listener, LISTENER, Ready::readable(), PollOpt::edge())\n\n .chain_err(|| \"couldn't register listener on poll\")?;\n\n if let Some(stream) = stream {\n\n poll.register(stream, STREAM, Ready::writable(), PollOpt::edge())\n\n .chain_err(|| \"couldn't register stream on poll\")?;\n\n }\n\n\n\n Ok(move |listener: &'a TcpListener| {\n\n\n\n let mut events = Events::with_capacity(1024);\n\n\n\n poll.poll(&mut events, Some(Duration::from_millis(100)))\n\n .chain_err(|| \"polling failed\")?;\n\n\n\n Ok(Box::new(\n\n vec![0; events.into_iter().filter(|e: &Event| e.token() == LISTENER).count()]\n\n .into_iter().map(move |_: u16|\n\n Ok(listener.accept().chain_err(|| \"connection failed\")?.0))\n\n ))\n\n })\n\n}\n\n\n\n// BUG: Due to rust's borrowing system and mio's Polling it is impossible to extract writing the\n\n// stream into a separate thread - reading is therefore done before and only after that is writing done\n\n/** Creates a tcp listener & tcp stream **/\n", "file_path": "app/src/brunch.rs", "rank": 21, "score": 49365.504177665454 }, { "content": "fn main() {\n\n panic::set_hook(Box::new(|e| {\n\n use colored::*;\n\n use case::CaseExt;\n\n\n\n if let Some(e) = e.payload().downcast_ref::<Error>() {\n\n let mut s = format!(\"{}: {}\", \"Critical Problem\".red().bold(),\n\n &format!(\"{}\", e).to_capitalized());\n\n for e in e.iter().skip(1) {\n\n s.push_str(&format!(\"\\n → {}: {}\", \"Caused by\".bold().dimmed(),\n\n 
&format!(\"{}\", e).to_capitalized()));\n\n };\n\n error!(\"{}\", s);\n\n } else if let Some(e) = e.payload().downcast_ref::<&str>() {\n\n println!(\"{}\", e.red().bold());\n\n } else {\n\n error!(\"{}\", \"App panicked but the error was malformed (most likely a bug)\".red().bold());\n\n ::std::process::exit(1);\n\n }\n\n ::std::process::exit(2);\n", "file_path": "app/src/main.rs", "rank": 25, "score": 31950.395268377135 }, { "content": "fn bootstrap() -> Result<()> {\n\n let arguments = parse_cmd_arguments()?;\n\n\n\n let config_file_path = arguments.opt_str(\"c\").unwrap();\n\n\n\n let conf = config::read_config_file(config_file_path)\n\n .chain_err(|| \"couldn't create configuration struct\")?;\n\n\n\n brunch::start(conf)\n\n}\n\n\n\n/** Setup logger and boostrap the app **/\n", "file_path": "app/src/main.rs", "rank": 26, "score": 30391.280524181595 }, { "content": "fn parse_cmd_arguments() -> Result<getopts::Matches> {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n let mut opts = Options::new();\n\n opts.reqopt(\"c\", \"config\", \"set path for the config file\", \"\");\n\n opts.parse(&args[1..]).chain_err(|| \"couldn't parse arguments\")\n\n}\n\n\n\n/** Eval cmd arguments and initialize methods, bootstrap brunch **/\n", "file_path": "app/src/main.rs", "rank": 27, "score": 26441.529496132025 }, { "content": " ($msg:expr) => {{\n\n use colored::*;\n\n info!(\" {} {}\", \"→\".dimmed() , $msg.dimmed());\n\n }}\n\n}\n\n/** Pretty-prints an error traceback **/\n\nmacro_rules! trace_error {\n\n ($($b:tt)*) => {\n\n || -> Result<()> {\n\n $($b)*;\n\n Ok(())\n\n }().unwrap_or_else(|e| {\n\n use colored::*;\n\n use case::CaseExt;\n\n\n\n let e = &e;\n\n warn!(\"{} {}\", \"Problem:\".yellow(), format!(\"{}\", e).to_capitalized().yellow());\n\n for e in e.iter().skip(1) {\n\n warn!(\" {}: {}\", \"→ Caused by\".bold().dimmed() , format!(\"{}\", e).to_capitalized());\n\n };\n", "file_path": "app/src/errors.rs", "rank": 28, "score": 24919.109423243375 }, { "content": " });\n\n }\n\n}\n\n/** Adds the provided error message to error chain and calls `trace_error` **/\n\nmacro_rules! trace_labeled_error {\n\n ($msg:expr, $($b:tt)*) => {\n\n trace_error! {\n\n || -> Result<()> {\n\n $($b)*;\n\n Ok(())\n\n }().chain_err(|| $msg)?;\n\n };\n\n }\n\n}\n\n/** Wrapper around error chain's result which panics on error **/\n\nmacro_rules! trace_panic {\n\n ( $($b:tt)* ) => {\n\n || -> Result<()> {\n\n $($b)*;\n\n Ok(())\n", "file_path": "app/src/errors.rs", "rank": 29, "score": 24919.034892059597 }, { "content": " }().unwrap_or_else(|e| {\n\n panic!(e);\n\n });\n\n }\n\n}\n\n/** Adds the provided error message to error chain and calls `trace_panic` **/\n\nmacro_rules! trace_labeled_panic {\n\n ($msg:expr, $($b:tt)*) => {\n\n trace_panic! {\n\n || -> Result<()> {\n\n $($b)*;\n\n Ok(())\n\n }().chain_err(|| $msg)?;\n\n };\n\n }\n\n}\n", "file_path": "app/src/errors.rs", "rank": 30, "score": 24918.71604185936 }, { "content": "error_chain!{}\n\n\n\n/** Pretty-prints current app status **/\n\nmacro_rules! status {\n\n ($msg:expr, $type:expr) => {{\n\n use colored::*;\n\n let color = match $type.as_ref() {\n\n \"msg\" => { \"green\" }\n\n \"error\" => { \"red\" }\n\n \"warn\" => { \"yellow\" }\n\n _ => { \"white\" }\n\n };\n\n info!(\"{}\", $msg.color(color));\n\n }};\n\n ($msg:expr) => {{\n\n status!($msg, \"msg\");\n\n }}\n\n}\n\n/** Pretty-prints sub-status (to be nested under status)**/\n\nmacro_rules! 
note {\n", "file_path": "app/src/errors.rs", "rank": 31, "score": 24918.19951220502 }, { "content": "// This module is responsible for parsing configuration options\n\nextern crate ini;\n\nuse self::ini::Ini;\n\nuse self::ini::ini::Properties;\n\n\n\nuse errors::*;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Clone)]\n\npub struct Config {\n\n pub hostkey_path: String,\n\n pub api_socket: SocketAddr,\n\n pub p2p_socket: SocketAddr,\n\n pub min_hop_count: u8\n\n}\n\n\n\n#[allow(or_fun_call)]\n", "file_path": "app/src/config.rs", "rank": 32, "score": 24387.663493069835 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize)]\n\npub struct P2PMessage {\n\n pub message_type: P2P,\n\n pub data: Option<Vec<u8>>\n\n}\n\nimpl P2PMessage {\n\n pub fn new(message_type: P2P) -> P2PMessage {\n\n P2PMessage {\n\n message_type: message_type,\n\n data: None\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize)]\n\npub enum P2P {\n\n Knock,\n\n WhosThere,\n\n Handshake,\n\n Incomming,\n\n Forward,\n\n Data\n\n}\n", "file_path": "app/src/messages/p2p.rs", "rank": 33, "score": 20771.476348335458 }, { "content": " pub request_id: u32,\n\n pub payload: Vec<u8>\n\n}\n\nimpl AuthSessionHS1Response {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n let mut bytes = pack_structure!(\"4xI\", self.request_id);\n\n bytes.extend_from_slice(&self.payload);\n\n Ok(bytes)\n\n }\n\n}\n\n\n\npub struct AuthCipherCrypt {\n\n pub session_id: u16,\n\n pub request_id: u32,\n\n pub cleartext: bool,\n\n pub payload: Vec<u8>\n\n}\n\nimpl AuthCipherCrypt {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n let mut bytes = pack_structure!(\"3xBIH\", boolean!(self.cleartext), self.request_id, self.session_id);\n", "file_path": "app/src/messages/auth.rs", "rank": 34, "score": 20770.059637158483 }, { "content": " Rps(Rps),\n\n P2P(P2PMessage)\n\n}\n\n\n\n// Ref: 28028854\n\nenum_from_primitive! 
{\n\n #[repr(u16)]\n\n pub enum MessageId {\n\n RpsQuery = 540,\n\n RpsPeer = 541,\n\n OnionTunnelBuild = 560,\n\n OnionTunnelReady = 561,\n\n OnionTunnelIncomming = 562,\n\n OnionTunnelDestroy = 563,\n\n OnionTunnelData = 564,\n\n OnionCover = 566,\n\n OnionError = 565,\n\n AuthSessionStart = 600,\n\n AuthSessionHS1 = 601,\n\n AuthSessionIncommingHS1 = 602,\n", "file_path": "app/src/messages/mod.rs", "rank": 35, "score": 20769.740514380585 }, { "content": "#[macro_use]\n\nmod utilities;\n\npub mod onion;\n\npub mod auth;\n\npub mod rps;\n\npub mod p2p;\n\n\n\nuse errors::*;\n\nuse messages::auth::*;\n\nuse messages::onion::*;\n\nuse messages::rps::*;\n\nuse messages::p2p::P2PMessage;\n\n\n\nuse num::FromPrimitive;\n\nuse serde::{Deserialize, Serialize};\n\nuse rmps::{Deserializer, Serializer};\n\n\n\npub enum Message {\n\n Onion(Onion),\n\n Auth(Auth),\n", "file_path": "app/src/messages/mod.rs", "rank": 36, "score": 20769.571758568993 }, { "content": " pub cover_size: u16,\n\n}\n\n/* 2B CoverSize | 2B Reserved */\n\nimpl OnionCover {\n\n pub fn decode(bytes: Vec<u8>) -> Result<OnionCover> {\n\n let (cover_size,) = unpack_structure!(\"H2x\", &bytes);\n\n Ok(OnionCover {\n\n cover_size: cover_size,\n\n })\n\n }\n\n}\n\n\n\npub enum Onion {\n\n TunnelBuild(OnionTunnelBuild),\n\n TunnelReady(OnionTunnelPayload),\n\n TunnelIncomming(OnionTunnelID),\n\n TunnelDestroy(OnionTunnelID),\n\n TunnelData(OnionTunnelPayload),\n\n Cover(OnionCover),\n\n Error(OnionError)\n\n}\n", "file_path": "app/src/messages/onion.rs", "rank": 37, "score": 20768.93884802961 }, { "content": "use errors::*;\n\n\n\nuse bit_field::BitField;\n\n\n\npub struct AuthSessionStart {\n\n pub request_id: u32,\n\n pub hostkey: Vec<u8>\n\n}\n\n/* 4B Reserved | 4B RequestId | Rest Hostkey */\n\nimpl AuthSessionStart {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n let mut bytes = pack_structure!(\"4xI\", self.request_id);\n\n bytes.extend_from_slice(&self.hostkey);\n\n Ok(bytes)\n\n }\n\n}\n\n\n\npub struct AuthSessionHS {\n\n pub session_id: u16,\n\n pub request_id: u32,\n", "file_path": "app/src/messages/auth.rs", "rank": 38, "score": 20768.70722036664 }, { "content": " bytes.extend_from_slice(&self.payload);\n\n Ok(bytes)\n\n }\n\n}\n\n\n\npub struct AuthCipherCryptResp {\n\n pub request_id: u32,\n\n pub cleartext: bool,\n\n pub payload: Vec<u8>\n\n}\n\n/* 3B Reserved | 7b1b Cleartext | 4B RequestId | Rest Payload */\n\nimpl AuthCipherCryptResp {\n\n pub fn decode(bytes: Vec<u8>) -> Result<AuthCipherCryptResp> {\n\n let (cleartext, request_id,) = unpack_structure!(\"3xBI\", &bytes);\n\n Ok(AuthCipherCryptResp {\n\n request_id: request_id,\n\n cleartext: cleartext.get_bit(0),\n\n payload: bytes[8..].to_vec()\n\n })\n\n }\n", "file_path": "app/src/messages/auth.rs", "rank": 39, "score": 20768.57752245055 }, { "content": "use errors::*;\n\n\n\nuse bit_field::BitField;\n\n\n\nuse std::net::{Ipv4Addr, Ipv6Addr, IpAddr};\n\n\n\npub struct RpsQuery {}\n\nimpl RpsQuery {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n Ok(vec![])\n\n }\n\n}\n\n\n\npub struct RpsPeer {\n\n pub port: u16,\n\n pub ip_addr: IpAddr,\n\n pub hostkey: Vec<u8>\n\n}\n\n/* 2B Port | 1B Reserved | 7b1b IPv | Rest Hostkey */\n\nimpl RpsPeer {\n", "file_path": "app/src/messages/rps.rs", "rank": 40, "score": 20767.95054363692 }, { "content": "}\n\n\n\npub struct AuthSessionClose {\n\n pub session_id: u16\n\n}\n\nimpl AuthSessionClose {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n Ok(pack_structure!(\"2xH\", self.session_id))\n\n }\n\n}\n\n\n\npub struct 
AuthSessionError {\n\n pub request_id: u32\n\n}\n\n/* 4B Reserved | 4B RequestId */\n\nimpl AuthSessionError {\n\n pub fn decode(bytes: Vec<u8>) -> Result<AuthSessionError> {\n\n let (request_id,) = unpack_structure!(\"4xI\", &bytes);\n\n Ok(AuthSessionError {\n\n request_id: request_id\n", "file_path": "app/src/messages/auth.rs", "rank": 41, "score": 20767.75852449072 }, { "content": " tunnel_id: tunnel_id\n\n })\n\n }\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n Ok(pack_structure!(\"I\", self.tunnel_id))\n\n }\n\n}\n\n\n\npub struct OnionError {\n\n pub tunnel_id: u32,\n\n pub request_type: u16\n\n}\n\n/* 2B RequestType | 2B Reserved | 4B TunnelId */\n\nimpl OnionError {\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n Ok(pack_structure!(\"H2xI\", self.request_type, self.tunnel_id))\n\n }\n\n}\n\n\n\npub struct OnionCover {\n", "file_path": "app/src/messages/onion.rs", "rank": 42, "score": 20767.25754441904 }, { "content": "\n\n let bytes = bytes[4..length].to_vec();\n\n\n\n // TODO: This could use some dedupe refactoring (maybe a procedural macro - but that stuff is difficult to write)\n\n Ok(match MessageId::from_u16(message_type)\n\n .ok_or(::errors::Error::from(\"conversion of message type failed\"))? {\n\n\n\n MessageId::OnionTunnelBuild => Onion(TunnelBuild(OnionTunnelBuild::decode(bytes)?)),\n\n MessageId::OnionTunnelReady => Onion(TunnelReady(OnionTunnelPayload::decode(bytes)?)),\n\n MessageId::OnionTunnelData => Onion(TunnelData(OnionTunnelPayload::decode(bytes)?)),\n\n MessageId::OnionTunnelIncomming => Onion(TunnelIncomming(OnionTunnelID::decode(bytes)?)),\n\n MessageId::OnionTunnelDestroy => Onion(TunnelDestroy(OnionTunnelID::decode(bytes)?)),\n\n MessageId::OnionCover => Onion(Cover(OnionCover::decode(bytes)?)),\n\n\n\n MessageId::AuthSessionHS1 => Auth(SessionHS1(AuthSessionHS::decode(bytes)?)),\n\n MessageId::AuthSessionHS2 => Auth(SessionHS2(AuthSessionHS::decode(bytes)?)),\n\n MessageId::AuthSessionIncommingHS2 => Auth(SessionIncommingHS2(AuthSessionHS::decode(bytes)?)),\n\n MessageId::AuthCipherEncryptResp => Auth(CipherEncryptResp(AuthCipherCryptResp::decode(bytes)?)),\n\n MessageId::AuthCipherDecryptResp => Auth(CipherDecryptResp(AuthCipherCryptResp::decode(bytes)?)),\n\n MessageId::AuthSessionError => Auth(SessionError(AuthSessionError::decode(bytes)?)),\n\n\n\n MessageId::RpsPeer => Message::Rps(Peer(RpsPeer::decode(bytes)?)),\n\n\n\n _ => bail!(\"message type {} unknown\", message_type)\n\n })\n\n}\n\n\n", "file_path": "app/src/messages/mod.rs", "rank": 43, "score": 20767.032658111337 }, { "content": " let (i0, i1, i2, i3) = unpack_structure!(\"4B\", &bytes[4..8]);\n\n (8, IpAddr::V4(Ipv4Addr::new(i0, i1, i2, i3)))\n\n };\n\n\n\n Ok(OnionTunnelBuild {\n\n onion_tunnel: onion_tunnel,\n\n ip_addr: ip_addr,\n\n hostkey: bytes[next_field_offset..].to_vec()\n\n })\n\n }\n\n}\n\n\n\npub struct OnionTunnelPayload {\n\n pub tunnel_id: u32,\n\n pub payload: Vec<u8>\n\n}\n\n/* 4B TunnelId | Rest Payload */\n\nimpl OnionTunnelPayload {\n\n pub fn decode(bytes: Vec<u8>) -> Result<OnionTunnelPayload> {\n\n let (tunnel_id,) = unpack_structure!(\"I\", &bytes);\n", "file_path": "app/src/messages/onion.rs", "rank": 44, "score": 20766.01774508955 }, { "content": " Ok(OnionTunnelPayload {\n\n tunnel_id: tunnel_id,\n\n payload: bytes[4..].to_vec()\n\n })\n\n }\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n let mut bytes = pack_structure!(\"I\", self.tunnel_id);\n\n bytes.extend_from_slice(&self.payload);\n\n Ok(bytes)\n\n }\n\n}\n\n\n\npub struct OnionTunnelID {\n\n pub 
tunnel_id: u32\n\n}\n\n/* 4B TunnelId */\n\nimpl OnionTunnelID {\n\n pub fn decode(bytes: Vec<u8>) -> Result<OnionTunnelID> {\n\n let (tunnel_id,) = unpack_structure!(\"I\", &bytes);\n\n Ok(OnionTunnelID {\n", "file_path": "app/src/messages/onion.rs", "rank": 45, "score": 20766.00116568704 }, { "content": "use errors::*;\n\n\n\nuse bit_field::BitField;\n\n\n\nuse std::net::{Ipv4Addr, Ipv6Addr, IpAddr};\n\n\n\npub struct OnionTunnelBuild {\n\n pub onion_tunnel: u16,\n\n pub ip_addr: IpAddr,\n\n pub hostkey: Vec<u8>\n\n}\n\n/* 1B Reserved | 7b1b IPv | 2B OnionTunnel | 16B/4B IP | Rest Hostkey */\n\nimpl OnionTunnelBuild {\n\n pub fn decode(bytes: Vec<u8>) -> Result<OnionTunnelBuild> {\n\n let (ipv, onion_tunnel) = unpack_structure!(\"xBH\", &bytes);\n\n\n\n let (next_field_offset, ip_addr) = if ipv.get_bit(0) {\n\n let (i0, i1, i2, i3, i4, i5, i6, i7) = unpack_structure!(\"8H\", &bytes[4..18]);\n\n (18, IpAddr::V6(Ipv6Addr::new(i0, i1, i2, i3, i4, i5, i6, i7)))\n\n } else {\n", "file_path": "app/src/messages/onion.rs", "rank": 46, "score": 20765.53091278674 }, { "content": " Auth(CipherEncrypt(message)) => (AuthCipherEncrypt, message.encode()?),\n\n Auth(CipherDecrypt(message)) => (AuthCipherDecrypt, message.encode()?),\n\n Auth(SessionClose(message)) => (AuthSessionClose, message.encode()?),\n\n\n\n Rps(Query(message)) => (RpsQuery, message.encode()?),\n\n \n\n P2P(message) => {\n\n let mut bytes = Vec::new();\n\n message.serialize(&mut Serializer::new(&mut bytes)).chain_err(|| \"couldn't serialize P2P message\")?;\n\n return Ok(bytes);\n\n },\n\n\n\n // BUG: This has to be fixed with a better message system\n\n _ => panic!(\"a call to 'encode' that does not exist on this message type was requested\")\n\n };\n\n\n\n let mut bytes = pack_structure!(\"2H\", message.len() as u16 + 4, message_id as u16);\n\n bytes.extend_from_slice(&message);\n\n Ok(bytes)\n\n}\n", "file_path": "app/src/messages/mod.rs", "rank": 47, "score": 20765.326722834045 }, { "content": " pub payload: Vec<u8>\n\n}\n\n/* 2B Reserved | 2B SessionId | 4B RequestId | Rest Payload */\n\nimpl AuthSessionHS {\n\n pub fn decode(bytes: Vec<u8>) -> Result<AuthSessionHS> {\n\n let (session_id, request_id) = unpack_structure!(\"2xHI\", &bytes[0..8]);\n\n Ok(AuthSessionHS {\n\n session_id: session_id,\n\n request_id: request_id,\n\n payload: bytes[8..].to_vec()\n\n })\n\n }\n\n pub fn encode(self) -> Result<Vec<u8>> {\n\n let mut bytes = pack_structure!(\"2xHI\", self.session_id, self.request_id);\n\n bytes.extend_from_slice(&self.payload);\n\n Ok(bytes)\n\n }\n\n}\n\n\n\npub struct AuthSessionHS1Response {\n", "file_path": "app/src/messages/auth.rs", "rank": 48, "score": 20764.684065744485 }, { "content": " })\n\n }\n\n}\n\n\n\npub enum Auth {\n\n SessionStart(AuthSessionStart),\n\n SessionHS1(AuthSessionHS),\n\n SessionIncommingHS1(AuthSessionHS1Response),\n\n SessionHS2(AuthSessionHS),\n\n SessionIncommingHS2(AuthSessionHS),\n\n CipherEncrypt(AuthCipherCrypt),\n\n CipherEncryptResp(AuthCipherCryptResp),\n\n CipherDecrypt(AuthCipherCrypt),\n\n CipherDecryptResp(AuthCipherCryptResp),\n\n SessionClose(AuthSessionClose),\n\n SessionError(AuthSessionError)\n\n}\n", "file_path": "app/src/messages/auth.rs", "rank": 49, "score": 20762.030376539693 }, { "content": " AuthSessionHS2 = 603,\n\n AuthSessionIncommingHS2 = 604,\n\n AuthCipherEncrypt = 611,\n\n AuthCipherEncryptResp = 612,\n\n AuthCipherDecrypt = 613,\n\n AuthCipherDecryptResp = 614,\n\n AuthSessionClose = 609,\n\n AuthSessionError = 610\n\n }\n\n}\n\n\n\n#[allow(or_fun_call)]\n", 
"file_path": "app/src/messages/mod.rs", "rank": 50, "score": 20760.94203525569 }, { "content": " pub fn decode(bytes: Vec<u8>) -> Result<RpsPeer> {\n\n let (port, ipv) = unpack_structure!(\"HxB\", &bytes);\n\n\n\n let (next_field_offset, ip_addr) = if ipv.get_bit(0) {\n\n let (i0, i1, i2, i3, i4, i5, i6, i7) = unpack_structure!(\"8H\", &bytes[4..18]);\n\n (18, IpAddr::V6(Ipv6Addr::new(i0, i1, i2, i3, i4, i5, i6, i7)))\n\n } else {\n\n let (i0, i1, i2, i3) = unpack_structure!(\"4B\", &bytes[4..8]);\n\n (8, IpAddr::V4(Ipv4Addr::new(i0, i1, i2, i3)))\n\n };\n\n\n\n Ok(RpsPeer {\n\n port: port,\n\n ip_addr: ip_addr,\n\n hostkey: bytes[next_field_offset..].to_vec()\n\n })\n\n }\n\n}\n\n\n\npub enum Rps {\n\n Query(RpsQuery),\n\n Peer(RpsPeer)\n\n}\n", "file_path": "app/src/messages/rps.rs", "rank": 51, "score": 20760.87950561786 }, { "content": "macro_rules! unpack_structure {\n\n ($format:expr, $source:expr) => {\n\n structure!($format).unpack($source).chain_err(|| \"failed to unpack defined structure\")?\n\n }\n\n}\n\n\n\nmacro_rules! pack_structure {\n\n ($format:expr, $($input:expr),*) => {\n\n structure!($format).pack($($input),*).chain_err(|| \"failed to pack defined structure\")?\n\n }\n\n}\n\n\n\nmacro_rules! boolean {\n\n ($set:expr) => {\n\n if $set { 0b1 } else { 0b0 }\n\n }\n\n}\n", "file_path": "app/src/messages/utilities.rs", "rank": 52, "score": 20759.57019666759 }, { "content": "use mio::tcp::{TcpListener, TcpStream};\n\nuse mio::{Poll, PollOpt, Token, Events, Ready, Event};\n\nuse stoppable_thread;\n\nuse stoppable_thread::StoppableHandle;\n\n\n\nuse std::net;\n\nuse std::net::{SocketAddr};\n\nuse std::sync::{mpsc};\n\nuse std::time::Duration;\n\nuse std::io::{Read, Write};\n\n\n\nuse errors::*;\n\nuse messages::{Message, decode_message, encode_message};\n\nuse config;\n\nuse core;\n\nuse core::StreamType;\n\n\n\nconst LISTENER: Token = Token(0);\n\nconst STREAM: Token = Token(1);\n\n\n", "file_path": "app/src/brunch.rs", "rank": 55, "score": 12.359637206383043 }, { "content": "#[macro_use]\n\nextern crate enum_primitive;\n\nextern crate num;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate rmp_serde as rmps;\n\n\n\n// Required modules\n\n#[macro_use]\n\nmod errors;\n\nmod config;\n\nmod messages;\n\nmod brunch;\n\nmod core;\n\n\n\n// This is the import order for all modules\n\n// Crate Imports\n\nuse getopts::Options;\n\nuse simplelog::{CombinedLogger, TermLogger};\n\n// Standard Imports\n\nuse std::env;\n\nuse std::panic;\n\n// Custom Imports\n\nuse errors::*;\n\n\n\n/** Specification of cmd arguments **/\n", "file_path": "app/src/main.rs", "rank": 58, "score": 7.464087824680259 }, { "content": "#![allow(unknown_lints)]\n\n#![allow(too_many_arguments)]\n\n#![allow(dead_code)]\n\n#![feature(conservative_impl_trait)]\n\n\n\nextern crate simplelog;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate getopts;\n\n#[macro_use]\n\nextern crate error_chain;\n\nextern crate colored;\n\nextern crate stoppable_thread;\n\nextern crate mio;\n\nextern crate case;\n\nextern crate bytes;\n\nextern crate byteorder;\n\n#[macro_use]\n\nextern crate structure;\n\nextern crate bit_field;\n", "file_path": "app/src/main.rs", "rank": 59, "score": 4.136381657560539 }, { "content": " });\n\n\n\n trace_labeled_error!( \"API stream encountered a problem\", {\n\n if let Ok(packed_message) = ry.try_recv() {\n\n let message = match packed_message {\n\n StreamType::API(message) => message,\n\n _ => bail!(\"only API messages are allowed here\")\n\n };\n\n\n\n 
stream.write_all(&encode_message(message)?)\n\n .chain_err(|| \"writing stream failed\")?;\n\n }\n\n });\n\n };\n\n });\n\n })\n\n}\n\n\n", "file_path": "app/src/brunch.rs", "rank": 60, "score": 4.076809243668962 }, { "content": " }));\n\n\n\n CombinedLogger::init(\n\n vec![\n\n TermLogger::new(log::LogLevelFilter::Info, simplelog::Config {\n\n time: Some(log::LogLevel::Warn),\n\n level: None, target: None, location: None\n\n }).expect(\"Failed to initialize terminal logger\")\n\n ]\n\n ).expect(\"Failed to initialize logger\");\n\n\n\n trace_panic! { bootstrap()? };\n\n}\n", "file_path": "app/src/main.rs", "rank": 61, "score": 3.713400263275339 } ]
Rust
weechat/src/hooks/commands.rs
troethe/rust-weechat
8533abf0e000659f567e404d3c8aa0d773eff685
use libc::{c_char, c_int};
use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr};

use weechat_sys::{t_gui_buffer, t_weechat_plugin, WEECHAT_RC_OK};

use crate::{buffer::Buffer, Args, LossyCString, ReturnCode, Weechat};

use super::Hook;

pub struct Command {
    _hook: Hook,
    _hook_data: Box<CommandHookData>,
}

pub trait CommandCallback {
    fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args);
}

impl<T: FnMut(&Weechat, &Buffer, Args) + 'static> CommandCallback for T {
    fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args) {
        self(weechat, buffer, arguments)
    }
}

#[derive(Default)]
pub struct CommandSettings {
    name: String,
    description: String,
    arguments: Vec<String>,
    argument_descriptoin: String,
    completion: Vec<String>,
}

impl CommandSettings {
    pub fn new<P: Into<String>>(name: P) -> Self {
        CommandSettings {
            name: name.into(),
            ..Default::default()
        }
    }

    pub fn description<D: Into<String>>(mut self, descritpion: D) -> Self {
        self.description = descritpion.into();
        self
    }

    pub fn add_argument<T: Into<String>>(mut self, argument: T) -> Self {
        self.arguments.push(argument.into());
        self
    }

    pub fn arguments_description<T: Into<String>>(mut self, descritpion: T) -> Self {
        self.argument_descriptoin = descritpion.into();
        self
    }

    pub fn add_completion<T: Into<String>>(mut self, completion: T) -> Self {
        self.completion.push(completion.into());
        self
    }
}

struct CommandHookData {
    callback: Box<dyn CommandCallback>,
    weechat_ptr: *mut t_weechat_plugin,
}

pub struct CommandRun {
    _hook: Hook,
    _hook_data: Box<CommandRunHookData>,
}

pub trait CommandRunCallback {
    fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode;
}

impl<T: FnMut(&Weechat, &Buffer, Cow<str>) -> ReturnCode + 'static> CommandRunCallback for T {
    fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode {
        self(weechat, buffer, command)
    }
}

struct CommandRunHookData {
    callback: Box<dyn CommandRunCallback>,
    weechat_ptr: *mut t_weechat_plugin,
}

impl CommandRun {
    pub fn new(command: &str, callback: impl CommandRunCallback + 'static) -> Result<Self, ()> {
        unsafe extern "C" fn c_hook_cb(
            pointer: *const c_void,
            _data: *mut c_void,
            buffer: *mut t_gui_buffer,
            command: *const std::os::raw::c_char,
        ) -> c_int {
            let hook_data: &mut CommandRunHookData =
                { &mut *(pointer as *mut CommandRunHookData) };
            let cb = &mut hook_data.callback;
            let weechat = Weechat::from_ptr(hook_data.weechat_ptr);
            let buffer = weechat.buffer_from_ptr(buffer);

            let command = CStr::from_ptr(command).to_string_lossy();

            cb.callback(&weechat, &buffer, command) as isize as i32
        }

        Weechat::check_thread();
        let weechat = unsafe { Weechat::weechat() };

        let data = Box::new(CommandRunHookData {
            callback: Box::new(callback),
            weechat_ptr: weechat.ptr,
        });

        let data_ref = Box::leak(data);
        let hook_command_run = weechat.get().hook_command_run.unwrap();
        let command = LossyCString::new(command);

        let hook_ptr = unsafe {
            hook_command_run(
                weechat.ptr,
                command.as_ptr(),
                Some(c_hook_cb),
                data_ref as *const _ as *const c_void,
                ptr::null_mut(),
            )
        };

        let hook_data = unsafe { Box::from_raw(data_ref) };

        if hook_ptr.is_null() {
            Err(())
        } else {
            let hook = Hook {
                ptr: hook_ptr,
                weechat_ptr: weechat.ptr,
            };

            Ok(CommandRun {
                _hook: hook,
                _hook_data: hook_data,
            })
        }
    }
}

impl Command {
    pub fn new(
        command_settings: CommandSettings,
        callback: impl CommandCallback + 'static,
    ) -> Result<Command, ()> {
        unsafe extern "C" fn c_hook_cb(
            pointer: *const c_void,
            _data: *mut c_void,
            buffer: *mut t_gui_buffer,
            argc: i32,
            argv: *mut *mut c_char,
            _argv_eol: *mut *mut c_char,
        ) -> c_int {
            let hook_data: &mut CommandHookData = { &mut *(pointer as *mut CommandHookData) };
            let weechat = Weechat::from_ptr(hook_data.weechat_ptr);
            let buffer = weechat.buffer_from_ptr(buffer);
            let cb = &mut hook_data.callback;
            let args = Args::new(argc, argv);

            cb.callback(&weechat, &buffer, args);

            WEECHAT_RC_OK
        }

        Weechat::check_thread();
        let weechat = unsafe { Weechat::weechat() };

        let name = LossyCString::new(command_settings.name);
        let description = LossyCString::new(command_settings.description);
        let args = LossyCString::new(command_settings.arguments.join("||"));
        let args_description = LossyCString::new(command_settings.argument_descriptoin);
        let completion = LossyCString::new(command_settings.completion.join("||"));

        let data = Box::new(CommandHookData {
            callback: Box::new(callback),
            weechat_ptr: weechat.ptr,
        });

        let data_ref = Box::leak(data);
        let hook_command = weechat.get().hook_command.unwrap();

        let hook_ptr = unsafe {
            hook_command(
                weechat.ptr,
                name.as_ptr(),
                description.as_ptr(),
                args.as_ptr(),
                args_description.as_ptr(),
                completion.as_ptr(),
                Some(c_hook_cb),
                data_ref as *const _ as *const c_void,
                ptr::null_mut(),
            )
        };

        let hook_data = unsafe { Box::from_raw(data_ref) };

        let hook = Hook {
            ptr: hook_ptr,
            weechat_ptr: weechat.ptr,
        };

        if hook_ptr.is_null() {
            Err(())
        } else {
            Ok(Command {
                _hook: hook,
                _hook_data: hook_data,
            })
        }
    }
}
use libc::{c_char, c_int}; use std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr}; use weechat_sys::{t_gui_buffer, t_weechat_plugin, WEECHAT_RC_OK}; use crate::{buffer::Buffer, Args, LossyCString, ReturnCode, Weechat}; use super::Hook; pub struct Command { _hook: Hook, _hook_data: Box<CommandHookData>, } pub trait CommandCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args); } impl<T: FnMut(&Weechat, &Buffer, Args) + 'static> CommandCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, arguments: Args) { self(weechat, buffer, arguments) } } #[derive(Default)] pub struct CommandSettings { name: String, description: String, arguments: Vec<String>, argument_descriptoin: String, completion: Vec<String>, } impl CommandSettings {
pub fn description<D: Into<String>>(mut self, descritpion: D) -> Self { self.description = descritpion.into(); self } pub fn add_argument<T: Into<String>>(mut self, argument: T) -> Self { self.arguments.push(argument.into()); self } pub fn arguments_description<T: Into<String>>(mut self, descritpion: T) -> Self { self.argument_descriptoin = descritpion.into(); self } pub fn add_completion<T: Into<String>>(mut self, completion: T) -> Self { self.completion.push(completion.into()); self } } struct CommandHookData { callback: Box<dyn CommandCallback>, weechat_ptr: *mut t_weechat_plugin, } pub struct CommandRun { _hook: Hook, _hook_data: Box<CommandRunHookData>, } pub trait CommandRunCallback { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode; } impl<T: FnMut(&Weechat, &Buffer, Cow<str>) -> ReturnCode + 'static> CommandRunCallback for T { fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, command: Cow<str>) -> ReturnCode { self(weechat, buffer, command) } } struct CommandRunHookData { callback: Box<dyn CommandRunCallback>, weechat_ptr: *mut t_weechat_plugin, } impl CommandRun { pub fn new(command: &str, callback: impl CommandRunCallback + 'static) -> Result<Self, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, command: *const std::os::raw::c_char, ) -> c_int { let hook_data: &mut CommandRunHookData = { &mut *(pointer as *mut CommandRunHookData) }; let cb = &mut hook_data.callback; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let command = CStr::from_ptr(command).to_string_lossy(); cb.callback(&weechat, &buffer, command) as isize as i32 } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let data = Box::new(CommandRunHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = Box::leak(data); let hook_command_run = weechat.get().hook_command_run.unwrap(); let command = LossyCString::new(command); let hook_ptr = unsafe { hook_command_run( weechat.ptr, command.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; if hook_ptr.is_null() { Err(()) } else { let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; Ok(CommandRun { _hook: hook, _hook_data: hook_data, }) } } } impl Command { pub fn new( command_settings: CommandSettings, callback: impl CommandCallback + 'static, ) -> Result<Command, ()> { unsafe extern "C" fn c_hook_cb( pointer: *const c_void, _data: *mut c_void, buffer: *mut t_gui_buffer, argc: i32, argv: *mut *mut c_char, _argv_eol: *mut *mut c_char, ) -> c_int { let hook_data: &mut CommandHookData = { &mut *(pointer as *mut CommandHookData) }; let weechat = Weechat::from_ptr(hook_data.weechat_ptr); let buffer = weechat.buffer_from_ptr(buffer); let cb = &mut hook_data.callback; let args = Args::new(argc, argv); cb.callback(&weechat, &buffer, args); WEECHAT_RC_OK } Weechat::check_thread(); let weechat = unsafe { Weechat::weechat() }; let name = LossyCString::new(command_settings.name); let description = LossyCString::new(command_settings.description); let args = LossyCString::new(command_settings.arguments.join("||")); let args_description = LossyCString::new(command_settings.argument_descriptoin); let completion = LossyCString::new(command_settings.completion.join("||")); let data = Box::new(CommandHookData { callback: Box::new(callback), weechat_ptr: weechat.ptr, }); let data_ref = 
Box::leak(data); let hook_command = weechat.get().hook_command.unwrap(); let hook_ptr = unsafe { hook_command( weechat.ptr, name.as_ptr(), description.as_ptr(), args.as_ptr(), args_description.as_ptr(), completion.as_ptr(), Some(c_hook_cb), data_ref as *const _ as *const c_void, ptr::null_mut(), ) }; let hook_data = unsafe { Box::from_raw(data_ref) }; let hook = Hook { ptr: hook_ptr, weechat_ptr: weechat.ptr, }; if hook_ptr.is_null() { Err(()) } else { Ok(Command { _hook: hook, _hook_data: hook_data, }) } } }
    pub fn new<P: Into<String>>(name: P) -> Self {
        CommandSettings {
            name: name.into(),
            ..Default::default()
        }
    }
function_block-full_function
[ { "content": "/// Trait for the completion callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\npub trait CompletionCallback {\n\n /// Callback that will be called if when a completion is requested.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `buffer` - The currently active buffer that requested the completion\n\n /// to run.\n\n ///\n\n /// * `completion_name` - The name of the completion.\n\n ///\n\n /// * `completion` - The completion object that should be populated with\n\n /// completion words by the callback.\n\n fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n buffer: &Buffer,\n\n completion_name: Cow<str>,\n\n completion: &Completion,\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 1, "score": 168337.82841681718 }, { "content": "/// Trait for the buffer input callback\n\n///\n\n/// This is the sync version of the callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait BufferInputCallback: 'static {\n\n /// Callback that will be called when the buffer receives some input.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `buffer` - The buffer that received the input\n\n ///\n\n /// * `input` - The input that was received.\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, input: Cow<str>) -> Result<(), ()>;\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Buffer, Cow<str>) -> Result<(), ()> + 'static> BufferInputCallback for T {\n\n /// Callback that will be called if the user inputs something into the buffer\n\n /// input field.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `buffer` - The buffer that the user inputed some text into.\n\n ///\n\n /// * `input` - The input that was posted by the user.\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer, input: Cow<str>) -> Result<(), ()> {\n\n self(weechat, buffer, input)\n\n }\n\n}\n\n\n", "file_path": "weechat/src/buffer/mod.rs", "rank": 3, "score": 162615.24122326932 }, { "content": "/// Trait for the buffer input callback.\n\n///\n\n/// This is the async version of the callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait BufferInputCallbackAsync: 'static {\n\n /// Callback that will be called if the user inputs something into the buffer\n\n /// input field.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `buffer` - The buffer that the user inputed some text into.\n\n ///\n\n /// * `input` - The input that was posted by the user.\n\n async fn callback(&mut self, buffer: BufferHandle, input: String);\n\n}\n\n\n\n#[cfg(feature = \"async\")]\n\n#[async_trait(?Send)]\n\nimpl<T: FnMut(BufferHandle, String) -> LocalBoxFuture<'static, ()> + 'static>\n\n BufferInputCallbackAsync for T\n\n{\n\n async fn callback(&mut self, buffer: BufferHandle, input: String) {\n", "file_path": "weechat/src/buffer/mod.rs", "rank": 4, "score": 158187.45929681967 }, { "content": "/// Trait for the bar item callback\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait BarItemCallback: 'static {\n\n /// The callback that should be 
called after the bar items\n\n /// is marked to be updated.\n\n ///\n\n /// Should return a string that will be displayed by the bar item.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weeechat` - A reference to the weechat context.\n\n ///\n\n /// * `buffer` - The currently visible buffer.\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer) -> String;\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Buffer) -> String + 'static> BarItemCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer) -> String {\n\n self(weechat, buffer)\n\n }\n\n}\n\n\n", "file_path": "weechat/src/hooks/bar.rs", "rank": 5, "score": 151565.87030110735 }, { "content": "struct CompletionHookData {\n\n #[allow(clippy::type_complexity)]\n\n callback: Box<dyn CompletionCallback>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\nimpl CompletionHook {\n\n /// Create a new completion\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `name` - The name of the new completion. After this is created the\n\n /// can be used as `%(name)` when creating commands.\n\n ///\n\n /// * `description` - The description of the new completion.\n\n ///\n\n /// * `callback` - A function that will be called when the completion is\n\n /// used, the callback must populate the candidates for the completion.\n\n ///\n\n /// # Example\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 6, "score": 139056.98323112898 }, { "content": "/// Callback trait for file descriptor based hooks.\n\npub trait FdHookCallback {\n\n /// The concrete type of the hooked file descriptor object.\n\n type FdObject;\n\n /// The callback that will be called when data is available to be read or to\n\n /// be written on the file descriptor based object.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `fd_object` - The file-descriptor based object that was registered to\n\n /// be watched for reads or writes.\n\n fn callback(&mut self, weechat: &Weechat, fd_object: &mut Self::FdObject);\n\n}\n\n\n", "file_path": "weechat/src/hooks/fd.rs", "rank": 9, "score": 131409.20163401138 }, { "content": "/// Trait for the buffer close callback\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait BufferCloseCallback {\n\n /// Callback that will be called before the buffer is closed.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `buffer` - The buffer that will be closed.\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer) -> Result<(), ()>;\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Buffer) -> Result<(), ()> + 'static> BufferCloseCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, buffer: &Buffer) -> Result<(), ()> {\n\n self(weechat, buffer)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"async\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(r#async)))]\n\n#[async_trait(?Send)]\n", "file_path": "weechat/src/buffer/mod.rs", "rank": 10, "score": 131162.63513060834 }, { "content": "/// Trait for the signal callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait SignalCallback {\n\n /// Callback that will be called when a signal is fired.\n\n /// input field.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `signal_name` - The name of the signal that fired the callback.\n\n ///\n\n 
/// * `data` - The data that was passed on by the signal.\n\n fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n signal_name: &str,\n\n data: Option<SignalData>,\n\n ) -> ReturnCode;\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &str, Option<SignalData>) -> ReturnCode + 'static> SignalCallback for T {\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 11, "score": 124133.31060571737 }, { "content": "/// Trait for the timer callback\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait TimerCallback {\n\n /// Callback that will be called when the timer fires.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `remaining_calls` - How many times the timer will fire.\n\n fn callback(&mut self, weechat: &Weechat, remaining_calls: RemainingCalls);\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, RemainingCalls) + 'static> TimerCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, remaining_calls: RemainingCalls) {\n\n self(weechat, remaining_calls)\n\n }\n\n}\n\n\n", "file_path": "weechat/src/hooks/timer.rs", "rank": 12, "score": 124133.31060571737 }, { "content": "/// Trait for the modifier callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait ModifierCallback {\n\n /// Callback that will be called when a modifier is fired.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `modifier_name` - The name of the modifier that fired the callback.\n\n ///\n\n /// * `data` - The data that was passed on by the modifier.\n\n ///\n\n /// * `string` - The string that should be modified.\n\n fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n modifier_name: &str,\n\n data: Option<ModifierData>,\n\n string: Cow<str>,\n\n ) -> Option<String>;\n\n}\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 13, "score": 124133.31060571737 }, { "content": "/// Trait for the section read callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait SectionReadCallback: 'static {\n\n /// Callback that will be called when the section is read.\n\n ///\n\n /// Should return if the option was successfully recognized and changed.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `config` - A borrowed version of the Weechat configuration object.\n\n ///\n\n /// * `section` - The section that is being populated with default values,\n\n /// if the Config struct is contained inside of `self` make sure not to\n\n /// borrow the same section again.\n\n ///\n\n /// * `option_name` - The name of the option that is currently being read.\n\n ///\n\n /// * `option_value` - The value of the option that is being read.\n\n fn callback(\n\n &mut self,\n", "file_path": "weechat/src/config/section.rs", "rank": 14, "score": 121523.90607319953 }, { "content": "/// Trait for the section write callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait SectionWriteCallback: 'static {\n\n /// Callback that will be called when the section needs to be written out.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n 
///\n\n /// * `config` - A borrowed version of the Weechat configuration object.\n\n ///\n\n /// * `section` - The section that is being written, if the Config struct is\n\n /// contained inside of `self` make sure not to borrow the same section\n\n /// again.\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf, section: &mut ConfigSection);\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Conf, &mut ConfigSection) + 'static> SectionWriteCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf, section: &mut ConfigSection) {\n\n self(weechat, config, section)\n\n }\n\n}\n\n\n", "file_path": "weechat/src/config/section.rs", "rank": 15, "score": 121523.90607319953 }, { "content": "/// Trait for the config reload callback.\n\n///\n\n/// This trait can be implemented or a normal function or coroutine can be\n\n/// passed as the callback.\n\npub trait ConfigReloadCallback: 'static {\n\n /// Function called when configuration file is reloaded with /reload\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weeechat` - A reference to the weechat context.\n\n ///\n\n /// * `config` - A reference to the non-owned config.\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf);\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Conf) + 'static> ConfigReloadCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf) {\n\n self(weechat, config)\n\n }\n\n}\n\n\n\nimpl Weechat {\n\n pub(crate) fn config_option_get_string(\n\n &self,\n", "file_path": "weechat/src/config/config.rs", "rank": 16, "score": 121518.02729596941 }, { "content": "/// Trait for the section write-default callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n\npub trait SectionWriteDefaultCallback: 'static {\n\n /// Callback that will be called when the section needs to be populated with\n\n /// default values.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `config` - A borrowed version of the Weechat configuration object.\n\n ///\n\n /// * `section` - The section that is being populated with default values,\n\n /// if the Config struct is contained inside of `self` make sure not to\n\n /// borrow the same section again.\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf, section: &mut ConfigSection);\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Conf, &mut ConfigSection) + 'static> SectionWriteDefaultCallback for T {\n\n fn callback(&mut self, weechat: &Weechat, config: &Conf, section: &mut ConfigSection) {\n\n self(weechat, config, section)\n\n }\n\n}\n\n\n", "file_path": "weechat/src/config/section.rs", "rank": 17, "score": 118123.88879728009 }, { "content": "struct BufferJob(Runnable, BufferName);\n\n\n\nimpl BufferJob {\n\n fn run(self) -> bool {\n\n self.0.run()\n\n }\n\n\n\n fn cancel(self) {\n\n drop(self)\n\n }\n\n\n\n fn tag(&self) -> &BufferName {\n\n &self.1\n\n }\n\n}\n\n\n", "file_path": "weechat/src/executor.rs", "rank": 18, "score": 117730.83060176238 }, { "content": "type BufferName = String;\n\n\n", "file_path": "weechat/src/executor.rs", "rank": 19, "score": 117217.75076646253 }, { "content": "struct ModifierHookData {\n\n callback: Box<dyn ModifierCallback>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\n/// Enum over the different data types a modifier may send.\n\npub enum ModifierData<'a> {\n\n /// String data\n\n String(Cow<'a, str>),\n\n /// Buffer that was sent with the modifier.\n\n Buffer(Buffer<'a>),\n\n}\n\n\n\nimpl<'a> 
ModifierData<'a> {\n\n fn pointer_is_buffer(modifier_name: &str) -> bool {\n\n // This table is taken from the Weechat plugin API docs\n\n //\n\n // https://weechat.org/files/doc/stable/weechat_plugin_api.en.html#_hook_modifier\n\n if modifier_name.starts_with(\"bar_condition_\") {\n\n true\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 20, "score": 94853.30720079281 }, { "content": "struct TimerHookData {\n\n callback: Box<dyn TimerCallback>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\nimpl TimerHook {\n\n /// Create a timer that will repeatedly fire.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `interval` - The delay between calls in milliseconds.\n\n ///\n\n /// * `align_second` - The alignment on a second. For example, if the\n\n /// current time is 09:00, if the interval = 60000 (60 seconds), and\n\n /// align_second = 60, then timer is called each minute on the 0th\n\n /// second.\n\n ///\n\n /// * `max_calls` - The number of times the callback should be called, 0\n\n /// means it's called forever.\n\n ///\n", "file_path": "weechat/src/hooks/timer.rs", "rank": 21, "score": 94853.30720079281 }, { "content": "struct SignalHookData {\n\n callback: Box<dyn SignalCallback>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\n/// Enum over the different data types a signal may send.\n\n#[non_exhaustive]\n\npub enum SignalData<'a> {\n\n /// String data\n\n String(Cow<'a, str>),\n\n /// Integer data\n\n Integer(i32),\n\n /// Buffer that was sent with the signal.\n\n Buffer(Buffer<'a>),\n\n}\n\n\n\nimpl<'a> Into<SignalData<'a>> for &'a str {\n\n fn into(self) -> SignalData<'a> {\n\n SignalData::String(Cow::from(self))\n\n }\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 22, "score": 94853.30720079281 }, { "content": "pub trait FromPtrs {\n\n /// Returns the raw pointer to the config option.\n\n fn from_ptrs(option_ptr: *mut t_config_option, weechat_ptr: *mut t_weechat_plugin) -> Self;\n\n}\n\n\n", "file_path": "weechat/src/config/config_options.rs", "rank": 23, "score": 91658.61418604672 }, { "content": "/// Weechat plugin trait.\n\n///\n\n/// Implement this trait over your struct to implement a Weechat plugin. 
The\n\n/// init method will get called when Weechat loads the plugin, while the\n\n///\n\n/// Drop method will be called when Weechat unloads the plugin.\n\npub trait Plugin: Sized {\n\n /// The initialization method for the plugin.\n\n ///\n\n /// This will be called when Weechat loads the pluign.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A borrow to a Weechat object that will be valid during the\n\n /// duration of the init callback.\n\n ///\n\n /// * `args` - Arguments passed to the plugin when it is loaded.\n\n fn init(weechat: &Weechat, args: Args) -> Result<Self, ()>;\n\n}\n\n\n\n#[cfg(feature = \"async\")]\n\n#[cfg_attr(feature = \"docs\", doc(cfg(r#async)))]\n\npub use executor::Task;\n\n\n\n/// Status values for Weechat callbacks\n\npub enum ReturnCode {\n", "file_path": "weechat/src/lib.rs", "rank": 24, "score": 90054.65449900134 }, { "content": "struct FdHookData<F> {\n\n callback: Box<dyn FdHookCallback<FdObject = F>>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n fd_object: F,\n\n}\n\n\n\nimpl<F> FdHook<F> {\n\n /// Hook an object that can be turned into a raw file descriptor.\n\n /// Returns the hook object.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `fd_object` - An object for wich the file descriptor will be watched\n\n /// and the callback called when read or write operations can happen\n\n /// on it.\n\n ///\n\n /// * `mode` - Configure the hook to watch for writes, reads or both on the\n\n /// file descriptor.\n\n ///\n\n /// * `callback` - A function that will be called if a watched event on the\n", "file_path": "weechat/src/hooks/fd.rs", "rank": 25, "score": 88519.01763028733 }, { "content": "#[allow(unused)]\n\nstruct Hooks {\n\n modifier: ModifierHook,\n\n input_command: CommandRun,\n\n buffer_command: CommandRun,\n\n window_command: CommandRun,\n\n}\n\n\n\nimpl Hooks {\n\n fn new(inner_go: &InnerGo) -> Self {\n\n // Override our input command.\n\n let input_command = CommandRun::new(\"2000|/input *\", inner_go.clone())\n\n .expect(\"Can't override input command\");\n\n\n\n // Disable buffer commands while in go mode.\n\n let buffer_command =\n\n CommandRun::new(\"2000|/buffer *\", |_: &Weechat, _: &Buffer, _: Cow<str>| {\n\n ReturnCode::OkEat\n\n })\n\n .expect(\"Can't override buffer command\");\n\n\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 26, "score": 86634.23674371366 }, { "content": "pub trait HidenConfigOptionT {\n\n /// Returns the raw pointer to the config option.\n\n fn get_ptr(&self) -> *mut t_config_option;\n\n fn get_weechat(&self) -> Weechat;\n\n\n\n fn get_string(&self, property: &str) -> Option<Cow<str>> {\n\n let weechat = self.get_weechat();\n\n let get_string = weechat.get().config_option_get_string.unwrap();\n\n let property = LossyCString::new(property);\n\n\n\n unsafe {\n\n let string = get_string(self.get_ptr(), property.as_ptr());\n\n if string.is_null() {\n\n None\n\n } else {\n\n Some(CStr::from_ptr(string).to_string_lossy())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "weechat/src/config/config_options.rs", "rank": 27, "score": 84194.34769023562 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]\n\nstruct BufferData {\n\n score: i64,\n\n number: i32,\n\n indices: Vec<usize>,\n\n full_name: Rc<String>,\n\n short_name: Rc<String>,\n\n}\n\n\n\nimpl<'a> From<&Buffer<'a>> for BufferData {\n\n fn from(buffer: &Buffer) -> Self {\n\n BufferData {\n\n score: 0,\n\n number: buffer.number(),\n\n indices: Vec::new(),\n\n full_name: Rc::new(buffer.full_name().to_string()),\n\n short_name: 
Rc::new(buffer.short_name().to_string()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 28, "score": 83554.20274825032 }, { "content": "#[derive(Clone)]\n\nstruct BufferList {\n\n /// The Weechat configuration for this plugin.\n\n config: Rc<Config>,\n\n /// The list of buffers, this will first contain all buffers but can be\n\n /// filtered down with the `filter()` method.\n\n buffers: Vec<BufferData>,\n\n /// Index remembering which buffer the user selected. This can be\n\n /// manipulated using `select_next_buffer()` and `select_prev_buffer()`.\n\n selected_buffer: usize,\n\n}\n\n\n\nimpl BufferList {\n\n /// Create a new buffer list.\n\n ///\n\n /// This will fetch all the buffers from the Weechat info-list and set an\n\n /// initial score of 0 for every buffer.\n\n fn new(weechat: &Weechat, config: Rc<Config>) -> Self {\n\n let info_list = weechat\n\n .get_infolist(\"buffer\", None)\n\n .expect(\"Can't get buffer infolist\");\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 29, "score": 83554.20274825032 }, { "content": "#[derive(Default)]\n\nstruct OptionDescription<'a> {\n\n pub name: &'a str,\n\n pub option_type: OptionType,\n\n pub description: &'a str,\n\n pub string_values: &'a str,\n\n pub min: i32,\n\n pub max: i32,\n\n pub default_value: &'a str,\n\n pub value: &'a str,\n\n pub null_allowed: bool,\n\n}\n\n\n\n#[allow(missing_docs)]\n\npub enum ConfigOption<'a> {\n\n Boolean(BooleanOption<'a>),\n\n Integer(IntegerOption<'a>),\n\n String(StringOption<'a>),\n\n Color(ColorOption<'a>),\n\n}\n\n\n", "file_path": "weechat/src/config/section.rs", "rank": 30, "score": 83419.53755030023 }, { "content": "struct BarItemCbData {\n\n callback: Box<dyn BarItemCallback>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\n/// A handle to a bar item. 
The bar item is automatically removed when the object is\n\n/// dropped.\n\npub struct BarItem {\n\n name: String,\n\n ptr: *mut t_gui_bar_item,\n\n weechat: *mut t_weechat_plugin,\n\n _data: Box<BarItemCbData>,\n\n}\n\n\n\nimpl Drop for BarItem {\n\n fn drop(&mut self) {\n\n let weechat = Weechat::from_ptr(self.weechat);\n\n let bar_item_remove = weechat.get().bar_item_remove.unwrap();\n\n unsafe { bar_item_remove(self.ptr) };\n\n }\n", "file_path": "weechat/src/hooks/bar.rs", "rank": 31, "score": 81083.46782630763 }, { "content": " /// ```no_run\n\n /// # use std::borrow::Cow;\n\n /// # use std::collections::HashSet;\n\n /// # use weechat::Weechat;\n\n /// # use weechat::buffer::Buffer;\n\n /// # use weechat::hooks::{CompletionCallback, CompletionHook, Completion, CompletionPosition};\n\n ///\n\n /// pub struct MyMap {\n\n /// server_names: HashSet<String>,\n\n /// }\n\n ///\n\n /// impl CompletionCallback for MyMap {\n\n /// fn callback(&mut self, _: &Weechat, _: &Buffer, _: Cow<str>, completion: &Completion) -> Result<(), ()> {\n\n /// for server_name in &self.server_names {\n\n /// completion.add_with_options(server_name, false, CompletionPosition::Sorted);\n\n /// }\n\n /// Ok(())\n\n /// }\n\n /// }\n\n ///\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 32, "score": 78292.77141364207 }, { "content": "use libc::{c_char, c_int};\n\nuse std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr};\n\n\n\nuse weechat_sys::{\n\n t_gui_buffer, t_gui_completion, t_weechat_plugin, WEECHAT_RC_ERROR, WEECHAT_RC_OK,\n\n};\n\n\n\nuse crate::{buffer::Buffer, hooks::Hook, LossyCString, Weechat};\n\n\n\n/// A handle to a completion item.\n\npub struct Completion {\n\n weechat_ptr: *mut t_weechat_plugin,\n\n ptr: *mut t_gui_completion,\n\n}\n\n\n\n/// Trait for the completion callback.\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 33, "score": 78292.74423690831 }, { "content": " pub fn base_word(&self) -> Option<Cow<str>> {\n\n self.get_string(\"base_word\")\n\n }\n\n\n\n /// Get the command arguments including the base word.\n\n pub fn arguments(&self) -> Option<Cow<str>> {\n\n self.get_string(\"args\")\n\n }\n\n\n\n fn get_string(&self, property_name: &str) -> Option<Cow<str>> {\n\n let weechat = Weechat::from_ptr(self.weechat_ptr);\n\n\n\n let get_string = weechat.get().hook_completion_get_string.unwrap();\n\n\n\n let property_name = LossyCString::new(property_name);\n\n\n\n unsafe {\n\n let ret = get_string(self.ptr, property_name.as_ptr());\n\n\n\n if ret.is_null() {\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 34, "score": 78292.47697677827 }, { "content": " ) -> Result<(), ()>;\n\n}\n\n\n\nimpl<T: FnMut(&Weechat, &Buffer, Cow<str>, &Completion) -> Result<(), ()> + 'static>\n\n CompletionCallback for T\n\n{\n\n fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n buffer: &Buffer,\n\n completion_name: Cow<str>,\n\n completion: &Completion,\n\n ) -> Result<(), ()> {\n\n self(weechat, buffer, completion_name, completion)\n\n }\n\n}\n\n\n\n/// The positions an entry can be added to a completion list.\n\n#[derive(Clone, Copy)]\n\npub enum CompletionPosition {\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 35, "score": 78289.26764708462 }, { "content": " }\n\n\n\n Weechat::check_thread();\n\n let weechat = unsafe { Weechat::weechat() };\n\n\n\n let data = Box::new(CompletionHookData {\n\n callback: Box::new(callback),\n\n weechat_ptr: weechat.ptr,\n\n 
});\n\n\n\n let data_ref = Box::leak(data);\n\n let hook_completion = weechat.get().hook_completion.unwrap();\n\n\n\n let completion_item = LossyCString::new(completion_item);\n\n let description = LossyCString::new(description);\n\n\n\n let hook_ptr = unsafe {\n\n hook_completion(\n\n weechat.ptr,\n\n completion_item.as_ptr(),\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 36, "score": 78287.41199816205 }, { "content": " ) -> c_int {\n\n let hook_data: &mut CompletionHookData = { &mut *(pointer as *mut CompletionHookData) };\n\n let cb = &mut hook_data.callback;\n\n let weechat = Weechat::from_ptr(hook_data.weechat_ptr);\n\n let buffer = weechat.buffer_from_ptr(buffer);\n\n\n\n let completion_item = CStr::from_ptr(completion_item).to_string_lossy();\n\n\n\n let ret = cb.callback(\n\n &weechat,\n\n &buffer,\n\n completion_item,\n\n &Completion::from_raw(hook_data.weechat_ptr, completion),\n\n );\n\n\n\n if let Ok(()) = ret {\n\n WEECHAT_RC_OK\n\n } else {\n\n WEECHAT_RC_ERROR\n\n }\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 37, "score": 78287.13053546684 }, { "content": " weechat: *mut t_weechat_plugin,\n\n completion: *mut t_gui_completion,\n\n ) -> Completion {\n\n Completion {\n\n weechat_ptr: weechat,\n\n ptr: completion,\n\n }\n\n }\n\n\n\n /// Add a word for completion, keeping the list sorted.\n\n pub fn add(&self, word: &str) {\n\n self.add_with_options(word, false, CompletionPosition::Sorted)\n\n }\n\n\n\n /// Get the command used in the completion.\n\n pub fn base_command(&self) -> Option<Cow<str>> {\n\n self.get_string(\"base_command\")\n\n }\n\n\n\n /// Get the word that is being completed.\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 38, "score": 78286.87707174788 }, { "content": " /// let servers = MyMap { server_names: HashSet::new() };\n\n ///\n\n /// let completion = CompletionHook::new(\n\n /// \"matrix_servers\",\n\n /// \"Completion for the list of added Matrix servers\",\n\n /// servers,\n\n /// ).unwrap();\n\n ///\n\n /// ```\n\n pub fn new(\n\n completion_item: &str,\n\n description: &str,\n\n callback: impl CompletionCallback + 'static,\n\n ) -> Result<CompletionHook, ()> {\n\n unsafe extern \"C\" fn c_hook_cb(\n\n pointer: *const c_void,\n\n _data: *mut c_void,\n\n completion_item: *const c_char,\n\n buffer: *mut t_gui_buffer,\n\n completion: *mut t_gui_completion,\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 39, "score": 78284.92155861075 }, { "content": " let hook_completion_list_add = weechat.get().hook_completion_list_add.unwrap();\n\n\n\n let word = LossyCString::new(word);\n\n let method = LossyCString::new(position.value());\n\n\n\n unsafe {\n\n hook_completion_list_add(self.ptr, word.as_ptr(), is_nick as i32, method.as_ptr());\n\n }\n\n }\n\n}\n\n\n\n/// Hook for a completion item, the hook is removed when the object is dropped.\n\npub struct CompletionHook {\n\n _hook: Hook,\n\n _hook_data: Box<CompletionHookData>,\n\n}\n\n\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 40, "score": 78283.74293534152 }, { "content": " description.as_ptr(),\n\n Some(c_hook_cb),\n\n data_ref as *const _ as *const c_void,\n\n ptr::null_mut(),\n\n )\n\n };\n\n\n\n let hook_data = unsafe { Box::from_raw(data_ref) };\n\n\n\n if hook_ptr.is_null() {\n\n return Err(());\n\n }\n\n\n\n let hook = Hook {\n\n ptr: hook_ptr,\n\n weechat_ptr: weechat.ptr,\n\n };\n\n\n\n Ok(CompletionHook {\n\n _hook: hook,\n\n _hook_data: hook_data,\n\n })\n\n }\n\n}\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 41, 
"score": 78281.77241515808 }, { "content": " None\n\n } else {\n\n Some(CStr::from_ptr(ret).to_string_lossy())\n\n }\n\n }\n\n }\n\n\n\n /// Add a word to the completion giving the position and wether the word is\n\n /// a nick.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `word` - The word that should be added to the completion.\n\n ///\n\n /// * `is_nick` - Set if the word is a nick.\n\n ///\n\n /// * `position` - Set the position where the nick should be added to.\n\n pub fn add_with_options(&self, word: &str, is_nick: bool, position: CompletionPosition) {\n\n let weechat = Weechat::from_ptr(self.weechat_ptr);\n\n\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 42, "score": 78281.37147660703 }, { "content": " /// Insert the item in a way that keeps the list sorted.\n\n Sorted,\n\n /// Insert the item at the beginning of the list.\n\n Beginning,\n\n /// Insert the item at the end of the list.\n\n End,\n\n}\n\n\n\nimpl CompletionPosition {\n\n pub(crate) fn value(&self) -> &str {\n\n match self {\n\n CompletionPosition::Sorted => \"sort\",\n\n CompletionPosition::Beginning => \"beginning\",\n\n CompletionPosition::End => \"end\",\n\n }\n\n }\n\n}\n\n\n\nimpl Completion {\n\n pub(crate) fn from_raw(\n", "file_path": "weechat/src/hooks/completion.rs", "rank": 43, "score": 78274.37301005256 }, { "content": "/// Base configuration option methods.\n\n///\n\n/// These methods are implemented for every option and don't depend on the\n\n/// option type.\n\npub trait BaseConfigOption: HidenConfigOptionT {\n\n /// Get the name of the option.\n\n fn name(&self) -> Cow<str> {\n\n self.get_string(\"name\")\n\n .expect(\"Can't get the name of the option\")\n\n }\n\n\n\n /// Get the description of the option.\n\n fn description(&self) -> Cow<str> {\n\n self.get_string(\"description\")\n\n .expect(\"Can't get the description of the option\")\n\n }\n\n\n\n /// Get the section name of the section the option belongs to.\n\n fn section_name(&self) -> Cow<str> {\n\n self.get_string(\"section_name\")\n\n .expect(\"Can't get the section name of the option\")\n\n }\n\n\n\n /// Get the config name the option belongs to.\n", "file_path": "weechat/src/config/config_options.rs", "rank": 58, "score": 73661.32976782208 }, { "content": "/// Marker trait for config options.\n\npub trait ConfigOptions: BaseConfigOption + FromPtrs {}\n\n\n\npub(crate) type CheckCB<T> = dyn FnMut(&Weechat, &T, Cow<str>) -> bool;\n\n\n\npub(crate) struct OptionPointers<T> {\n\n pub(crate) weechat_ptr: *mut t_weechat_plugin,\n\n pub(crate) check_cb: Option<Box<CheckCB<T>>>,\n\n pub(crate) change_cb: Option<Box<dyn FnMut(&Weechat, &T)>>,\n\n pub(crate) delete_cb: Option<Box<dyn FnMut(&Weechat, &T)>>,\n\n}\n", "file_path": "weechat/src/config/config_options.rs", "rank": 59, "score": 72528.39685926768 }, { "content": "type StringCheckCb = Option<Box<dyn FnMut(&Weechat, &StringOption, Cow<str>) -> bool>>;\n\n\n\n/// Settings for a new string option.\n\n#[derive(Default)]\n\npub struct StringOptionSettings {\n\n pub(crate) name: String,\n\n\n\n pub(crate) description: String,\n\n\n\n pub(crate) default_value: String,\n\n\n\n pub(crate) change_cb: Option<Box<dyn FnMut(&Weechat, &StringOption)>>,\n\n\n\n pub(crate) check_cb: StringCheckCb,\n\n}\n\n\n\nimpl StringOptionSettings {\n\n /// Create new settings that can be used to create a new string option.\n\n ///\n\n /// # Arguments\n", "file_path": "weechat/src/config/string.rs", "rank": 60, "score": 70913.28267528473 }, { "content": "#[proc_macro]\n\npub fn plugin(input: 
proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let WeechatPluginInfo {\n\n plugin,\n\n name,\n\n author,\n\n description,\n\n version,\n\n license,\n\n } = parse_macro_input!(input as WeechatPluginInfo);\n\n\n\n let (name_len, name) = name;\n\n let (author_len, author) = author;\n\n let (description_len, description) = description;\n\n let (license_len, license) = license;\n\n let (version_len, version) = version;\n\n\n\n let result = quote! {\n\n #[doc(hidden)]\n\n #[no_mangle]\n\n pub static weechat_plugin_api_version: [u8; weechat::weechat_sys::WEECHAT_PLUGIN_API_VERSION_LENGTH] =\n", "file_path": "weechat-macro/src/lib.rs", "rank": 61, "score": 62348.54248885495 }, { "content": " char *name; /* short name */\n", "file_path": "weechat-sys/src/weechat-plugin.h", "rank": 62, "score": 58320.97761702046 }, { "content": "fn main() {\n\n let bundled = env::var(WEECHAT_BUNDLED_ENV).map_or(false, |bundled| {\n\n match bundled.to_lowercase().as_ref() {\n\n \"1\" | \"true\" | \"yes\" => true,\n\n \"0\" | \"false\" | \"no\" => false,\n\n _ => panic!(\"Invalid value for WEECHAT_BUNDLED, must be true/false\"),\n\n }\n\n });\n\n\n\n let plugin_file = env::var(WEECHAT_PLUGIN_FILE_ENV);\n\n\n\n let bindings = if bundled {\n\n build(\"src/weechat-plugin.h\").expect(\"Unable to generate bindings\")\n\n } else {\n\n match plugin_file {\n\n Ok(file) => {\n\n let path = PathBuf::from(file)\n\n .canonicalize()\n\n .expect(\"Can't canonicalize path\");\n\n build(path.to_str().unwrap_or_default()).unwrap_or_else(|_| {\n", "file_path": "weechat-sys/build.rs", "rank": 63, "score": 56487.77272312255 }, { "content": "struct ConfigPointers {\n\n reload_cb: Option<Box<dyn ConfigReloadCallback>>,\n\n weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n", "file_path": "weechat/src/config/config.rs", "rank": 64, "score": 52807.042034091595 }, { "content": "struct Ripgrep {\n\n _config: Rc<RefCell<Config>>,\n\n _command: Command,\n\n _runtime: Rc<RefCell<Option<Runtime>>>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RipgrepCommand {\n\n config: Rc<RefCell<Config>>,\n\n buffer: Rc<RefCell<Option<GrepBuffer>>>,\n\n runtime: Rc<RefCell<Option<Runtime>>>,\n\n last_search_file: Rc<RefCell<Option<PathBuf>>>,\n\n}\n\n\n\nimpl RipgrepCommand {\n\n /// Wait for the result from the search task and print it out.\n\n ///\n\n /// This runs on the main Weechat thread.\n\n // TODO we could spawn this task from the search task running on the Tokio\n\n // runtime using Weechat::spawn_from_thread(). 
This would get rid of the\n", "file_path": "weechat/examples/grep/src/lib.rs", "rank": 65, "score": 52807.042034091595 }, { "content": "#[allow(unused)]\n\nstruct Go {\n\n command: Command,\n\n}\n\n\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 66, "score": 52807.042034091595 }, { "content": "#[allow(unused)]\n\nstruct Infolist {\n\n command: Command,\n\n}\n\n\n", "file_path": "weechat/examples/infolist/src/lib.rs", "rank": 67, "score": 52807.042034091595 }, { "content": "struct WeechatPluginInfo {\n\n plugin: syn::Ident,\n\n name: (usize, Literal),\n\n author: (usize, Literal),\n\n description: (usize, Literal),\n\n version: (usize, Literal),\n\n license: (usize, Literal),\n\n}\n\n\n", "file_path": "weechat-macro/src/lib.rs", "rank": 68, "score": 52446.15240882779 }, { "content": "struct RunningState {\n\n /// Hooks that are necessary to enable go-mode.\n\n hooks: Hooks,\n\n /// The input of the current buffer before we entered go-mode.\n\n saved_input: InputState,\n\n /// Our stored input while in go-mode.\n\n last_input: String,\n\n /// The current list of buffers we are presenting, will initially contain\n\n /// all buffers but will get filtered down as we input patterns.\n\n buffers: BufferList,\n\n}\n\n\n\nimpl RunningState {\n\n fn new(inner_go: &InnerGo, weechat: &Weechat, buffer: &Buffer) -> Self {\n\n RunningState {\n\n hooks: Hooks::new(inner_go),\n\n last_input: \"\".to_owned(),\n\n saved_input: InputState::from(buffer),\n\n buffers: BufferList::new(weechat, inner_go.config.clone()),\n\n }\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 69, "score": 51283.247959065775 }, { "content": "struct SamplePlugin {\n\n _rust_hook: Command,\n\n _rust_config: Config,\n\n _item: BarItem,\n\n _signal: SignalHook,\n\n}\n\n\n\nimpl SamplePlugin {\n\n fn input_cb(\n\n _weechat: &Weechat,\n\n buffer: &Buffer,\n\n input: Cow<str>,\n\n ) -> Result<(), ()> {\n\n buffer.print(&input);\n\n Ok(())\n\n }\n\n\n\n fn close_cb(_weechat: &Weechat, _buffer: &Buffer) -> Result<(), ()> {\n\n Weechat::print(\"Closing buffer\");\n\n Ok(())\n", "file_path": "weechat/examples/sample/src/lib.rs", "rank": 70, "score": 51283.247959065775 }, { "content": "#[derive(Clone)]\n\nstruct InnerGo {\n\n running_state: Rc<RefCell<Option<RunningState>>>,\n\n config: Rc<Config>,\n\n}\n\n\n\nimpl InnerGo {\n\n fn stop(&self, weechat: &Weechat, switch_buffer: bool) {\n\n if let Some(state) = self.running_state.borrow_mut().take() {\n\n state.stop(weechat, switch_buffer);\n\n }\n\n }\n\n}\n\n\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 71, "score": 51283.247959065775 }, { "content": "#[derive(Clone)]\n\nstruct InputState {\n\n input_string: Rc<String>,\n\n input_position: i32,\n\n}\n\n\n\nimpl InputState {\n\n /// Restore the input state on the given buffer.\n\n fn restore_for_buffer(&self, buffer: &Buffer) {\n\n buffer.set_input(&self.input_string);\n\n buffer.set_input_position(self.input_position);\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a Buffer<'a>> for InputState {\n\n fn from(buffer: &Buffer) -> Self {\n\n InputState {\n\n input_string: Rc::new(buffer.input().to_string()),\n\n input_position: buffer.input_position(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "weechat/examples/go/src/lib.rs", "rank": 72, "score": 51283.247959065775 }, { "content": "#[derive(Default, Clone)]\n\nstruct InnerInfolist {\n\n buffer: Rc<RefCell<Option<BufferHandle>>>,\n\n}\n\n\n\nimpl InnerInfolist {\n\n fn set_title(&self, weechat: &Weechat, buffer: &Buffer) {\n\n let infolist = weechat\n\n .get_infolist(\"hook\", 
Some(\"infolist\"))\n\n .expect(\"Can't get the infolist list\");\n\n\n\n let infolist_names: Vec<String> = infolist\n\n .filter_map(|item| {\n\n let name = item.get(\"infolist_name\")?;\n\n if let InfolistVariable::String(n) = name {\n\n Some(n.to_string())\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n", "file_path": "weechat/examples/infolist/src/lib.rs", "rank": 73, "score": 51283.247959065775 }, { "content": "fn build(file: &str) -> Result<Bindings, ()> {\n\n const INCLUDED_TYPES: &[&str] = &[\n\n \"t_weechat_plugin\",\n\n \"t_gui_buffer\",\n\n \"t_gui_nick\",\n\n \"t_gui_nick_group\",\n\n \"t_hook\",\n\n \"t_hdata\",\n\n ];\n\n const INCLUDED_VARS: &[&str] = &[\n\n \"WEECHAT_PLUGIN_API_VERSION\",\n\n \"WEECHAT_HASHTABLE_INTEGER\",\n\n \"WEECHAT_HASHTABLE_STRING\",\n\n \"WEECHAT_HASHTABLE_POINTER\",\n\n \"WEECHAT_HASHTABLE_BUFFER\",\n\n \"WEECHAT_HASHTABLE_TIME\",\n\n \"WEECHAT_HOOK_SIGNAL_STRING\",\n\n \"WEECHAT_HOOK_SIGNAL_INT\",\n\n \"WEECHAT_HOOK_SIGNAL_POINTER\",\n\n ];\n", "file_path": "weechat-sys/build.rs", "rank": 74, "score": 41401.58477711235 }, { "content": " /// use weechat::Weechat;\n\n /// use weechat::config::StringOptionSettings;\n\n ///\n\n /// let settings = StringOptionSettings::new(\"address\")\n\n /// .set_change_callback(|weechat, option| {\n\n /// Weechat::print(\"Option changed\");\n\n /// });\n\n /// ```\n\n pub fn set_change_callback(\n\n mut self,\n\n callback: impl FnMut(&Weechat, &StringOption) + 'static,\n\n ) -> Self {\n\n self.change_cb = Some(Box::new(callback));\n\n self\n\n }\n\n\n\n /// Set a callback to check the validity of the string option.\n\n ///\n\n /// # Arguments\n\n ///\n", "file_path": "weechat/src/config/string.rs", "rank": 75, "score": 41385.57721937436 }, { "content": " ///\n\n /// * `name` - The name of the new option.\n\n pub fn new<N: Into<String>>(name: N) -> Self {\n\n StringOptionSettings {\n\n name: name.into(),\n\n ..Default::default()\n\n }\n\n }\n\n\n\n /// Set the description of the option.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `description` - The description of the new option.\n\n pub fn description<D: Into<String>>(mut self, descritpion: D) -> Self {\n\n self.description = descritpion.into();\n\n self\n\n }\n\n\n\n /// Set the default value of the option.\n", "file_path": "weechat/src/config/string.rs", "rank": 76, "score": 41383.669882172355 }, { "content": " /// * `callback` - The callback that will be run.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// use weechat::config::StringOptionSettings;\n\n ///\n\n /// let settings = StringOptionSettings::new(\"address\")\n\n /// .set_check_callback(|weechat, option, value| {\n\n /// value.starts_with(\"http\")\n\n /// });\n\n /// ```\n\n pub fn set_check_callback(\n\n mut self,\n\n callback: impl FnMut(&Weechat, &StringOption, Cow<str>) -> bool + 'static,\n\n ) -> Self {\n\n self.check_cb = Some(Box::new(callback));\n\n self\n\n }\n\n}\n\n\n", "file_path": "weechat/src/config/string.rs", "rank": 77, "score": 41381.32701316005 }, { "content": "/// A config option with a string value.\n\npub struct StringOption<'a> {\n\n pub(crate) ptr: *mut t_config_option,\n\n pub(crate) weechat_ptr: *mut t_weechat_plugin,\n\n pub(crate) _phantom: PhantomData<&'a ConfigSection>,\n\n}\n\n\n\nimpl<'a> StringOption<'a> {\n\n /// Get the value of the option.\n\n pub fn value(&self) -> Cow<str> {\n\n let weechat = self.get_weechat();\n\n let config_string = weechat.get().config_string.unwrap();\n\n unsafe {\n\n let string = config_string(self.get_ptr());\n\n 
CStr::from_ptr(string).to_string_lossy()\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> FromPtrs for StringOption<'a> {\n", "file_path": "weechat/src/config/string.rs", "rank": 78, "score": 41380.62033595494 }, { "content": " fn from_ptrs(option_ptr: *mut t_config_option, weechat_ptr: *mut t_weechat_plugin) -> Self {\n\n StringOption {\n\n ptr: option_ptr,\n\n weechat_ptr,\n\n _phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> HidenConfigOptionT for StringOption<'a> {\n\n fn get_ptr(&self) -> *mut t_config_option {\n\n self.ptr\n\n }\n\n\n\n fn get_weechat(&self) -> Weechat {\n\n Weechat::from_ptr(self.weechat_ptr)\n\n }\n\n}\n\n\n\nimpl<'a> BaseConfigOption for StringOption<'a> {}\n\nimpl<'a> ConfigOptions for StringOption<'_> {}\n", "file_path": "weechat/src/config/string.rs", "rank": 79, "score": 41380.21636544021 }, { "content": " ///\n\n /// This is the value the option will have if it isn't set by the user. If\n\n /// the option is reset, the option will take this value.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `value` - The value that should act as the default value.\n\n pub fn default_value<V: Into<String>>(mut self, value: V) -> Self {\n\n self.default_value = value.into();\n\n self\n\n }\n\n\n\n /// Set the callback that will run when the value of the option changes.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `callback` - The callback that will be run.\n\n ///\n\n /// # Examples\n\n /// ```\n", "file_path": "weechat/src/config/string.rs", "rank": 80, "score": 41370.62357751534 }, { "content": "use crate::{\n\n config::{\n\n config_options::{ConfigOptions, FromPtrs, HidenConfigOptionT},\n\n BaseConfigOption, ConfigSection,\n\n },\n\n Weechat,\n\n};\n\nuse std::{borrow::Cow, ffi::CStr, marker::PhantomData};\n\nuse weechat_sys::{t_config_option, t_weechat_plugin};\n\n\n", "file_path": "weechat/src/config/string.rs", "rank": 81, "score": 41368.4647912644 }, { "content": "//! Weechat Hook module.\n\n//!\n\n//! Weechat hooks are used for many different things, to create commands, to\n\n//! listen to events on a file descriptor, add completions to Weechat, etc.\n\n\n\nmod signal;\n\n\n\nmod bar;\n\nmod commands;\n\nmod completion;\n\nmod fd;\n\n#[cfg(feature = \"unsound\")]\n\nmod modifier;\n\nmod timer;\n\n\n\npub use bar::{BarItem, BarItemCallback};\n\npub use commands::{Command, CommandCallback, CommandRun, CommandRunCallback, CommandSettings};\n\npub use completion::{Completion, CompletionCallback, CompletionHook, CompletionPosition};\n\n\n\npub use fd::{FdHook, FdHookCallback, FdHookMode};\n", "file_path": "weechat/src/hooks/mod.rs", "rank": 82, "score": 40821.14331956387 }, { "content": "#[cfg(feature = \"unsound\")]\n\npub use modifier::{ModifierCallback, ModifierData, ModifierHook};\n\npub use signal::{SignalCallback, SignalData, SignalHook};\n\npub use timer::{RemainingCalls, TimerCallback, TimerHook};\n\n\n\nuse crate::Weechat;\n\nuse weechat_sys::{t_hook, t_weechat_plugin};\n\n\n\n/// Weechat Hook type. 
The hook is unhooked automatically when the object is\n\n/// dropped.\n\npub(crate) struct Hook {\n\n pub(crate) ptr: *mut t_hook,\n\n pub(crate) weechat_ptr: *mut t_weechat_plugin,\n\n}\n\n\n\nimpl Drop for Hook {\n\n fn drop(&mut self) {\n\n let weechat = Weechat::from_ptr(self.weechat_ptr);\n\n let unhook = weechat.get().unhook.unwrap();\n\n unsafe { unhook(self.ptr) };\n\n }\n\n}\n", "file_path": "weechat/src/hooks/mod.rs", "rank": 83, "score": 40819.97124338486 }, { "content": "use libc::c_char;\n\nuse std::{borrow::Cow, ffi::CStr, os::raw::c_void, ptr};\n\n\n\nuse weechat_sys::{t_gui_buffer, t_weechat_plugin};\n\n\n\nuse super::Hook;\n\nuse crate::{buffer::Buffer, LossyCString, Weechat};\n\n\n\n/// Hook for a modifier, the hook is removed when the object is dropped.\n\n#[cfg_attr(feature = \"docs\", doc(cfg(unsound)))]\n\npub struct ModifierHook {\n\n _hook: Hook,\n\n _hook_data: Box<ModifierHookData>,\n\n}\n\n\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 84, "score": 40819.46734827294 }, { "content": "use std::{\n\n borrow::Cow,\n\n cell::Cell,\n\n ffi::CStr,\n\n os::raw::{c_char, c_int, c_void},\n\n ptr,\n\n rc::Rc,\n\n};\n\n\n\nuse weechat_sys::{t_gui_buffer, t_weechat_plugin};\n\n\n\nuse super::Hook;\n\nuse crate::{\n\n buffer::{Buffer, InnerBuffer, InnerBuffers},\n\n LossyCString, ReturnCode, Weechat,\n\n};\n\n\n\n/// Hook for a signal, the hook is removed when the object is dropped.\n\npub struct SignalHook {\n\n _hook: Hook,\n\n _hook_data: Box<SignalHookData>,\n\n}\n\n\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 85, "score": 40819.448979783156 }, { "content": " string: Cow<str>,\n\n ) -> Option<String> {\n\n self(weechat, modifier_name, data, string)\n\n }\n\n}\n\n\n\nimpl ModifierHook {\n\n /// Hook a modifier.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `modifier_name` - The modifier to hook.\n\n ///\n\n /// * `callback` - A function or a struct that implements ModifierCallback,\n\n /// the callback method of the trait will be called when the modifier is\n\n /// fired.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the method is not called from the main Weechat thread.\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 86, "score": 40819.10216034204 }, { "content": " /// ```\n\n #[cfg_attr(feature = \"docs\", doc(cfg(unsound)))]\n\n pub fn new(modifier_name: &str, callback: impl ModifierCallback + 'static) -> Result<Self, ()> {\n\n unsafe extern \"C\" fn c_hook_cb(\n\n pointer: *const c_void,\n\n _data: *mut c_void,\n\n modifier_name: *const c_char,\n\n modifier_data: *const c_char,\n\n string: *const c_char,\n\n ) -> *mut c_char {\n\n let hook_data: &mut ModifierHookData = { &mut *(pointer as *mut ModifierHookData) };\n\n let cb = &mut hook_data.callback;\n\n\n\n let modifier_name = CStr::from_ptr(modifier_name).to_str().unwrap_or_default();\n\n\n\n let string = if string.is_null() {\n\n Cow::from(\"\")\n\n } else {\n\n CStr::from_ptr(string).to_string_lossy()\n\n };\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 87, "score": 40817.76152077966 }, { "content": " ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use std::borrow::Cow;\n\n /// # use weechat::{Weechat, ReturnCode};\n\n /// # use weechat::hooks::{ModifierData, ModifierHook};\n\n ///\n\n /// let modifier_hook = ModifierHook::new(\n\n /// \"input_text_display_with_cursor\",\n\n /// |_weechat: &Weechat,\n\n /// _modifier_name: &str,\n\n /// data: Option<ModifierData>,\n\n /// string: Cow<str>| {\n\n /// if let ModifierData::Buffer(buffer) = data? 
{\n\n /// buffer.print(\"Modifying the input buffer\")\n\n /// }\n\n ///\n\n /// None\n\n /// });\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 88, "score": 40817.42935724032 }, { "content": "\n\nimpl<T: FnMut(&Weechat, &str, Option<ModifierData>, Cow<str>) -> Option<String> + 'static>\n\n ModifierCallback for T\n\n{\n\n /// Callback that will be called when a modifier is fired.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `weechat` - A Weechat context.\n\n ///\n\n /// * `modifier_name` - The name of the modifier that fired the callback.\n\n ///\n\n /// * `data` - The data that was passed on by the modifier.\n\n ///\n\n /// * `string` - The string that should be modified.\n\n fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n modifier_name: &str,\n\n data: Option<ModifierData>,\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 89, "score": 40817.089873854755 }, { "content": "//! Bar items are used to display status information in Weechat.\n\nuse core::ptr;\n\nuse libc::c_char;\n\nuse std::os::raw::c_void;\n\nuse weechat_sys::{t_gui_bar_item, t_gui_buffer, t_gui_window, t_hashtable, t_weechat_plugin};\n\n\n\nuse crate::{buffer::Buffer, LossyCString, Weechat};\n\n\n\n/// Trait for the bar item callback\n\n///\n\n/// A blanket implementation for pure `FnMut` functions exists, if data needs to\n\n/// be passed to the callback implement this over your struct.\n", "file_path": "weechat/src/hooks/bar.rs", "rank": 90, "score": 40816.96714795648 }, { "content": " /// be a string, an i32 number, or a buffer.\n\n ///\n\n /// ```no_run\n\n /// # use weechat::Weechat;\n\n /// # use weechat::buffer::BufferBuilder;\n\n /// # let buffer_handle = BufferBuilder::new(\"test\")\n\n /// # .build()\n\n /// # .unwrap();\n\n /// # let buffer = buffer_handle.upgrade().unwrap();\n\n /// // Fetch the chat history for the buffer.\n\n /// Weechat::hook_signal_send(\"logger_backlog\", &buffer);\n\n ///\n\n /// // Signal that the input text changed.\n\n /// Weechat::hook_signal_send(\"input_text_changed\", \"\");\n\n /// ```\n\n ///\n\n /// [reference]: https://weechat.org/files/doc/stable/weechat_plugin_api.en.html#_hook_signal_send\n\n pub fn hook_signal_send<'a, D: Into<SignalData<'a>>>(signal_name: &str, data: D) -> ReturnCode {\n\n Weechat::check_thread();\n\n let weechat = unsafe { Weechat::weechat() };\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 91, "score": 40816.66549554782 }, { "content": "}\n\n\n\nimpl BarItem {\n\n /// Create a new bar item that can be added by a user.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `name` - The name of the new bar item.\n\n ///\n\n /// * `callback` - The callback that should be called after the bar items\n\n /// is marked to be updated.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the method is not called from the main Weechat thread.\n\n ///\n\n /// # Example\n\n /// ```no_run\n\n /// # use weechat::Weechat;\n\n /// # use weechat::buffer::Buffer;\n", "file_path": "weechat/src/hooks/bar.rs", "rank": 92, "score": 40816.21618723872 }, { "content": "\n\n let signal_name = LossyCString::new(signal_name);\n\n let signal_send = weechat.get().hook_signal_send.unwrap();\n\n let data = data.into();\n\n\n\n let ret = if let SignalData::String(string) = data {\n\n let string = LossyCString::new(string);\n\n unsafe {\n\n signal_send(\n\n signal_name.as_ptr(),\n\n weechat_sys::WEECHAT_HOOK_SIGNAL_STRING as *const _ as *const c_char,\n\n string.as_ptr() as *mut _,\n\n )\n\n }\n\n } else {\n\n let (ptr, data_type) = match data {\n\n 
SignalData::Integer(number) => (\n\n number as *mut _,\n\n weechat_sys::WEECHAT_HOOK_SIGNAL_INT as *const u8,\n\n ),\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 93, "score": 40815.78064813689 }, { "content": " fn callback(\n\n &mut self,\n\n weechat: &Weechat,\n\n signal_name: &str,\n\n data: Option<SignalData>,\n\n ) -> ReturnCode {\n\n self(weechat, signal_name, data)\n\n }\n\n}\n\n\n\nimpl SignalHook {\n\n /// Hook a signal.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `signal_name` - The signal to hook (wildcard `*` is allowed).\n\n ///\n\n /// * `callback` - A function or a struct that implements SignalCallback,\n\n /// the callback method of the trait will be called when the signal is\n\n /// fired.\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 94, "score": 40815.75666509227 }, { "content": "\n\n let data = Box::new(ModifierHookData {\n\n callback: Box::new(callback),\n\n weechat_ptr: weechat.ptr,\n\n });\n\n\n\n let data_ref = Box::leak(data);\n\n let hook_modifier = weechat.get().hook_modifier.unwrap();\n\n\n\n let modifier_name = LossyCString::new(modifier_name);\n\n\n\n let hook_ptr = unsafe {\n\n hook_modifier(\n\n weechat.ptr,\n\n modifier_name.as_ptr(),\n\n Some(c_hook_cb),\n\n data_ref as *const _ as *const c_void,\n\n ptr::null_mut(),\n\n )\n\n };\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 95, "score": 40815.74033763069 }, { "content": "\n\n let weechat = Weechat::from_ptr(hook_data.weechat_ptr);\n\n\n\n let data = ModifierData::from_name(&weechat, modifier_name, modifier_data);\n\n\n\n let modified_string = cb.callback(&weechat, modifier_name, data, string);\n\n\n\n if let Some(modified_string) = modified_string {\n\n let string_length = modified_string.len();\n\n let modified_string = LossyCString::new(modified_string);\n\n\n\n let strndup = weechat.get().strndup.unwrap();\n\n strndup(modified_string.as_ptr(), string_length as i32)\n\n } else {\n\n ptr::null_mut()\n\n }\n\n }\n\n\n\n Weechat::check_thread();\n\n let weechat = unsafe { Weechat::weechat() };\n", "file_path": "weechat/src/hooks/modifier.rs", "rank": 96, "score": 40815.63061041304 }, { "content": "impl<'a> Into<SignalData<'a>> for &'a Buffer<'a> {\n\n fn into(self) -> SignalData<'a> {\n\n let ptr = self.ptr();\n\n\n\n SignalData::Buffer(Buffer {\n\n inner: InnerBuffers::BorrowedBuffer(InnerBuffer {\n\n ptr,\n\n weechat: self.inner.weechat(),\n\n closing: Rc::new(Cell::new(false)),\n\n }),\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> SignalData<'a> {\n\n fn pointer_is_buffer(signal_name: &str) -> bool {\n\n // This table is taken from the Weechat plugin API docs\n\n //\n\n // https://weechat.org/files/doc/stable/weechat_plugin_api.en.html#_hook_signal\n\n #[allow(clippy::match_like_matches_macro)]\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 97, "score": 40815.45251785575 }, { "content": " ///\n\n /// # Panics\n\n ///\n\n /// Panics if the method is not called from the main Weechat thread.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use weechat::{Weechat, ReturnCode};\n\n /// # use weechat::hooks::{SignalData, SignalHook};\n\n /// let signal_hook = SignalHook::new(\n\n /// \"buffer_switch\",\n\n /// |_weechat: &Weechat, _signal_name: &str, data: Option<SignalData>| {\n\n /// if let Some(data) = data {\n\n /// match data {\n\n /// SignalData::Buffer(buffer) => {\n\n /// buffer.print(\"Switched buffer\")\n\n /// }\n\n /// _ => (),\n\n /// }\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 98, "score": 40815.24153150707 }, { "content": "\n\n let 
weechat = Weechat::from_ptr(hook_data.weechat_ptr);\n\n let data =\n\n SignalData::from_type_and_name(&weechat, signal_name, data_type, signal_data);\n\n\n\n cb.callback(&weechat, signal_name, data) as i32\n\n }\n\n\n\n Weechat::check_thread();\n\n let weechat = unsafe { Weechat::weechat() };\n\n\n\n let data = Box::new(SignalHookData {\n\n callback: Box::new(callback),\n\n weechat_ptr: weechat.ptr,\n\n });\n\n\n\n let data_ref = Box::leak(data);\n\n let hook_signal = weechat.get().hook_signal.unwrap();\n\n\n\n let signal_name = LossyCString::new(signal_name);\n", "file_path": "weechat/src/hooks/signal.rs", "rank": 99, "score": 40815.11691579899 } ]
Rust
src/cli.rs
benmaddison/rfz
d4c24a28c2178db24858943a4a29a3cbf218b0c8
use std::convert::TryInto; use std::ffi::{OsStr, OsString}; use std::io::stdout; use std::path::PathBuf; use std::result; use std::str::FromStr; use clap::{crate_authors, crate_description, crate_name, crate_version}; use directories::ProjectDirs; use crate::cmd::{ArgProvider, CmdExec}; use crate::errors::{Error, Result}; pub trait DefaultsProvider { fn dir(&self) -> &OsStr; fn jobs(&self) -> &str; } pub struct Defaults { dir: OsString, jobs: String, } impl Defaults { pub fn get() -> Result<Self> { let dir = match ProjectDirs::from("", "", "rfz") { Some(dirs) => dirs.data_dir().as_os_str().to_owned(), None => { return Err(Error::UserDirectories( "Failed to infer user directory locations".to_string(), )) } }; let jobs = num_cpus::get().to_string(); Ok(Defaults { dir, jobs }) } } impl DefaultsProvider for Defaults { fn dir(&self) -> &OsStr { &self.dir } fn jobs(&self) -> &str { &self.jobs } } pub struct Cli<'a> { defaults: &'a dyn DefaultsProvider, args: clap::ArgMatches<'a>, } impl<'a> Cli<'a> { pub fn init(defaults: &'a dyn DefaultsProvider) -> Self { match Self::init_from(defaults, None) { Ok(cli) => cli, Err(e) => e.exit(), } } fn init_from( defaults: &'a dyn DefaultsProvider, argv: Option<Vec<&str>>, ) -> result::Result<Self, clap::Error> { let app = Cli::build_cli(defaults); let args = match argv { Some(argv) => app.get_matches_from_safe(argv), None => app.get_matches_safe(), }; Ok(Cli { defaults, args: args?, }) } fn build_cli(defaults: &'a dyn DefaultsProvider) -> clap::App { clap::app_from_crate!() .setting(clap::AppSettings::SubcommandRequired) .arg( clap::Arg::with_name("jobs") .short("j") .long("jobs") .takes_value(true) .global(true) .default_value(defaults.jobs()) .help("Number of concurrent jobs to run"), ) .arg( clap::Arg::with_name("dir") .short("d") .long("dir") .takes_value(true) .global(true) .default_value_os(defaults.dir()) .help("Directory containing IETF html docs"), ) .arg( clap::Arg::with_name("verbosity") .short("v") .multiple(true) .global(true) .help("Increase output verbosity"), ) .subcommand( clap::SubCommand::with_name("completions") .about("Print shell completion script") .arg( clap::Arg::with_name("shell") .required(true) .possible_values(&clap::Shell::variants()) .help("Shell for which to generate completion script"), ), ) .subcommand( clap::SubCommand::with_name("index") .about( "List the latest version of each document \ with associated metadata", ) .arg( clap::Arg::with_name("type") .short("t") .long("type") .takes_value(true) .multiple(true) .possible_values(&["draft", "rfc", "bcp", "std"]) .help("Limit output by document type"), ), ) .subcommand( clap::SubCommand::with_name("summary") .about("Print a summary of the metadata in <doc>") .arg( clap::Arg::with_name("doc") .required(true) .help("Path to the document"), ), ) .subcommand( clap::SubCommand::with_name("sync") .about("Syncronize the local document mirror") .arg( clap::Arg::with_name("remote") .short("r") .long("remote") .default_value("rsync.tools.ietf.org::tools.html") .help("Remote 'rsync' target to sync from"), ) .arg( clap::Arg::with_name("command") .long("command") .default_value("rsync") .help("Rsync command"), ), ) } pub fn run(&self) -> Result<()> { match self.args.subcommand() { ("completions", Some(sub_matches)) => { self.print_completions(sub_matches); Ok(()) } (subcommand, Some(sub_matches)) => { let args = CliArgs::from(sub_matches); let exec = CmdExec::init(subcommand, &args)?; exec.run() } _ => Err(Error::CliError("No sub-command was found".to_string())), } } fn 
print_completions(&self, sub_matches: &clap::ArgMatches) { let shell = clap::Shell::from_str(sub_matches.value_of("shell").unwrap()).unwrap(); let mut app = Cli::build_cli(self.defaults); let _stdout = stdout(); #[cfg(not(test))] let mut writer = _stdout.lock(); #[cfg(test)] let mut writer = std::io::sink(); app.gen_completions_to(crate_name!(), shell, &mut writer); } } struct CliArgs<'a>(&'a clap::ArgMatches<'a>); impl<'a> CliArgs<'a> { fn from(sub_matches: &'a clap::ArgMatches<'a>) -> Self { CliArgs(sub_matches) } } impl ArgProvider for CliArgs<'_> { fn jobs(&self) -> usize { usize::from_str(self.0.value_of("jobs").unwrap()).unwrap() } fn dir(&self) -> PathBuf { PathBuf::from(self.0.value_of("dir").unwrap()) } fn verbosity(&self) -> usize { match self.0.occurrences_of("verbosity").try_into() { Ok(n) => n, Err(_) => usize::MAX, } } fn path(&self) -> PathBuf { PathBuf::from(self.0.value_of("doc").unwrap()) } fn rsync_cmd(&self) -> &str { self.0.value_of("command").unwrap() } fn rsync_remote(&self) -> &str { self.0.value_of("remote").unwrap() } fn types(&self) -> Option<Vec<&str>> { match self.0.values_of("type") { Some(values) => Some(values.collect()), None => None, } } } #[cfg(test)] mod test { use super::*; use crate::test::resource_path; use std::str::FromStr; struct DummyDefaults; impl DefaultsProvider for DummyDefaults { fn jobs(&self) -> &str { "1" } fn dir(&self) -> &OsStr { OsStr::new("/home/foo/rfz") } } #[test] fn test_cli_defaults() -> Result<()> { let defaults = Defaults::get()?; assert!(usize::from_str(defaults.jobs()).unwrap() > 0); Ok(()) } #[test] fn test_empty_args() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::MissingSubcommand), Ok(_) => panic!("Expected MissingSubcommand Error"), } } #[test] fn test_dummy_index() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), None); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_index_filtered() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index", "--type", "rfc"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), Some(vec!["rfc"])); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_summary() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "summary", "/home/foo/rfz/bar.html"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "summary"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.path(), PathBuf::from("/home/foo/rfz/bar.html")); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_sync() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "sync", "-v"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "sync"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.rsync_cmd(), "rsync"); 
assert_eq!(cli_args.rsync_remote(), "rsync.tools.ietf.org::tools.html"); assert_eq!(cli_args.verbosity(), 1) } _ => panic!("Cli parsing failed"), } } #[test] fn test_exec_index() -> Result<()> { let defaults = Defaults::get()?; let dir = resource_path(""); let argv = Some(vec!["rfz", "index", "-d", dir.to_str().unwrap()]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_completions() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "bash"]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_unknown_shell() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "crash"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::InvalidValue), Ok(_) => panic!("Expected InvalidValue Error"), }; Ok(()) } }
use std::convert::TryInto; use std::ffi::{OsStr, OsString}; use std::io::stdout; use std::path::PathBuf; use std::result; use std::str::FromStr; use clap::{crate_authors, crate_description, crate_name, crate_version}; use directories::ProjectDirs; use crate::cmd::{ArgProvider, CmdExec}; use crate::errors::{Error, Result}; pub trait DefaultsProvider { fn dir(&self) -> &OsStr; fn jobs(&self) -> &str; } pub struct Defaults { dir: OsString, jobs: String, } impl Defaults { pub fn get() -> Result<Self> { let dir = match ProjectDirs::from("", "", "rfz") { Some(dirs) => dirs.data_dir().as_os_str().to_owned(), None => { return Err(Error::UserDirectories( "Failed to infer user directory locations".to_string(), )) } }; let jobs = num_cpus::get().to_string(); Ok(Defaults { dir, jobs }) } } impl DefaultsProvider for Defaults { fn dir(&self) -> &OsStr { &self.dir } fn jobs(&self) -> &str { &self.jobs } } pub struct Cli<'a> { defaults: &'a dyn DefaultsProvider, args: clap::ArgMatches<'a>, } impl<'a> Cli<'a> { pub fn init(defaults: &'a dyn DefaultsProvider) -> Self { match Self::init_from(defaults, None) { Ok(cli) => cli, Err(e) => e.exit(), } } fn init_from( defaults: &'a dyn DefaultsProvider, argv: Option<Vec<&str>>, ) -> result::Result<Self, clap::Error> { let app = Cli::build_cli(defaults); let args = match argv { Some(argv) => app.get_matches_from_safe(argv), None => app.get_matches_safe(), }; Ok(Cli { defaults, args: args?, }) } fn build_cli(defaults: &'a dyn DefaultsProvider) -> clap::App { clap::app_from_crate!() .setting(clap::AppSettings::SubcommandRequired) .arg( clap::Arg::with_name("jobs") .short("j") .long("jobs") .takes_value(true) .global(true) .default_value(defaults.jobs()) .help("Number of concurrent jobs to run"), ) .arg( clap::Arg::with_name("dir") .short("d") .long("dir") .takes_value(true) .global(true) .default_value_os(defaults.dir()) .help("Directory containing IETF html docs"), ) .arg( clap::Arg::with_name("verbosity") .short("v") .multiple(true) .global(true) .help("Increase output verbosity"), ) .subcommand( clap::SubCommand::with_name("completions") .about("Print shell completion script") .arg( clap::Arg::with_name("shell") .required(true) .possible_values(&clap::Shell::variants()) .help("Shell for which to generate completion script"), ), ) .subcommand( clap::SubCommand::with_name("index") .about( "List the latest version of each document \ with associated metadata", ) .arg( clap::Arg::with_name("type") .short("t") .long("type") .takes_value(true) .multiple(true) .possible_values(&["draft", "rfc", "bcp", "std"]) .help("Limit output by document type"), ), ) .subcommand( clap::SubCommand::with_name("summary") .about("Print a summary of the metadata in <doc>") .arg( clap::Arg::with_name("doc") .required(true) .help("Path to the document"), ), ) .subcommand( clap::SubCommand::with_name("sync") .about("Syncronize the local document mirror") .arg( clap::Arg::with_name("remote") .short("r") .long("remote") .default_value("rsync.tools.ietf.org::tools.html") .help("Remote 'rsync' target to sync from"), ) .arg( clap::Arg::with_name("command") .long("command") .default_value("rsync") .help("Rsync command"), ), ) } pub fn run(&self) -> Result<()> { match self.args.subcommand() { ("completions", Some(sub_matches)) => { self.print_completions(sub_matches); Ok(()) } (subcommand, Some(sub_matches)) => { let args = CliArgs::from(sub_matches); let exec = CmdExec::init(subcommand, &args)?; exec.run() } _ => Err(Error::CliError("No sub-command was found".to_string())), } } fn 
print_completions(&self, sub_matches: &clap::ArgMatches) { let shell = clap::Shell::from_str(sub_matches.value_of("shell").unwrap()).unwrap(); let mut app = Cli::build_cli(self.defaults); let _stdout = stdout(); #[cfg(not(test))] let mut writer = _stdout.lock(); #[cfg(test)] let mut writer = std::io::sink(); app.gen_completions_to(crate_name!(), shell, &mut writer); } } struct CliArgs<'a>(&'a clap::ArgMatches<'a>); impl<'a> CliArgs<'a> { fn from(sub_matches: &'a clap::ArgMatches<'a>) -> Self { CliArgs(sub_matches) } } impl ArgProvider for CliArgs<'_> { fn jobs(&self) -> usize { usize::from_str(self.0.value_of("jobs").unwrap()).unwrap() } fn dir(&self) -> PathBuf { PathBuf::from(self.0.value_of("dir").unwrap()) } fn verbosity(&self) -> usize { match self.0.occurrences_of("verbosity").try_into() { Ok(n) => n, Err(_) => usize::MAX, } } fn path(&self) -> PathBuf { PathBuf::from(self.0.value_of("doc").unwrap()) } fn rsync_cmd(&self) -> &str { self.0.value_of("command").unwrap() } fn rsync_remote(&self) -> &str { self.0.value_of("remote").unwrap() } fn types(&self) -> Option<Vec<&str>> { match self.0.values_of("type") { Some(values) => Some(values.collect()), None => None, } } } #[cfg(test)] mod test { use super::*; use crate::test::resource_path; use std::str::FromStr; struct DummyDefaults; impl DefaultsProvider for DummyDefaults { fn jobs(&self) -> &str { "1" } fn dir(&self) -> &OsStr { OsStr::new("/home/foo/rfz") } } #[test] fn test_cli_defaults() -> Result<()> { let defaults = Defaults::get()?; assert!(usize::from_str(defaults.jobs()).unwrap() > 0); Ok(()) } #[test] fn test_empty_args() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::MissingSubcommand), Ok(_) => panic!("Expected MissingSubcommand Error"), } } #[test] fn test_dummy_index() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), None); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_index_filtered() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "index", "--type", "rfc"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "index"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.jobs(), 1); assert_eq!(cli_args.dir(), PathBuf::from("/home/foo/rfz")); assert_eq!(cli_args.types(), Some(vec!["rfc"])); } _ => panic!("Cli parsing failed"), } } #[test] fn test_dummy_summary() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "summary", "/home/foo/rfz/bar.html"]); let cli = Cli::init_from(&defaults, argv).unwrap();
} #[test] fn test_dummy_sync() { let defaults = DummyDefaults {}; let argv = Some(vec!["rfz", "sync", "-v"]); let cli = Cli::init_from(&defaults, argv).unwrap(); match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "sync"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.rsync_cmd(), "rsync"); assert_eq!(cli_args.rsync_remote(), "rsync.tools.ietf.org::tools.html"); assert_eq!(cli_args.verbosity(), 1) } _ => panic!("Cli parsing failed"), } } #[test] fn test_exec_index() -> Result<()> { let defaults = Defaults::get()?; let dir = resource_path(""); let argv = Some(vec!["rfz", "index", "-d", dir.to_str().unwrap()]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_completions() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "bash"]); let cli = Cli::init_from(&defaults, argv).unwrap(); cli.run() } #[test] fn test_exec_unknown_shell() -> Result<()> { let defaults = Defaults::get()?; let argv = Some(vec!["rfz", "completions", "crash"]); match Cli::init_from(&defaults, argv) { Err(e) => assert_eq!(e.kind, clap::ErrorKind::InvalidValue), Ok(_) => panic!("Expected InvalidValue Error"), }; Ok(()) } }
match cli.args.subcommand() { (subcommand, Some(args)) => { assert_eq!(subcommand, "summary"); let cli_args = CliArgs::from(args); assert_eq!(cli_args.path(), PathBuf::from("/home/foo/rfz/bar.html")); } _ => panic!("Cli parsing failed"), }
if_condition
[ { "content": "fn index(args: &dyn ArgProvider) -> Result<()> {\n\n let collection = match Collection::from_dir(args.dir()) {\n\n Ok(set) => set,\n\n Err(e) => return Err(e),\n\n };\n\n let _stdout = stdout();\n\n #[cfg(not(test))]\n\n let mut writer = _stdout.lock();\n\n #[cfg(test)]\n\n let mut writer = std::io::sink();\n\n for result in collection\n\n .filter_types(args.types())\n\n .newest(1)\n\n .with_threads(args.jobs())\n\n .map(|doc| doc.fmt_line())\n\n {\n\n match result {\n\n Ok(line) => {\n\n if writeln!(writer, \"{}\", line).is_err() {\n\n return Ok(());\n\n }\n\n }\n\n Err(e) => eprintln!(\"{:?}\", e),\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 0, "score": 126843.1061820974 }, { "content": "fn sync(args: &dyn ArgProvider) -> Result<()> {\n\n let mut proc = Command::new(args.rsync_cmd());\n\n if args.verbosity() > 0 {\n\n proc.arg(format!(\"-{}\", \"v\".repeat(args.verbosity())));\n\n }\n\n proc.arg(\"--archive\")\n\n .arg(\"--compress\")\n\n .arg(\"--include=*.html\")\n\n .arg(\"--exclude=**\")\n\n .arg(\"--prune-empty-dirs\")\n\n .arg(args.rsync_remote())\n\n .arg(args.dir());\n\n let status = proc.status();\n\n match status {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(Error::SyncError(e)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/cmd.rs", "rank": 1, "score": 126843.1061820974 }, { "content": "type Cmd = fn(&dyn ArgProvider) -> Result<()>;\n\n\n\npub struct CmdExec<'a> {\n\n func: Cmd,\n\n args: &'a dyn ArgProvider,\n\n}\n\n\n\nimpl<'a> CmdExec<'a> {\n\n pub fn init(command: &str, args: &'a dyn ArgProvider) -> Result<Self> {\n\n let func = match command {\n\n \"index\" => index,\n\n \"summary\" => summary,\n\n \"sync\" => sync,\n\n _ => {\n\n return Err(Error::ImplementationNotFound(format!(\n\n \"Failed to find an implementation for sub-command '{}'\",\n\n command\n\n )))\n\n }\n\n };\n\n Ok(CmdExec { func, args })\n\n }\n\n\n\n pub fn run(&self) -> Result<()> {\n\n (self.func)(self.args)\n\n }\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 2, "score": 119819.58818057156 }, { "content": "fn summary(args: &dyn ArgProvider) -> Result<()> {\n\n match Document::from_path(args.path()) {\n\n Some(result) => match result {\n\n Ok(doc) => println!(\"{}\", doc.fmt_summary()?),\n\n Err(e) => return Err(e),\n\n },\n\n None => {\n\n return Err(Error::DocumentNotFound(format!(\n\n \"Failed to create a valid document from path '{:?}'\",\n\n args.path()\n\n )))\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 3, "score": 116091.64970861317 }, { "content": "pub fn resource_path(name: &str) -> PathBuf {\n\n let mut d = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"tests/resources\");\n\n d.push(name);\n\n d\n\n}\n", "file_path": "src/test.rs", "rank": 4, "score": 103137.89611825414 }, { "content": "pub trait ArgProvider {\n\n fn jobs(&self) -> usize;\n\n fn dir(&self) -> PathBuf;\n\n fn verbosity(&self) -> usize;\n\n fn path(&self) -> PathBuf;\n\n fn rsync_cmd(&self) -> &str;\n\n fn rsync_remote(&self) -> &str;\n\n fn types(&self) -> Option<Vec<&str>>;\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 7, "score": 83844.0001037048 }, { "content": "struct CollectionMap<'a>(HashMap<&'a String, BTreeMap<&'a i8, &'a Document>>);\n\n\n\nimpl CollectionMap<'_> {\n\n fn newest(self, count: u8) -> Collection {\n\n let mut collection = Vec::new();\n\n for versions in self.0.values() {\n\n for doc in versions.values().take(count.into()) {\n\n collection.push(doc.to_owned().to_owned());\n\n }\n\n }\n\n Collection(collection)\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use crate::test::resource_path;\n\n\n", "file_path": "src/collection.rs", "rank": 8, "score": 64868.31408664814 }, { "content": "fn main() -> Result<()> {\n\n let defaults = Defaults::get()?;\n\n let cli = Cli::init(&defaults);\n\n cli.run()\n\n}\n", "file_path": "src/main.rs", "rank": 9, "score": 62037.038018759675 }, { "content": "use std::convert::From;\n\nuse std::io;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n AttributeTypeMismatch(String),\n\n CliError(String),\n\n DirectoryReadError(io::Error),\n\n DocumentNotFound(String),\n\n DocumentParseError(io::Error),\n\n DuplicateAttribute(String),\n\n ImplementationNotFound(String),\n\n MetadataNotFound(String),\n\n MetadataRetrieval(String),\n\n SyncError(io::Error),\n\n UserDirectories(String),\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Self {\n", "file_path": "src/errors.rs", "rank": 10, "score": 23517.00573493517 }, { "content": " Error::DocumentParseError(err)\n\n }\n\n}\n\n\n\nimpl From<()> for Error {\n\n fn from(_: ()) -> Self {\n\n Error::MetadataNotFound(\"No <meta/> tags found in document <head/>\".to_string())\n\n }\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "src/errors.rs", "rank": 11, "score": 23511.248992053206 }, { "content": "use std::path::PathBuf;\n\n\n", "file_path": "src/test.rs", "rank": 12, "score": 23495.86015314713 }, { "content": " -i8::from_str(split.get(1).unwrap_or(&\"\").to_owned()).unwrap_or(0),\n\n )\n\n }\n\n None => return None,\n\n };\n\n Some(Ok(Document {\n\n id,\n\n version,\n\n path,\n\n meta: AtomicLazyCell::new(),\n\n }))\n\n }\n\n\n\n pub fn ensure_meta(&self) -> Result<&Self> {\n\n if !self.meta.filled() {\n\n let html = kuchiki::parse_html().from_utf8().from_file(&self.path)?;\n\n let meta = Metadata::from_html(html)?;\n\n match self.meta.fill(meta) {\n\n Ok(()) => {}\n\n Err(val) => {\n", "file_path": "src/document.rs", "rank": 13, "score": 21874.10536618688 }, { "content": " version: i8,\n\n path: PathBuf,\n\n meta: AtomicLazyCell<Metadata>,\n\n}\n\n\n\nimpl Document {\n\n pub fn from_path(path: PathBuf) -> Option<Result<Document>> {\n\n let file_name = match path.file_name() {\n\n Some(name) => match name.to_str() {\n\n Some(name) => name,\n\n None => return None,\n\n },\n\n None => return None,\n\n };\n\n let (id, version) = match file_name.strip_suffix(\".html\") {\n\n Some(name) => {\n\n let mut split = name.rsplitn(2, '-').collect::<Vec<&str>>();\n\n split.reverse();\n\n (\n\n split.get(0).unwrap().to_string(),\n", "file_path": "src/document.rs", "rank": 14, "score": 21873.120488233293 }, { "content": " }\n\n output.push_str(&format!(\n\n \"{}\",\n\n Colour::White.italic().paint(self.meta()?.fmt_summary())\n\n ));\n\n Ok(output)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Metadata(HashMap<String, MetadataAttr>);\n\n\n\nimpl Metadata {\n\n fn from_html(html: kuchiki::NodeRef) -> Result<Metadata> {\n\n let mut meta = HashMap::new();\n\n for node in html.select(SELECTOR)? 
{\n\n let attrs = node.attributes.borrow();\n\n let key = match attrs.get(\"name\") {\n\n Some(key) if key.starts_with(PREFIX) => {\n\n key.strip_prefix(PREFIX).unwrap().to_string()\n", "file_path": "src/document.rs", "rank": 15, "score": 21872.2799967833 }, { "content": " return Err(Error::MetadataRetrieval(format!(\n\n \"Failed to save metadata for document {:?}: {:?}\",\n\n self, val\n\n )))\n\n }\n\n };\n\n }\n\n Ok(self)\n\n }\n\n\n\n pub fn id(&self) -> &String {\n\n &self.id\n\n }\n\n\n\n pub fn version(&self) -> &i8 {\n\n &self.version\n\n }\n\n\n\n pub fn path(&self) -> &PathBuf {\n\n &self.path\n", "file_path": "src/document.rs", "rank": 16, "score": 21869.782306680747 }, { "content": " }\n\n\n\n pub fn meta(&self) -> Result<&Metadata> {\n\n Ok(&self.ensure_meta()?.meta.borrow().unwrap())\n\n }\n\n\n\n pub fn fmt_line(&self) -> Result<String> {\n\n let mut output = format!(\"{} \", self.path().to_str().unwrap());\n\n if self.id.starts_with(\"draft\") {\n\n output.push_str(&format!(\n\n \"{} (version {}) \",\n\n Colour::Blue.paint(self.id()),\n\n -self.version()\n\n ));\n\n } else {\n\n output.push_str(&format!(\n\n \"{} \",\n\n Colour::Cyan.bold().paint(self.id().to_uppercase())\n\n ));\n\n }\n", "file_path": "src/document.rs", "rank": 17, "score": 21869.608435648133 }, { "content": " assert!(matches!(maybe_doc, None))\n\n }\n\n\n\n #[test]\n\n fn test_no_html_suffix() {\n\n let file = \"not-found.xhtml\";\n\n let path = resource_path(file);\n\n let maybe_doc = Document::from_path(path);\n\n assert!(matches!(maybe_doc, None))\n\n }\n\n\n\n #[test]\n\n fn test_not_found() {\n\n let file = \"not-found.html\";\n\n let path = resource_path(file);\n\n let maybe_doc = Document::from_path(path).unwrap().unwrap();\n\n assert!(matches!(\n\n maybe_doc.ensure_meta(),\n\n Err(Error::DocumentParseError(_))\n\n ))\n", "file_path": "src/document.rs", "rank": 18, "score": 21869.47273843315 }, { "content": "use std::collections::hash_map::Entry;\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse ansi_term::Colour;\n\nuse kuchiki::traits::*;\n\nuse lazycell::AtomicLazyCell;\n\n\n\nuse crate::errors::{Error, Result};\n\n\n\nconst SELECTOR: &str = \"head>meta\";\n\n\n\nconst PREFIX: &str = \"DC.\";\n\n\n\nconst MULTIVALUED: &[&str] = &[\"Creator\", \"Relation.Replaces\"];\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Document {\n\n id: String,\n", "file_path": "src/document.rs", "rank": 19, "score": 21869.13275518569 }, { "content": " output.push_str(&format!(\n\n \"{}\",\n\n Colour::Black.italic().paint(self.meta()?.fmt_line())\n\n ));\n\n Ok(output)\n\n }\n\n\n\n pub fn fmt_summary(&self) -> Result<String> {\n\n let mut output = format!(\"{} \", self.path().to_str().unwrap());\n\n if self.id.starts_with(\"draft\") {\n\n output.push_str(&format!(\n\n \"{} (version {})\\n\\n\",\n\n Colour::Blue.paint(self.id()),\n\n -self.version()\n\n ));\n\n } else {\n\n output.push_str(&format!(\n\n \"{}\\n\\n\",\n\n Colour::Cyan.bold().paint(self.id().to_uppercase())\n\n ));\n", "file_path": "src/document.rs", "rank": 20, "score": 21868.603082409725 }, { "content": " }\n\n Some(_) | None => continue,\n\n };\n\n let multivalued = MULTIVALUED.contains(&(key.as_str()));\n\n let value = match attrs.get(\"content\") {\n\n Some(value) => value.to_string(),\n\n None => continue,\n\n };\n\n match meta.entry(key) {\n\n Entry::Vacant(e) => {\n\n if multivalued {\n\n e.insert(MetadataAttr::Many(Vec::from([value])));\n\n } else {\n\n e.insert(MetadataAttr::One(value));\n\n }\n\n 
}\n\n Entry::Occupied(mut e) => {\n\n if multivalued {\n\n match e.get_mut() {\n\n MetadataAttr::One(_) => {\n", "file_path": "src/document.rs", "rank": 21, "score": 21868.102534329017 }, { "content": " use super::*;\n\n\n\n use crate::test::resource_path;\n\n\n\n #[test]\n\n fn test_well_formed_rfc() -> Result<()> {\n\n let file = \"rfc6468.html\";\n\n let path = resource_path(file);\n\n let test_path = path.clone();\n\n let doc = Document::from_path(path).unwrap()?;\n\n assert_eq!(\"rfc6468\", doc.id());\n\n assert_eq!(&0, doc.version());\n\n assert_eq!(&test_path, doc.path());\n\n let _meta = doc.meta()?;\n\n let strings = &[\n\n file,\n\n \"RFC6468\",\n\n \"urn:ietf:rfc:6468\",\n\n \"Sieve Notification Mechanism: SIP MESSAGE\",\n\n \"Alexey Melnikov <[email protected]>\",\n", "file_path": "src/document.rs", "rank": 22, "score": 21867.828226703215 }, { "content": " \"February, 2012\",\n\n \"draft-melnikov-sieve-notify-sip-message\",\n\n \"This document describes a profile of the Sieve extension for\",\n\n \"notifications, to allow notifications to be sent over the SIP MESSAGE.\",\n\n ];\n\n for out in &[doc.fmt_line()?, doc.fmt_summary()?] {\n\n for string in strings {\n\n assert!(out.contains(string), \"'{}' not found in output\", string);\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_well_formed_draft() -> Result<()> {\n\n let file = \"draft-ietf-sidrops-rpkimaxlen-05.html\";\n\n let path = resource_path(file);\n\n let test_path = path.clone();\n\n let doc = Document::from_path(path).unwrap()?;\n\n assert_eq!(\"draft-ietf-sidrops-rpkimaxlen\", doc.id());\n", "file_path": "src/document.rs", "rank": 23, "score": 21867.558631961034 }, { "content": " \"The recommendations complement and extend those in RFC 7115. The\",\n\n \"document also discusses creation of ROAs for facilitating the use of\",\n\n \"Distributed Denial of Service (DDoS) mitigation services.\",\n\n \"Considerations related to ROAs and origin validation in the context of\",\n\n \"destination-based Remote Triggered Black Hole (RTBH) filtering are\",\n\n \"also highlighted.\",\n\n ];\n\n for out in &[doc.fmt_line()?, doc.fmt_summary()?] 
{\n\n for string in strings {\n\n assert!(out.contains(string), \"'{}' not found in output\", string);\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_bad_path() {\n\n let file = \"...\";\n\n let path = resource_path(file);\n\n let maybe_doc = Document::from_path(path);\n", "file_path": "src/document.rs", "rank": 24, "score": 21866.837442392472 }, { "content": " }\n\n\n\n #[test]\n\n fn test_duplicate_attributes() {\n\n let file = \"draft-duplicates-00.html\";\n\n let path = resource_path(file);\n\n let maybe_doc = Document::from_path(path).unwrap().unwrap();\n\n assert!(matches!(\n\n maybe_doc.ensure_meta(),\n\n Err(Error::DuplicateAttribute(_))\n\n ))\n\n }\n\n}\n", "file_path": "src/document.rs", "rank": 25, "score": 21865.940830831256 }, { "content": " return Err(Error::AttributeTypeMismatch(format!(\n\n \"Expected multivalued attribute type for '{}'\",\n\n e.key()\n\n )))\n\n }\n\n MetadataAttr::Many(values) => values.push(value),\n\n }\n\n } else {\n\n return Err(Error::DuplicateAttribute(format!(\n\n \"Got unexpected duplicate attribute '{}'\",\n\n e.key()\n\n )));\n\n }\n\n }\n\n }\n\n }\n\n Ok(Metadata(meta))\n\n }\n\n\n\n fn fmt(&self, attr_sep: &str, keyval_sep: &str, val_sep: &str, replace_nl: bool) -> String {\n", "file_path": "src/document.rs", "rank": 26, "score": 21865.610723767753 }, { "content": " .join(attr_sep)\n\n }\n\n\n\n fn fmt_line(&self) -> String {\n\n format!(\"<{}>\", self.fmt(\" // \", \": \", \"; \", true))\n\n }\n\n\n\n fn fmt_summary(&self) -> String {\n\n self.fmt(\"\\n\\n\", \":\\n\", \";\\n\", false)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum MetadataAttr {\n\n One(String),\n\n Many(Vec<String>),\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/document.rs", "rank": 27, "score": 21864.72041315523 }, { "content": " assert_eq!(&(-5), doc.version());\n\n assert_eq!(&test_path, doc.path());\n\n let _meta = doc.meta()?;\n\n let strings = &[\n\n file,\n\n \"draft-ietf-sidrops-rpkimaxlen\",\n\n \"(version 5)\",\n\n \"urn:ietf:id:ietf-sidrops-rpkimaxlen\",\n\n \"The Use of Maxlength in the RPKI\",\n\n \"Snijders, Job;\",\n\n \"Gilad, Yossi;\",\n\n \"Maddison, Ben;\",\n\n \"Goldberg, Sharon;\",\n\n \"Sriram, Kotikalapudi\",\n\n \"2020-11-02\",\n\n \"draft-yossigi-rpkimaxlen\",\n\n \"This document recommends ways to reduce forged-origin hijack attack\",\n\n \"surface by prudently limiting the set of IP prefixes that are included\",\n\n \"in a Route Origin Authorization (ROA). 
One recommendation is to avoid\",\n\n \"using the maxLength attribute in ROAs except in some specific cases.\",\n", "file_path": "src/document.rs", "rank": 28, "score": 21861.693889085764 }, { "content": " (&self.0)\n\n .iter()\n\n .map(|(key, value)| -> String {\n\n format!(\n\n \"{}{}{}\",\n\n key,\n\n keyval_sep,\n\n match value {\n\n MetadataAttr::One(value) => {\n\n if replace_nl {\n\n value.replace(\"\\n\", \" \")\n\n } else {\n\n value.to_string()\n\n }\n\n }\n\n MetadataAttr::Many(values) => values.join(val_sep),\n\n }\n\n )\n\n })\n\n .collect::<Vec<String>>()\n", "file_path": "src/document.rs", "rank": 29, "score": 21858.98752886251 }, { "content": "# `rfz`\n\n\n\nAn indexer and metadata viewer for repositories of IETF documents synced to the\n\nlocal file system.\n\n\n\n[![crates.io](http://meritbadge.herokuapp.com/rfz)](https://crates.io/crates/rfz)\n\n[![ci](https://github.com/benmaddison/rfz/workflows/ci/badge.svg)](https://github.com/benmaddison/rfz/actions?query=workflow%3Aci)\n\n[![audit](https://github.com/benmaddison/rfz/workflows/audit/badge.svg)](https://github.com/benmaddison/rfz/actions?query=workflow%3Aaudit)\n\n[![codecov](https://codecov.io/gh/benmaddison/rfz/branch/master/graph/badge.svg?token=ktq7dOYD49)](https://codecov.io/gh/benmaddison/rfz)\n\n\n\n## Installation\n\n\n\nInstall via cargo:\n\n\n\n```bash\n\n$ cargo install rfz\n\n```\n\n\n\n`rsync` is required in order to use `rfz sync`.\n\n\n\n## Usage\n\n\n\nSee `rfz --help` for basic command-line usage.\n\n\n\n`rfz` expects to find a directory containing a local mirror of the\n\n`rsync.tools.ietf.org::tools.html` `rsync` target.\n\n\n\nThe path to this directory can be set with `--dir` and defaults to\n\n`${XDG_DATA_DIR:-${HOME}/.local/share}/rfz`.\n\n\n\n`rfz sync` will create the directory if it does not already exist, and call\n\n`rsync` to retrieve the contents.\n\n\n\nExample `systemd` units to run `rfz sync` every hour are included in `extras/`.\n\n\n\n`rfz` can be used standalone, but is designed to be used along side `fzf` and a\n\ntext-mode browser (e.g. `lynx` or `w3m`):\n\n\n\n```bash\n\n#!/usr/bin/env bash\n\nrfz index | fzf \\\n\n # trim the path from the 'fzf' display\n\n --with-nth=2.. 
\\\n\n # show coloured output\n\n --ansi \\\n\n # show the document metadata in the preview\n\n --preview 'rfz summary {1}' \\\n\n | cut -d' ' -f1 \\\n\n | lynx -\n\n```\n", "file_path": "README.md", "rank": 47, "score": 14182.37886769992 }, { "content": "\n\n #[test]\n\n fn test_document_not_found() {\n\n let args = DummyArgs {\n\n jobs: None,\n\n dir: None,\n\n verbosity: 0,\n\n path: Some(resource_path(\"not-found\")),\n\n rsync_cmd: None,\n\n rsync_remote: None,\n\n types: None,\n\n };\n\n let exec = CmdExec::init(\"summary\", &args).unwrap();\n\n match exec.run() {\n\n Err(Error::DocumentNotFound(_)) => (),\n\n _ => panic!(\"Expected DocumentNotFound error\"),\n\n }\n\n }\n\n}\n", "file_path": "src/cmd.rs", "rank": 48, "score": 25.763977306100163 }, { "content": " fn test_index_cmd() -> Result<()> {\n\n let args = DummyArgs {\n\n jobs: Some(2),\n\n dir: Some(resource_path(\"\")),\n\n verbosity: 0,\n\n path: None,\n\n rsync_cmd: None,\n\n rsync_remote: None,\n\n types: None,\n\n };\n\n let exec = CmdExec::init(\"index\", &args)?;\n\n exec.run()\n\n }\n\n\n\n #[test]\n\n fn test_summary_cmd() -> Result<()> {\n\n let args = DummyArgs {\n\n jobs: None,\n\n dir: None,\n\n verbosity: 0,\n", "file_path": "src/cmd.rs", "rank": 49, "score": 24.64134136420852 }, { "content": "use std::collections::{hash_map, BTreeMap, HashMap};\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::slice;\n\nuse std::vec;\n\n\n\nuse crate::document::Document;\n\nuse crate::errors::{Error, Result};\n\n\n\n#[derive(Clone)]\n\npub struct Collection(Vec<Document>);\n\n\n\nimpl Collection {\n\n pub fn from_dir(path: PathBuf) -> Result<Self> {\n\n let dir = match fs::read_dir(path) {\n\n Ok(dir) => dir,\n\n Err(e) => return Err(Error::DirectoryReadError(e)),\n\n };\n\n let mut collection = Vec::new();\n\n for dir_entry in dir {\n", "file_path": "src/collection.rs", "rank": 50, "score": 24.40498433756195 }, { "content": " path: Some(resource_path(\"rfc6468.html\")),\n\n rsync_cmd: None,\n\n rsync_remote: None,\n\n types: None,\n\n };\n\n let exec = CmdExec::init(\"summary\", &args)?;\n\n exec.run()\n\n }\n\n\n\n #[test]\n\n fn test_sync_cmd() -> Result<()> {\n\n let args = DummyArgs {\n\n jobs: None,\n\n dir: Some(resource_path(\"\")),\n\n verbosity: 2,\n\n path: None,\n\n rsync_cmd: Some(String::from(\"/bin/true\")),\n\n rsync_remote: Some(String::from(\"rsync.example.com::dummy\")),\n\n types: None,\n\n };\n", "file_path": "src/cmd.rs", "rank": 51, "score": 24.380469779928497 }, { "content": " let exec = CmdExec::init(\"sync\", &args)?;\n\n exec.run()\n\n }\n\n\n\n #[test]\n\n fn test_not_implemented() {\n\n let args = DummyArgs {\n\n jobs: None,\n\n dir: None,\n\n verbosity: 0,\n\n path: None,\n\n rsync_cmd: None,\n\n rsync_remote: None,\n\n types: None,\n\n };\n\n match CmdExec::init(\"invalid\", &args) {\n\n Err(Error::ImplementationNotFound(_)) => (),\n\n _ => panic!(\"Expected ImplementationNotFound error\"),\n\n }\n\n }\n", "file_path": "src/cmd.rs", "rank": 52, "score": 23.72261225181834 }, { "content": "mod test {\n\n use super::*;\n\n\n\n use crate::test::resource_path;\n\n\n\n struct DummyArgs {\n\n jobs: Option<usize>,\n\n dir: Option<PathBuf>,\n\n verbosity: usize,\n\n path: Option<PathBuf>,\n\n rsync_cmd: Option<String>,\n\n rsync_remote: Option<String>,\n\n types: Option<Vec<&'static str>>,\n\n }\n\n\n\n impl ArgProvider for DummyArgs {\n\n fn jobs(&self) -> usize {\n\n self.jobs.unwrap()\n\n }\n\n fn dir(&self) -> PathBuf {\n", "file_path": "src/cmd.rs", "rank": 53, "score": 23.257001641610902 }, { 
"content": "extern crate clap;\n\nextern crate directories;\n\nextern crate kuchiki;\n\nextern crate lazycell;\n\nextern crate num_cpus;\n\nextern crate pipeliner;\n\n\n\nmod cli;\n\nmod cmd;\n\nmod collection;\n\nmod document;\n\nmod errors;\n\n\n\n#[cfg(test)]\n\nmod test;\n\n\n\npub use cli::{Cli, Defaults};\n\npub use errors::Result;\n", "file_path": "src/lib.rs", "rank": 54, "score": 18.62980804512697 }, { "content": " let doc_path = match dir_entry {\n\n Ok(e) => e.path(),\n\n Err(_) => continue,\n\n };\n\n if !doc_path.is_file() {\n\n continue;\n\n }\n\n let doc = match Document::from_path(doc_path) {\n\n Some(result) => match result {\n\n Ok(doc) => doc,\n\n Err(e) => return Err(e),\n\n },\n\n None => continue,\n\n };\n\n collection.push(doc);\n\n }\n\n Ok(Collection(collection))\n\n }\n\n\n\n pub fn newest(&self, count: u8) -> Self {\n", "file_path": "src/collection.rs", "rank": 55, "score": 18.29469331645749 }, { "content": "use std::io::{stdout, Write};\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\n\n\nuse pipeliner::Pipeline;\n\n\n\nuse crate::collection::Collection;\n\nuse crate::document::Document;\n\nuse crate::errors::{Error, Result};\n\n\n", "file_path": "src/cmd.rs", "rank": 56, "score": 15.399316749247768 }, { "content": " let filtered = Collection::from_dir(path)?.filter_types(types);\n\n assert_eq!(filtered.into_iter().count(), 1);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_bad_path() {\n\n let path = resource_path(\"not-found\");\n\n let maybe_collection = Collection::from_dir(path);\n\n assert!(matches!(\n\n maybe_collection,\n\n Err(Error::DirectoryReadError(_))\n\n ))\n\n }\n\n}\n", "file_path": "src/collection.rs", "rank": 57, "score": 14.961842202022511 }, { "content": " #[test]\n\n fn test_construct_collection() -> Result<()> {\n\n let path = resource_path(\"\");\n\n let collection = Collection::from_dir(path)?;\n\n assert_eq!(collection.into_iter().count(), 4);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_newest_collection() -> Result<()> {\n\n let path = resource_path(\"\");\n\n let newest = Collection::from_dir(path)?.newest(1);\n\n assert_eq!(newest.into_iter().count(), 3);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_type_filter() -> Result<()> {\n\n let path = resource_path(\"\");\n\n let types = Some(vec![\"rfc\", \"bcp\"]);\n", "file_path": "src/collection.rs", "rank": 58, "score": 13.538698323329587 }, { "content": "use rfz::{Cli, Defaults, Result};\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 12.928342868900305 }, { "content": " let mut map = BTreeMap::new();\n\n map.insert(doc.version(), doc);\n\n e.insert(map);\n\n }\n\n hash_map::Entry::Occupied(mut e) => {\n\n let map = e.get_mut();\n\n map.insert(doc.version(), doc);\n\n }\n\n };\n\n }\n\n CollectionMap(map)\n\n }\n\n}\n\n\n\nimpl IntoIterator for Collection {\n\n type Item = Document;\n\n type IntoIter = vec::IntoIter<Document>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n", "file_path": "src/collection.rs", "rank": 60, "score": 12.03193453354653 }, { "content": " self.to_map().newest(count)\n\n }\n\n\n\n pub fn filter_types(&self, types: Option<Vec<&str>>) -> Self {\n\n match types {\n\n Some(types) => Collection(\n\n self.into_iter()\n\n .filter(|&doc| types.iter().any(|t| doc.id().starts_with(t)))\n\n .map(|doc| doc.to_owned())\n\n .collect(),\n\n ),\n\n None => self.to_owned(),\n\n }\n\n }\n\n\n\n fn to_map(&self) -> CollectionMap {\n\n let mut map = HashMap::new();\n\n for doc in self {\n\n match map.entry(doc.id()) {\n\n 
hash_map::Entry::Vacant(e) => {\n", "file_path": "src/collection.rs", "rank": 61, "score": 11.751823146896553 }, { "content": " self.dir.as_ref().unwrap().to_owned()\n\n }\n\n fn verbosity(&self) -> usize {\n\n self.verbosity.to_owned()\n\n }\n\n fn path(&self) -> PathBuf {\n\n self.path.as_ref().unwrap().to_owned()\n\n }\n\n fn rsync_cmd(&self) -> &str {\n\n self.rsync_cmd.as_ref().unwrap()\n\n }\n\n fn rsync_remote(&self) -> &str {\n\n self.rsync_remote.as_ref().unwrap()\n\n }\n\n fn types(&self) -> Option<Vec<&str>> {\n\n self.types.to_owned()\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/cmd.rs", "rank": 62, "score": 10.398242406026299 }, { "content": " }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Collection {\n\n type Item = &'a Document;\n\n type IntoIter = slice::Iter<'a, Document>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n (&self.0).iter()\n\n }\n\n}\n\n\n", "file_path": "src/collection.rs", "rank": 63, "score": 7.376168790312111 } ]
Rust
src/protocol/argument.rs
dylanmckay/flep
c020400f4ead85c6261dbe29bded876aad83af97
use {Error, ErrorKind}; use std::io::prelude::*; use std::ascii::AsciiExt; use std::io; pub trait Argument : Sized { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; assert_eq!(read.read(&mut buf)?, 1, "unexpected EOF while checking for space"); assert_eq!(buf[0] as char, ' ', "expected space preceding argument"); Self::read(read) } fn read(read: &mut BufRead) -> Result<Self, Error>; fn write(&self, write: &mut Write) -> Result<(), Error>; fn parse_text(text: &str) -> Self { let mut buffer = io::Cursor::new(text); Self::read_with_space(&mut buffer).unwrap() } fn bytes(&self) -> Vec<u8> { let mut buffer = io::Cursor::new(Vec::new()); self.write(&mut buffer).unwrap(); buffer.into_inner() } fn to_string(&self) -> String { String::from_utf8(self.bytes()).unwrap() } } impl Argument for String { fn read(read: &mut BufRead) -> Result<Self, Error> { let bytes: Result<Vec<u8>, _> = read.bytes().collect(); let bytes = bytes?; match String::from_utf8(bytes) { Ok(s) => Ok(s), Err(..) => Err(ErrorKind::InvalidArgument("argument is not valid UTF-8".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { for c in self.chars() { assert!(c.is_ascii(), "only ASCII is supported in FTP"); } write!(write, "{}", self)?; Ok(()) } } macro_rules! impl_argument_integer { ($ty:ty) => { impl Argument for $ty { fn read(read: &mut BufRead) -> Result<Self, Error> { let s = String::read(read)?; match s.parse() { Ok(i) => Ok(i), Err(..) => Err(ErrorKind::InvalidArgument("argument is not an integer".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { write!(write, "{}", self)?; Ok(()) } } } } impl_argument_integer!(u8); impl_argument_integer!(i8); impl_argument_integer!(u16); impl_argument_integer!(i16); impl_argument_integer!(u32); impl_argument_integer!(i32); impl_argument_integer!(u64); impl_argument_integer!(i64); impl<T: Argument> Argument for Option<T> { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; if read.read(&mut buf)? == 1 { let inner = T::read(read)?; Ok(Some(inner)) } else { Ok(None) } } fn read(_read: &mut BufRead) -> Result<Self, Error> { unreachable!(); } fn write(&self, write: &mut Write) -> Result<(), Error> { if let Some(ref thing) = *self { write!(write, " ")?; thing.write(write)?; } Ok(()) } } #[cfg(test)] mod test { pub use super::*; mod optional { use std::io; pub use super::*; fn parse<T: Argument>(text: &str) -> Option<T> { let mut buf = io::Cursor::new(text); Argument::read_with_space(&mut buf).unwrap() } #[test] fn correctly_reads_a_present_value() { let value: Option<String> = parse(" foo"); assert_eq!(value, Some("foo".to_owned())); } #[test] fn correctly_reads_a_missing_value() { let value: Option<String> = parse(""); assert_eq!(value, None); } #[test] fn correctly_writes_a_present_value() { assert_eq!(Some("foo".to_owned()).to_string(), " foo"); } #[test] fn correctly_writes_an_empty_value() { let value: Option<String> = None; assert_eq!(value.to_string(), ""); } } }
use {Error, ErrorKind}; use std::io::prelude::*; use std::ascii::AsciiExt; use std::io; pub trait Argument : Sized { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; assert_eq!(read.read(&mut buf)?, 1, "unexpected EOF while checking for space"); assert_eq!(buf[0] as char, ' ', "expected space preceding argument"); Self::read(read) } fn read(read: &mut BufRead) -> Result<Self, Error>; fn write(&self, write: &mut Write) -> Result<(), Error>; fn parse_text(text: &str) -> Self { let mut buffer = io::Cursor::new(text); Self::read_with_space(&mut buffer).unwrap() } fn bytes(&self) -> Vec<u8> { let mut buffer = io::Cursor::new(Vec::new()); self.write(&mut buffer).unwrap(); buffer.into_inner() } fn to_string(&self) -> String { String::from_utf8(self.bytes()).unwrap() } } impl Argument for String { fn read(read: &mut BufRead) -> Result<Self, Error> { let bytes: Result<Vec<u8>, _> = read.bytes().collect(); let bytes = bytes?;
} fn write(&self, write: &mut Write) -> Result<(), Error> { for c in self.chars() { assert!(c.is_ascii(), "only ASCII is supported in FTP"); } write!(write, "{}", self)?; Ok(()) } } macro_rules! impl_argument_integer { ($ty:ty) => { impl Argument for $ty { fn read(read: &mut BufRead) -> Result<Self, Error> { let s = String::read(read)?; match s.parse() { Ok(i) => Ok(i), Err(..) => Err(ErrorKind::InvalidArgument("argument is not an integer".to_owned()).into()), } } fn write(&self, write: &mut Write) -> Result<(), Error> { write!(write, "{}", self)?; Ok(()) } } } } impl_argument_integer!(u8); impl_argument_integer!(i8); impl_argument_integer!(u16); impl_argument_integer!(i16); impl_argument_integer!(u32); impl_argument_integer!(i32); impl_argument_integer!(u64); impl_argument_integer!(i64); impl<T: Argument> Argument for Option<T> { fn read_with_space(read: &mut BufRead) -> Result<Self, Error> { let mut buf: [u8; 1] = [0]; if read.read(&mut buf)? == 1 { let inner = T::read(read)?; Ok(Some(inner)) } else { Ok(None) } } fn read(_read: &mut BufRead) -> Result<Self, Error> { unreachable!(); } fn write(&self, write: &mut Write) -> Result<(), Error> { if let Some(ref thing) = *self { write!(write, " ")?; thing.write(write)?; } Ok(()) } } #[cfg(test)] mod test { pub use super::*; mod optional { use std::io; pub use super::*; fn parse<T: Argument>(text: &str) -> Option<T> { let mut buf = io::Cursor::new(text); Argument::read_with_space(&mut buf).unwrap() } #[test] fn correctly_reads_a_present_value() { let value: Option<String> = parse(" foo"); assert_eq!(value, Some("foo".to_owned())); } #[test] fn correctly_reads_a_missing_value() { let value: Option<String> = parse(""); assert_eq!(value, None); } #[test] fn correctly_writes_a_present_value() { assert_eq!(Some("foo".to_owned()).to_string(), " foo"); } #[test] fn correctly_writes_an_empty_value() { let value: Option<String> = None; assert_eq!(value.to_string(), ""); } } }
match String::from_utf8(bytes) { Ok(s) => Ok(s), Err(..) => Err(ErrorKind::InvalidArgument("argument is not valid UTF-8".to_owned()).into()), }
if_condition
[ { "content": "/// Runs a FTP server on a given address.\n\n///\n\n/// Sets up an FTP server locally and begins to wait for clients\n\n/// to connect.\n\npub fn run<F,A>(server: &mut F, address: A) -> Result<(), Error>\n\n where F: Server,\n\n A: ToSocketAddrs {\n\n let mut addresses = address.to_socket_addrs()?;\n\n let address = match addresses.next() {\n\n Some(addr) => addr,\n\n None => return Err(\"could not resolve to any addresses\".into()),\n\n };\n\n\n\n debug!(\"running server\");\n\n\n\n // Setup the server socket\n\n let listener = TcpListener::bind(&address)?;\n\n let mut io = Io::new()?;\n\n\n\n // Start listening for incoming connections\n\n io.poll.register(&listener, SERVER_TOKEN, Ready::readable(),\n\n PollOpt::edge())?;\n\n\n\n // Create storage for events\n", "file_path": "src/server/run.rs", "rank": 0, "score": 186431.26222120406 }, { "content": "/// Handle the 'PWD' command.\n\npub fn handle(client: &mut ClientState) -> Result<Action, Error> {\n\n let session = client.session.expect_ready()?;\n\n Ok(Action::Reply(protocol::reply::pwd::success(&session.working_dir)))\n\n}\n", "file_path": "src/server/client/state/handle/pwd.rs", "rank": 1, "score": 185654.6862786807 }, { "content": "/// Handle the 'CDUP' command.\n\npub fn handle(client: &mut ClientState) -> Result<Action, Error> {\n\n let mut session = client.session.expect_ready_mut()?;\n\n\n\n match session.working_dir.parent().map(ToOwned::to_owned) {\n\n Some(parent) => {\n\n session.working_dir = parent;\n\n Ok(Action::Reply(protocol::reply::cdup::success()))\n\n },\n\n None => Ok(Action::Reply(protocol::reply::cdup::no_parent()))\n\n }\n\n}\n", "file_path": "src/server/client/state/handle/cdup.rs", "rank": 2, "score": 185654.6862786807 }, { "content": "/// Handle the 'SYST' command.\n\npub fn handle() -> Result<Action, Error> {\n\n Ok(Action::Reply(protocol::reply::syst::success(protocol::rfc1700::system::UNIX.to_owned())))\n\n}\n", "file_path": "src/server/client/state/handle/syst.rs", "rank": 4, "score": 165922.64408830227 }, { "content": "/// Handle the 'FEAT' command.\n\npub fn handle() -> Result<Action, Error> {\n\n Ok(Action::Reply(protocol::reply::feat::Features::default().into()))\n\n}\n", "file_path": "src/server/client/state/handle/feat.rs", "rank": 5, "score": 165922.64408830227 }, { "content": "/// Handle the 'QUIT' command.\n\npub fn handle() -> Result<Action, Error> {\n\n Ok(Action::Reply(protocol::Reply::new(\n\n protocol::reply::code::SERVICE_CLOSING_CONTROL_CONNECTION,\n\n \"goodbye\")))\n\n}\n", "file_path": "src/server/client/state/handle/quit.rs", "rank": 6, "score": 165922.64408830227 }, { "content": "/// Generate a reply for an unimplemented command.\n\nfn unimplemented(command_name: &'static str) -> Result<Action, Error> {\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::UnimplementedCommand(\n\n command_name.to_string(),\n\n )).into())\n\n}\n", "file_path": "src/server/client/state/handle/mod.rs", "rank": 7, "score": 157394.0111438636 }, { "content": "/// Initializes the default logger.\n\npub fn initialize_default() -> Result<(), log::SetLoggerError> {\n\n log::set_logger(|max_log_level| {\n\n max_log_level.set(LOG_LEVEL_FILTER);\n\n Box::new(SimpleLogger)\n\n })\n\n}\n\n\n\nimpl log::Log for SimpleLogger {\n\n fn enabled(&self, metadata: &LogMetadata) -> bool {\n\n metadata.level() <= LOG_LEVEL_FILTER.to_log_level().unwrap()\n\n }\n\n\n\n fn log(&self, record: &LogRecord) {\n\n if self.enabled(record.metadata()) {\n\n println!(\"{} - {}\", record.level(), record.args());\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "src/util/log.rs", "rank": 8, "score": 147906.89018448218 }, { "content": "/// Handles an IO event on the protocol or data connections.\n\npub fn handle_event(state: &mut ClientState,\n\n event: &mio::Event,\n\n connection: &mut Connection,\n\n the_token: mio::Token,\n\n server: &mut Server,\n\n io: &mut Io)\n\n -> Result<(), Error> {\n\n if the_token == connection.pi.token && event.readiness().is_readable() {\n\n handle_protocol_event(state, event, connection, io, server)\n\n } else {\n\n handle_data_event(event, connection, io)\n\n }\n\n}\n\n\n", "file_path": "src/server/client/client_io.rs", "rank": 9, "score": 119597.74482320281 }, { "content": "/// Handles a command sent to a server from a client.\n\npub fn command(client: &mut ClientState,\n\n command: &protocol::CommandKind,\n\n server: &mut Server) -> Result<Action, Error> {\n\n use protocol::CommandKind::*;\n\n\n\n debug!(\"received command: {:?}\", command);\n\n\n\n match *command {\n\n // User attempting to log in.\n\n USER(ref user) => self::user::handle(user, client, server),\n\n PASS(ref pass) => self::pass::handle(pass, client, server),\n\n PWD(..) => self::pwd::handle(client),\n\n CWD(ref cwd) => self::cwd::handle(cwd, client),\n\n CDUP(..) => self::cdup::handle(client),\n\n MKD(ref mkd) => self::mkd::handle(mkd, client, server),\n\n LIST(ref list) => self::list::handle(list, client, server),\n\n // ClientState requesting information about the server system.\n\n SYST(..) => self::syst::handle(),\n\n FEAT(..) => self::feat::handle(),\n\n TYPE(ref ty) => self::ty::handle(ty, client),\n", "file_path": "src/server/client/state/handle/mod.rs", "rank": 10, "score": 119597.74482320281 }, { "content": "pub fn handle_epsv(client: &mut ClientState)\n\n -> Result<Action, Error> {\n\n let port = listen_passive_dtp(client)?;\n\n Ok(Action::EstablishDataConnection {\n\n reply: protocol::reply::epsv::success(port),\n\n mode: DataTransferMode::Passive { port: port }\n\n })\n\n}\n\n\n", "file_path": "src/server/client/state/handle/passive.rs", "rank": 11, "score": 117676.96795980385 }, { "content": "pub fn handle_pasv(client: &mut ClientState)\n\n -> Result<Action, Error> {\n\n let port = listen_passive_dtp(client)?;\n\n Ok(Action::EstablishDataConnection {\n\n reply: protocol::reply::pasv::success(port),\n\n mode: DataTransferMode::Passive { port: port }\n\n })\n\n}\n\n\n", "file_path": "src/server/client/state/handle/passive.rs", "rank": 12, "score": 117676.96795980385 }, { "content": "/// An FTP server instance.\n\npub trait Server\n\n{\n\n /// Gets the welcome message shown when connecting to the server.\n\n fn welcome_message(&self) -> String;\n\n\n\n /// Attempts to authenticate a user.\n\n fn authenticate_user(&self, _credentials: &Credentials) -> bool { true }\n\n\n\n fn file_system(&self) -> &FileSystem;\n\n fn file_system_mut(&mut self) -> &mut FileSystem;\n\n}\n\n\n", "file_path": "src/server/server.rs", "rank": 13, "score": 99794.57131713841 }, { "content": "/// A filesystem mountable as FTP.\n\npub trait FileSystem\n\n{\n\n /// List all files/directories at a specific path.\n\n fn list(&self, path: &Path) -> Result<Vec<String>, Error>;\n\n\n\n /// Make a new directory.\n\n fn create_dir(&mut self, path: &Path) -> Result<(), Error>;\n\n\n\n /// Write data into a file.\n\n fn write_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), Error>;\n\n\n\n /// Read data from a file.\n\n fn read_file(&self, path: &Path) -> Result<Vec<u8>, Error>;\n\n}\n\n\n", "file_path": "src/fs/mod.rs", "rank": 
14, "score": 97869.53783578635 }, { "content": "/// Something which we can derive a reply code from.\n\npub trait AsReplyCode {\n\n /// Gets the reply code for the object.\n\n fn as_reply_code(&self) -> Code;\n\n}\n\n\n\npub const OK: Code = Code(200);\n\npub const INVALID_COMMAND: Code = Code(500);\n\npub const SYNTAX_ERROR: Code = Code(501);\n\npub const COMMAND_NOT_IMPLEMENTED_SUPERFLOUS: Code = Code(202);\n\npub const COMMAND_NOT_IMPLEMENTED: Code = Code(502);\n\npub const BAD_COMMAND_SEQUENCE: Code = Code(503);\n\npub const COMMAND_NOT_IMPLEMENTED_FOR_PARAMETER: Code = Code(504);\n\npub const RESTART_MARKER_REPLY: Code = Code(110);\n\npub const STATUS_OR_HELP_REPLY: Code = Code(211);\n\npub const DIRECTORY_STATUS: Code = Code(212);\n\npub const FILE_STATUS: Code = Code(213);\n\npub const HELP_MESSAGE: Code = Code(214);\n\npub const SYSTEM_NAME_TYPE: Code = Code(215);\n\npub const SERVICE_READY_ETA: Code = Code(120);\n\npub const SERVICE_READY_FOR_NEW_USER: Code = Code(220);\n", "file_path": "src/protocol/reply/code.rs", "rank": 15, "score": 96061.34373113693 }, { "content": "/// Does the state tick.\n\nfn tick(state: &mut ClientState,\n\n connection: &mut Connection,\n\n io: &mut Io) -> Result<(), Error> {\n\n match state.session {\n\n Session::Ready(ref mut session) => {\n\n let active_transfer = std::mem::replace(&mut session.active_transfer, None);\n\n\n\n if let Some(active_transfer) = active_transfer {\n\n let dtp = std::mem::replace(&mut connection.dtp, DataTransfer::None);\n\n\n\n debug!(\"server is ready and we have an active transfer\");\n\n connection.dtp = match dtp {\n\n DataTransfer::None => {\n\n assert_eq!(session.data_transfer_mode, DataTransferMode::Active);\n\n\n\n let client_addr = session.client_addr.expect(\"attempted a transfer but client address is not set\");\n\n let stream = mio::tcp::TcpStream::connect(&client_addr)?;\n\n\n\n let token = io.allocate_token();\n\n io.poll.register(&stream, token,\n", "file_path": "src/server/client/client.rs", "rank": 16, "score": 86961.00695756139 }, { "content": "/// Handles an IO event on the protocol stream.\n\nfn handle_protocol_event(state: &mut ClientState,\n\n event: &mio::Event,\n\n connection: &mut Connection,\n\n io: &mut Io,\n\n server: &mut Server)\n\n -> Result<(), Error> {\n\n let mut buffer: [u8; 10000] = [0; 10000];\n\n let bytes_written = connection.pi.stream.read(&mut buffer)?;\n\n let mut data = io::Cursor::new(&buffer[0..bytes_written]);\n\n\n\n assert_eq!(event.readiness().is_readable(), true);\n\n\n\n if !data.get_ref().is_empty() {\n\n let command = protocol::CommandKind::read(&mut data)?;\n\n let action = match state.handle_command(&command, server) {\n\n Ok(action) => action,\n\n Err(Error(ErrorKind::Protocol(e), _)) => {\n\n // If it was state error, tell them.\n\n Action::Reply(protocol::Reply::new(e.as_reply_code(),\n\n format!(\"error: {}\", e)))\n", "file_path": "src/server/client/client_io.rs", "rank": 17, "score": 82669.38678758363 }, { "content": "/// Attempts to open a data connection passively.\n\nfn listen_passive_dtp(client: &mut ClientState)\n\n -> Result<u16, Error> {\n\n let mut session = client.session.expect_ready_mut()?;\n\n let port = 5166;\n\n\n\n session.data_transfer_mode = DataTransferMode::Passive { port: port };\n\n Ok(port)\n\n}\n", "file_path": "src/server/client/state/handle/passive.rs", "rank": 18, "score": 81385.75037198131 }, { "content": "/// An FTP command.\n\npub trait Command : Clone + fmt::Debug + PartialEq + Eq\n\n{\n\n /// Writes the command to a buffer.\n\n fn 
write(&self, write: &mut Write) -> Result<(), Error> {\n\n // Write the payload to a temporary space\n\n let mut payload_buffer = io::Cursor::new(Vec::new());\n\n self.write_payload(&mut payload_buffer)?;\n\n let payload = payload_buffer.into_inner();\n\n\n\n // Don't write a redundant space unless there actually is a payload.\n\n if payload.is_empty() {\n\n write!(write, \"{}\", self.command_name())?;\n\n } else {\n\n write!(write, \"{} \", self.command_name())?;\n\n write.write(&payload)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/protocol/command/mod.rs", "rank": 19, "score": 79265.62956626154 }, { "content": "/// Handle the 'MKD' command.\n\npub fn handle(mkd: &protocol::MKD,\n\n client: &mut ClientState,\n\n server: &mut Server)\n\n-> Result<Action, Error> {\n\n let session = client.session.expect_ready()?;\n\n\n\n let path = Path::new(&mkd.remote_filename);\n\n\n\n let path = if path.has_root() {\n\n path.to_owned()\n\n } else {\n\n session.working_dir.join(path)\n\n };\n\n\n\n match server.file_system_mut().create_dir(&path) {\n\n Ok(..) => Ok(Action::Reply(protocol::reply::mkd::success())),\n\n // IO errors are caused by the client, not us.\n\n Err(Error(ErrorKind::Io(..), _)) => {\n\n unimplemented!();\n\n },\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/server/client/state/handle/mkd.rs", "rank": 20, "score": 78248.92023148607 }, { "content": "/// Handle the 'LIST' command.\n\npub fn handle(list: &protocol::LIST,\n\n client: &mut ClientState,\n\n server: &mut Server)\n\n -> Result<Action, Error> {\n\n if list.remote_filespec.is_some() {\n\n unimplemented!();\n\n }\n\n\n\n let working_dir = client.session.expect_ready()?.working_dir.clone();\n\n\n\n let entries = server.file_system().list(&working_dir)?;\n\n let mut data: String = entries.join(\"\\r\\n\");\n\n data.extend(\"\\r\\n\".chars());\n\n\n\n Ok(Action::Transfer(server::Transfer {\n\n file_type: FileType::ascii(),\n\n data: data.as_bytes().to_owned(),\n\n }))\n\n}\n", "file_path": "src/server/client/state/handle/list.rs", "rank": 21, "score": 78248.92023148607 }, { "content": "/// Handle the 'PASS' command.\n\npub fn handle(pass: &protocol::PASS,\n\n client: &mut ClientState,\n\n server: &mut Server)\n\n -> Result<Action, Error> {\n\n let session = client.session.expect_login()?.clone();\n\n\n\n if let session::Login::WaitingForPassword { username } = session {\n\n let credentials = Credentials { username: username.to_owned(), password: Some(pass.password.to_owned()) };\n\n\n\n if server.authenticate_user(&credentials) {\n\n client.session = Session::Ready(session::Ready::new(credentials));\n\n Ok(Action::Reply(protocol::reply::pass::logged_in()))\n\n } else {\n\n Ok(Action::Reply(protocol::reply::pass::not_logged_in(\"invalid credentials\")))\n\n }\n\n } else {\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::InvalidCommandSequence(\n\n \"the client must send password immediately after the username is sent\".to_owned()\n\n )).into())\n\n }\n\n}\n", "file_path": "src/server/client/state/handle/pass.rs", "rank": 22, "score": 78248.92023148607 }, { "content": "/// Handle the 'CWD' command.\n\npub fn handle(cwd: &protocol::CWD,\n\n client: &mut ClientState) -> Result<Action, Error> {\n\n let mut session = client.session.expect_ready_mut()?;\n\n\n\n session.working_dir = cwd.path.clone().into();\n\n Ok(Action::Reply(protocol::reply::cwd::success()))\n\n}\n", "file_path": "src/server/client/state/handle/cwd.rs", "rank": 23, "score": 78248.92023148607 }, { "content": "/// Handle the 'USER' 
command.\n\npub fn handle(user: &protocol::USER,\n\n client: &mut ClientState,\n\n server: &mut Server)\n\n -> Result<Action, Error> {\n\n let session = client.session.expect_login()?.clone();\n\n\n\n if let session::Login::WaitingForUsername = session {\n\n let credentials = Credentials { username: user.username.to_owned(), password: None };\n\n\n\n // The user may authenticate with no password\n\n if server.authenticate_user(&credentials) {\n\n client.session = Session::Ready(session::Ready::new(credentials));\n\n Ok(Action::Reply(protocol::reply::user::logged_in()))\n\n } else {\n\n // The user needs a password to get through.\n\n client.session = Session::Login(session::Login::WaitingForPassword {\n\n username: user.username.to_owned(),\n\n });\n\n\n\n Ok(Action::Reply(protocol::reply::user::need_password()))\n\n }\n\n } else {\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::InvalidCommandSequence(\n\n \"the client wait until we send the welcome message to log in\".to_owned(),\n\n )).into())\n\n }\n\n}\n", "file_path": "src/server/client/state/handle/user.rs", "rank": 24, "score": 78248.92023148607 }, { "content": "/// Handle the 'TYPE' command.\n\npub fn handle(ty: &protocol::TYPE,\n\n client: &mut ClientState) -> Result<Action, Error> {\n\n let mut session = client.session.expect_ready_mut()?;\n\n\n\n session.transfer_type = ty.file_type;\n\n\n\n debug!(\"file type set to {:?}\", ty.file_type);\n\n Ok(Action::Reply(protocol::Reply::new(protocol::reply::code::OK, \"file type set\")))\n\n}\n", "file_path": "src/server/client/state/handle/ty.rs", "rank": 25, "score": 78248.92023148607 }, { "content": "/// Handle the 'RETR' command.\n\npub fn handle(retr: &protocol::RETR,\n\n client: &mut ClientState,\n\n server: &mut Server)\n\n -> Result<Action, Error> {\n\n client.session.expect_ready()?;\n\n\n\n let data = server.file_system().read_file(&Path::new(&retr.remote_filename))?;\n\n\n\n Ok(Action::Transfer(server::Transfer {\n\n file_type: FileType::ascii(),\n\n data: data,\n\n }))\n\n}\n", "file_path": "src/server/client/state/handle/retr.rs", "rank": 26, "score": 78248.92023148607 }, { "content": "/// Handle the 'PORT' command.\n\npub fn handle_port(port: &protocol::PORT,\n\n client: &mut ClientState)\n\n -> Result<Action, Error> {\n\n let mut session = client.session.expect_ready_mut()?;\n\n\n\n debug!(\"client requested we initiate an active DTP connection on port {}\", port.port);\n\n\n\n // For active mode, we set the socket address on the session so that\n\n // we keep the address for later use. 
We do not have to worry in passive\n\n // mode because the client always initiates the data connection.\n\n session.client_addr = Some(port.to_socket_addr());\n\n Ok(Action::Reply(protocol::Reply::new(protocol::reply::code::OK, \"port\")))\n\n}\n", "file_path": "src/server/client/state/handle/active.rs", "rank": 27, "score": 76906.46380144959 }, { "content": "fn main() {\n\n flep::util::log::initialize_default().expect(\"could not setup logging\");\n\n\n\n let mut file_system = flep::fs::Memory::new();\n\n // FIXME: add methods to `Memory` to ease construction.\n\n file_system.write_file(&Path::new(\"README.txt\"),\n\n \"hello there\\nit is me\".as_bytes().to_owned()).unwrap();\n\n\n\n let mut server = Server { file_system: file_system };\n\n flep::server::run(&mut server, \"127.0.0.1:2222\")\n\n .expect(\"error whilst running server\");\n\n}\n", "file_path": "examples/basic_server.rs", "rank": 28, "score": 48491.309786864374 }, { "content": "/// Handles an IO event on the data stream.\n\nfn handle_data_event(event: &mio::Event,\n\n connection: &mut Connection,\n\n io: &mut Io)\n\n -> Result<(), Error> {\n\n if event.readiness().is_writable() {\n\n let dtp = std::mem::replace(&mut connection.dtp,\n\n DataTransfer::None);\n\n\n\n connection.dtp = match dtp {\n\n DataTransfer::None => unreachable!(),\n\n DataTransfer::Listening { listener, .. } => {\n\n let (sock, _) = listener.accept()?;\n\n\n\n let connection_token = io.allocate_token();\n\n io.poll.register(&sock, connection_token,\n\n mio::Ready::readable() | UnixReady::hup(),\n\n mio::PollOpt::edge())?;\n\n\n\n debug!(\"data connection established via PASV mode\");\n\n\n", "file_path": "src/server/client/client_io.rs", "rank": 29, "score": 39669.341227337616 }, { "content": "use protocol;\n\n\n\nerror_chain! {\n\n types {\n\n Error, ErrorKind, ResultExt;\n\n }\n\n\n\n links {\n\n Protocol(protocol::Error, protocol::ErrorKind);\n\n }\n\n\n\n foreign_links {\n\n Io(::std::io::Error);\n\n }\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 30, "score": 34203.34286154731 }, { "content": "error_chain! 
{\n\n types {\n\n Error, ErrorKind, ResultExt;\n\n }\n\n\n\n foreign_links {\n\n Io(::std::io::Error);\n\n InvalidUtf8(::std::string::FromUtf8Error);\n\n }\n\n\n\n errors {\n\n NotLoggedIn {\n\n description(\"client not logged in\")\n\n display(\"client not logged in\")\n\n }\n\n\n\n InvalidCommand(name: String) {\n\n description(\"received invalid command\")\n\n display(\"received invalid command: '{}'\", name)\n\n }\n", "file_path": "src/protocol/errors.rs", "rank": 39, "score": 33021.860412772825 }, { "content": "\n\n InvalidArgument(message: String) {\n\n description(\"received invalid argument\")\n\n display(\"received invalid argument: {}\", message)\n\n }\n\n\n\n InvalidCommandSequence(message: String) {\n\n description(\"received invalid command sequence\")\n\n display(\"received invalid command sequence: {}\", message)\n\n }\n\n\n\n UnimplementedCommand(name: String) {\n\n description(\"received command that is not implemented yet\")\n\n display(\"received command that is not implemented yet: '{}'\", name)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/protocol/errors.rs", "rank": 40, "score": 33019.80681956982 }, { "content": " /// Writes the payload data.\n\n fn write_payload(&self, write: &mut Write) -> Result<(), Error>;\n\n\n\n /// Reads payload data.\n\n fn read_payload(read: &mut BufRead) -> Result<Self, Error>;\n\n\n\n /// Gets the name of the command.\n\n fn command_name(&self) -> &'static str;\n\n\n\n fn bytes(&self) -> Vec<u8> {\n\n let mut buffer = io::Cursor::new(Vec::new());\n\n self.write(&mut buffer).expect(\"IO failure while writing to memory buffer\");\n\n buffer.into_inner()\n\n }\n\n\n\n /// Generates the text string for this packet.\n\n fn to_string(&self) -> String {\n\n String::from_utf8(self.bytes()).unwrap()\n\n }\n\n}\n", "file_path": "src/protocol/command/mod.rs", "rank": 41, "score": 29.117048532104135 }, { "content": " fn read_payload(read: &mut BufRead) -> Result<Self, Error> {\n\n let mut payload = String::new();\n\n read.read_to_string(&mut payload)?;\n\n\n\n assert_eq!(payload.chars().next(), Some(' '), \"missing space after command\");\n\n let payload: String = payload.chars().skip(1).collect();\n\n\n\n let textual_bytes: Vec<&str> = payload.split(\",\").collect();\n\n assert_eq!(textual_bytes.len(), 6, \"there should be 6 bytes in a PORT payload\");\n\n\n\n let bytes: Result<Vec<u8>, _> = textual_bytes.into_iter().map(|tb| match tb.parse() {\n\n Ok(b) => Ok(b),\n\n Err(..) => Err(Error::from_kind(ErrorKind::InvalidArgument(\n\n format!(\"PORT addresses should be comma-separated integers\")))),\n\n }).collect();\n\n let bytes = bytes?;\n\n\n\n let host = [bytes[0], bytes[1], bytes[2], bytes[3]];\n\n let port = NetworkEndian::read_u16(&bytes[4..6]);\n\n\n", "file_path": "src/protocol/command/port.rs", "rank": 42, "score": 23.264685320230946 }, { "content": "/// Defines a packet which takes no arguments.\n\nmacro_rules! 
define_basic_command {\n\n ($name:ident, $module_name:ident) => {\n\n pub use self::$module_name::$name;\n\n\n\n pub mod $module_name {\n\n use Command;\n\n use std::io::prelude::*;\n\n\n\n #[derive(Clone, Debug, PartialEq, Eq)]\n\n pub struct $name;\n\n\n\n impl Command for $name\n\n {\n\n fn write_payload(&self, _: &mut Write) -> Result<(), $crate::Error> { Ok(()) }\n\n fn read_payload(_: &mut BufRead) -> Result<Self, $crate::Error> { Ok($name) }\n\n\n\n fn command_name(&self) -> &'static str { stringify!($name) }\n\n }\n\n\n", "file_path": "src/protocol/command/basic.rs", "rank": 43, "score": 23.040613802507337 }, { "content": "\n\nimpl Argument for Mode\n\n{\n\n fn read(read: &mut BufRead) -> Result<Self, Error> {\n\n let c = read.read_u8()? as char;\n\n\n\n match c {\n\n 'S' => Ok(Mode::Stream),\n\n 'B' => Ok(Mode::Block),\n\n 'C' => Ok(Mode::Compressed),\n\n _ => panic!(\"unknown argument code: {}\", c),\n\n }\n\n }\n\n\n\n fn write(&self, write: &mut Write) -> Result<(), Error> {\n\n let mode_character = match *self {\n\n Mode::Stream => 'S',\n\n Mode::Block => 'B',\n\n Mode::Compressed => 'C',\n\n };\n", "file_path": "src/protocol/command/mode.rs", "rank": 44, "score": 22.608015526166472 }, { "content": " },\n\n 'I' => Ok(FileType::Binary),\n\n 'L' => {\n\n assert_eq!(read.read_u8()? as char, ' ');\n\n let bits_per_byte_char = read.read_u8()? as char;\n\n\n\n match bits_per_byte_char.to_string().parse() {\n\n Ok(bits_per_byte) => Ok(FileType::LocalFormat { bits_per_byte: bits_per_byte }),\n\n Err(..) => Err(ErrorKind::InvalidArgument(\n\n format!(\"file type should be single digit number, got '{}'\", bits_per_byte_char)).into()),\n\n }\n\n },\n\n c => panic!(\"invalid file type char: '{}'\", c),\n\n }\n\n }\n\n\n\n fn write(&self, write: &mut Write) -> Result<(), Error> {\n\n write!(write, \" \")?;\n\n\n\n match *self {\n", "file_path": "src/protocol/file_type.rs", "rank": 45, "score": 21.108572654138452 }, { "content": "}\n\n\n\nimpl FileType\n\n{\n\n pub fn ascii() -> Self { FileType::AsciiText(TextFormat::NonPrint) }\n\n}\n\n\n\nimpl Argument for FileType\n\n{\n\n fn read(read: &mut BufRead) -> Result<Self, Error> {\n\n let type_byte = read.read_u8()?;\n\n\n\n match type_byte as char {\n\n 'A' => {\n\n let format = TextFormat::read(read)?;\n\n Ok(FileType::AsciiText(format))\n\n },\n\n 'E' => {\n\n let format = TextFormat::read(read)?;\n\n Ok(FileType::EbcdicText(format))\n", "file_path": "src/protocol/file_type.rs", "rank": 46, "score": 21.03400129679818 }, { "content": " /// Get the address of the socket described by the command.\n\n pub fn to_socket_addr(&self) -> SocketAddr {\n\n let host = Ipv4Addr::from(self.host_address.clone());\n\n SocketAddr::V4(SocketAddrV4::new(host, self.port))\n\n }\n\n}\n\n\n\nimpl Command for PORT\n\n{\n\n fn write_payload(&self, write: &mut Write) -> Result<(), Error> {\n\n let mut port_buf = [0; 2];\n\n NetworkEndian::write_u16(&mut port_buf, self.port);\n\n\n\n let address_str = self.host_address.iter().map(|b| b.to_string()).join(\",\");\n\n let port_str = port_buf.iter().map(|b| b.to_string()).join(\",\");\n\n\n\n write!(write, \"{},{}\", address_str, port_str)?;\n\n Ok(())\n\n }\n\n\n", "file_path": "src/protocol/command/port.rs", "rank": 47, "score": 20.49469483487926 }, { "content": " XMKD(XMKD),\n\n /// Print the current working directory.\n\n XPWD(XPWD),\n\n XRCP(XRCP),\n\n /// Remove the directory.\n\n XRMD(XRMD),\n\n XRSQ(XRSQ),\n\n /// Send, mail if cannot.\n\n XSEM(XSEM),\n\n /// Send to terminal.\n\n XSEN(XSEN),\n\n}\n\n\n\nimpl 
CommandKind\n\n{\n\n /// Reads a command from a buffer.\n\n pub fn read(read: &mut Read) -> Result<Self, Error> {\n\n let line_bytes: Result<Vec<u8>, _> = read.bytes().take_while(|b| b.as_ref().map(|&b| (b as char) != '\\n').unwrap_or(true)).collect();\n\n let mut line_bytes = line_bytes?;\n\n\n", "file_path": "src/protocol/command_kind.rs", "rank": 48, "score": 19.988831501037353 }, { "content": "macro_rules! define_unimplemented_command\n\n{\n\n ($name:ident) => {\n\n #[derive(Clone, Debug, PartialEq, Eq)]\n\n pub struct $name;\n\n\n\n impl $crate::Command for $name\n\n {\n\n fn write_payload(&self, _: &mut ::std::io::Write)\n\n -> Result<(), $crate::Error> {\n\n unimplemented!();\n\n }\n\n\n\n fn read_payload(_: &mut ::std::io::BufRead)\n\n -> Result<Self, $crate::Error> {\n\n panic!(\"received unimplemented command: {}\", stringify!($name));\n\n }\n\n\n\n fn command_name(&self) -> &'static str { stringify!($name) }\n\n }\n", "file_path": "src/protocol/command/unimplemented.rs", "rank": 49, "score": 18.94567908303128 }, { "content": " // Every new line should use '\\r\\n', and we trimmed the '\\n' above.\n\n assert_eq!(line_bytes.last(), Some(&('\\r' as u8)), \"new lines should be CR+LF, not just LF\");\n\n line_bytes.pop();\n\n\n\n let line_string = String::from_utf8(line_bytes)?;\n\n\n\n // Split the line up.\n\n let (command_name, payload) = if line_string.contains(' ') {\n\n line_string.split_at(line_string.chars().position(|c| c == ' ').expect(\"no space in line\"))\n\n } else {\n\n // If the line has no space, it has no payload.\n\n (line_string.as_str(), \"\")\n\n };\n\n\n\n let mut payload_reader = io::BufReader::new(io::Cursor::new(payload));\n\n\n\n macro_rules! read_commands {\n\n ( $cmd_name:ident => $( $name:ident ),+ ) => {\n\n match command_name {\n\n $( stringify!($name) => Ok(CommandKind::$name($name::read_payload(&mut payload_reader)?)), )+\n", "file_path": "src/protocol/command_kind.rs", "rank": 50, "score": 18.089842568496437 }, { "content": "{\n\n fn read(read: &mut BufRead) -> Result<Self, Error> {\n\n let mut buf: [u8; 1] = [0; 1];\n\n\n\n // Check if we received a character.\n\n if read.read(&mut buf)? == 1 {\n\n assert_eq!(buf[0] as char, ' ');\n\n\n\n match read.read_u8()? 
as char {\n\n 'N' => Ok(TextFormat::NonPrint),\n\n 'T' => Ok(TextFormat::TelnetFormatControl),\n\n 'C' => Ok(TextFormat::ASACarriageControl),\n\n _ => panic!(\"invalid text format character\"),\n\n }\n\n } else {\n\n // The default is non-print.\n\n Ok(TextFormat::NonPrint)\n\n }\n\n }\n\n\n", "file_path": "src/protocol/file_type.rs", "rank": 51, "score": 17.69930826938862 }, { "content": "use Error;\n\nuse mio::*;\n\n\n\npub struct Io\n\n{\n\n pub poll: Poll,\n\n token_accumulator: usize,\n\n}\n\n\n\nimpl Io\n\n{\n\n pub fn new() -> Result<Self, Error> {\n\n Ok(Io {\n\n poll: Poll::new()?,\n\n token_accumulator: 100,\n\n })\n\n }\n\n\n\n pub fn allocate_token(&mut self) -> Token {\n\n self.token_accumulator += 1;\n\n Token(self.token_accumulator)\n\n }\n\n}\n", "file_path": "src/io/io.rs", "rank": 52, "score": 17.56819164051467 }, { "content": "\n\n pub fn expect_ready_mut(&mut self) -> Result<&mut Ready, Error> {\n\n if let Session::Ready(ref mut ready) = *self {\n\n Ok(ready)\n\n } else {\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::NotLoggedIn.into()).into())\n\n }\n\n }\n\n\n\n pub fn expect_login(&self) -> Result<&Login, Error> {\n\n if let Session::Login(ref login) = *self {\n\n Ok(login)\n\n } else {\n\n // FIXME: return a more appropriate error.\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::NotLoggedIn.into()).into())\n\n }\n\n }\n\n}\n\n\n\nimpl Ready\n", "file_path": "src/server/client/state/session.rs", "rank": 53, "score": 16.896688353172593 }, { "content": "impl Connection\n\n{\n\n pub fn send_command<C>(&mut self, command: &C) -> Result<(), Error>\n\n where C: protocol::Command {\n\n command.write(&mut self.pi.stream)?;\n\n Ok(())\n\n }\n\n\n\n pub fn send_reply<R>(&mut self, reply: R) -> Result<(), Error>\n\n where R: Into<protocol::Reply> {\n\n let reply = reply.into();\n\n reply.write(&mut self.pi.stream)?;\n\n Ok(())\n\n }\n\n\n\n pub fn uses_token(&self, the_token: mio::Token) -> bool {\n\n if self.pi.token == the_token { return true };\n\n\n\n match self.dtp {\n\n DataTransfer::None => false,\n", "file_path": "src/io/connection.rs", "rank": 54, "score": 16.720434448980242 }, { "content": "use std::fmt;\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct Credentials\n\n{\n\n pub username: String,\n\n pub password: Option<String>,\n\n}\n\n\n\n// We have a custom Debug implementation so we don't print the password.\n\nimpl fmt::Debug for Credentials\n\n{\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n write!(fmt, \"Credentials {{ username: {:?} }}\", self.username)\n\n }\n\n}\n", "file_path": "src/credentials.rs", "rank": 55, "score": 16.28251198956903 }, { "content": "# flep\n\n\n\n[![Crates.io](https://img.shields.io/crates/v/flep.svg)](https://crates.io/crates/flep)\n\n[![Build Status](https://travis-ci.org/dylanmckay/flep.svg?branch=master)](https://travis-ci.org/dylanmckay/flep)\n\n[![license](https://img.shields.io/github/license/dylanmckay/flep.svg)]()\n\n\n\nAn FTP server library.\n\n\n\n[Documentation](https://docs.rs/flep)\n\n\n\n## Example\n\n\n\nRun `ftp 127.0.0.1 2222` to connect to the server.\n\n\n\n```rust\n\npub struct Server\n\n{\n\n file_system: flep::fs::Memory,\n\n}\n\n\n\nimpl flep::server::Server for Server\n\n{\n\n fn welcome_message(&self) -> String { \"Hello there!\".to_string() }\n\n\n\n fn file_system(&self) -> &flep::fs::FileSystem {\n\n &self.file_system\n\n }\n\n\n\n fn file_system_mut(&mut self) -> &mut flep::fs::FileSystem {\n\n &mut self.file_system\n\n }\n\n}\n\n\n\nfn main() {\n\n 
flep::util::log::initialize_default().expect(\"could not setup logging\");\n\n\n\n // Set up an in-memory file system.\n\n let mut file_system = flep::fs::Memory::new();\n\n file_system.write_file(&Path::new(\"README.txt\"),\n\n \"hello there\\nit is me\".as_bytes().to_owned()).unwrap();\n\n\n\n // Start on port 2222\n\n let mut server = Server { file_system: file_system };\n\n flep::server::run(&mut server, \"127.0.0.1:2222\")\n\n .expect(\"error whilst running server\");\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 56, "score": 16.199524650786664 }, { "content": "\n\n fn create_dir(&mut self, _path: &Path) -> Result<(), Error> {\n\n unimplemented!();\n\n }\n\n\n\n fn write_file(&mut self, _path: &Path, _data: Vec<u8>) -> Result<(), Error> {\n\n unimplemented!();\n\n }\n\n\n\n fn read_file(&self, _path: &Path) -> Result<Vec<u8>, Error> {\n\n unimplemented!();\n\n }\n\n}\n", "file_path": "src/fs/physical.rs", "rank": 57, "score": 15.743020061205844 }, { "content": "/// Defines an new raw FTP command.\n\nmacro_rules! define_command {\n\n ($name:ident { $( $arg_name:ident : $arg_ty:ty),* }) => {\n\n #[derive(Clone, Debug, PartialEq, Eq)]\n\n pub struct $name {\n\n $( pub $arg_name : $arg_ty ),*\n\n }\n\n\n\n impl $crate::Command for $name {\n\n #[allow(unused_variables)]\n\n fn write_payload(&self, write: &mut ::std::io::Write)\n\n -> Result<(), $crate::Error> {\n\n #[allow(unused_imports)]\n\n use $crate::Argument;\n\n\n\n $( self.$arg_name.write(write)?; )*\n\n Ok(())\n\n }\n\n\n\n #[allow(unused_variables)]\n", "file_path": "src/protocol/command/macros.rs", "rank": 58, "score": 15.27179794307872 }, { "content": "use Error;\n\nuse super::FileSystem;\n\n\n\nuse std::path::{Path, PathBuf};\n\nuse std::fs;\n\n\n\n/// A folder on the physical on-disk filesystem.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Physical\n\n{\n\n /// The root directory.\n\n pub root: PathBuf,\n\n}\n\n\n\nimpl Physical\n\n{\n\n /// Creates a new physical filesystem.\n\n pub fn new<P>(root: P) -> Self\n\n where P: Into<PathBuf> {\n\n Physical {\n", "file_path": "src/fs/physical.rs", "rank": 59, "score": 14.90271641185804 }, { "content": " fn read_payload(read: &mut ::std::io::BufRead)\n\n -> Result<Self, $crate::Error> {\n\n Ok($name {\n\n $( $arg_name : <$arg_ty as $crate::Argument>::read_with_space(read)?, )*\n\n })\n\n }\n\n\n\n fn command_name(&self) -> &'static str { stringify!($name) }\n\n }\n\n };\n\n\n\n // Allow trailing commas.\n\n ($name:ident { $( $arg_name:ident : $arg_ty:ty),* , }) => {\n\n define_command!($name { $( $arg_name : $arg_ty ),* });\n\n };\n\n}\n", "file_path": "src/protocol/command/macros.rs", "rank": 60, "score": 14.81712111134179 }, { "content": " pub fn write(&self, write: &mut Write) -> Result<(), io::Error> {\n\n match self.text {\n\n Text::SingleLine(ref line) => {\n\n write!(write, \"{} {}\\r\\n\", self.code.0, line)\n\n },\n\n Text::MultiLine(..) 
=> unimplemented!(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<String> for Text\n\n{\n\n fn from(s: String) -> Text {\n\n let lines: Vec<_> = s.lines().collect();\n\n assert_eq!(lines.is_empty(), false);\n\n\n\n if lines.len() == 1 {\n\n Text::SingleLine(lines[0].to_owned())\n\n } else {\n\n Text::MultiLine(lines.into_iter().map(|l| l.to_owned()).collect())\n", "file_path": "src/protocol/reply/mod.rs", "rank": 61, "score": 14.798237095422518 }, { "content": " fn write(&self, write: &mut Write) -> Result<(), Error> {\n\n write!(write, \" \")?;\n\n\n\n match *self {\n\n TextFormat::NonPrint => write!(write, \"N\")?,\n\n TextFormat::TelnetFormatControl => write!(write, \"T\")?,\n\n TextFormat::ASACarriageControl => write!(write, \"C\")?,\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test\n\n{\n\n use Argument;\n\n use super::*;\n\n\n\n #[test]\n\n fn correctly_writes_ascii_nonprint() {\n", "file_path": "src/protocol/file_type.rs", "rank": 62, "score": 14.343278205063516 }, { "content": "use {Command, Error, ErrorKind};\n\n\n\nuse itertools::Itertools;\n\nuse byteorder::{ByteOrder, NetworkEndian};\n\n\n\nuse std::net::{SocketAddr, SocketAddrV4, Ipv4Addr};\n\nuse std::io::prelude::*;\n\n\n\n/// Sets up an IPv4 port\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct PORT\n\n{\n\n /// The IPv4 address of the host.\n\n pub host_address: [u8; 4],\n\n /// The port number.\n\n pub port: u16,\n\n}\n\n\n\nimpl PORT\n\n{\n", "file_path": "src/protocol/command/port.rs", "rank": 63, "score": 14.115909921640512 }, { "content": "use {Argument, Error, ErrorKind};\n\n\n\nuse byteorder::ReadBytesExt;\n\nuse std::io::prelude::*;\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum FileType\n\n{\n\n AsciiText(TextFormat),\n\n EbcdicText(TextFormat),\n\n Binary,\n\n LocalFormat { bits_per_byte: u8 },\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum TextFormat\n\n{\n\n NonPrint,\n\n TelnetFormatControl,\n\n ASACarriageControl,\n", "file_path": "src/protocol/file_type.rs", "rank": 64, "score": 13.560436969278276 }, { "content": " pub uuid: Uuid,\n\n pub session: Session,\n\n}\n\n\n\nimpl ClientState\n\n{\n\n /// Creates a new client state.\n\n pub fn new() -> Self {\n\n ClientState {\n\n uuid: Uuid::new_v4(),\n\n session: Default::default(),\n\n }\n\n }\n\n\n\n /// Handle a command and update the state accordingly.\n\n pub fn handle_command(&mut self,\n\n command: &protocol::CommandKind,\n\n server: &mut Server)\n\n -> Result<server::client::Action, Error> {\n\n handle::command(self, command, server)\n", "file_path": "src/server/client/state/mod.rs", "rank": 65, "score": 12.953057629148493 }, { "content": "impl Client\n\n{\n\n /// Attempts to update the state of the client with any\n\n /// information received from the network.\n\n pub fn tick(&mut self, io: &mut Io) -> Result<(), Error> {\n\n self::tick(&mut self.state, &mut self.connection, io)\n\n }\n\n\n\n pub fn handle_io_event(&mut self,\n\n event: &mio::Event,\n\n the_token: mio::Token,\n\n server: &mut Server,\n\n io: &mut Io)\n\n -> Result<(), Error> {\n\n super::client_io::handle_event(&mut self.state, event,\n\n &mut self.connection, the_token,\n\n server, io)\n\n }\n\n}\n\n\n\n/// Does the state tick.\n", "file_path": "src/server/client/client.rs", "rank": 66, "score": 12.881454774847782 }, { "content": " }\n\n\n\n fn find_parent_and_name(&self, path: &Path) -> Result<(&Node, String), Error> {\n\n if let Some(parent) = path.parent() {\n\n let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();\n\n 
Ok((self.find_node(parent)?, file_name))\n\n } else {\n\n // FIXME: better error handling.\n\n panic!(\"no parent\");\n\n }\n\n }\n\n\n\n fn find_parent_and_name_mut(&mut self, path: &Path) -> Result<(&mut Node, String), Error> {\n\n if let Some(parent) = path.parent() {\n\n let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();\n\n Ok((self.find_node_mut(parent)?, file_name))\n\n } else {\n\n // FIXME: better error handling.\n\n panic!(\"no parent\");\n\n }\n", "file_path": "src/fs/memory.rs", "rank": 67, "score": 12.85058282817383 }, { "content": "//! Raw FTP protocol definitions.\n\n//!\n\n//! * [RFC 959](https://www.w3.org/Protocols/rfc959)\n\n//! * http://www.nsftools.com/tips/RawFTP.htm\n\n\n\npub extern crate rfc1700;\n\n\n\nextern crate itertools;\n\nextern crate byteorder;\n\n#[macro_use]\n\nextern crate error_chain;\n\n\n\npub use self::command_kind::CommandKind;\n\npub use self::argument::Argument;\n\npub use self::reply::Reply;\n\npub use self::command::*;\n\npub use self::errors::*;\n\npub use self::file_type::{FileType, TextFormat};\n\n\n\npub mod command_kind;\n\npub mod argument;\n\npub mod reply;\n\npub mod command;\n\npub mod errors;\n\npub mod file_type;\n\n\n", "file_path": "src/protocol/lib.rs", "rank": 68, "score": 12.467419557426826 }, { "content": " pub transfer_type: FileType,\n\n /// Whether the connection is active or passive.\n\n pub data_transfer_mode: DataTransferMode,\n\n\n\n /// The port given by the `PORT` command.\n\n /// Can be empty if passive mode is used.\n\n pub client_addr: Option<SocketAddr>,\n\n /// The data transfer operations we have queued.\n\n pub active_transfer: Option<server::Transfer>,\n\n}\n\n\n\nimpl Session\n\n{\n\n pub fn expect_ready(&self) -> Result<&Ready, Error> {\n\n if let Session::Ready(ref ready) = *self {\n\n Ok(ready)\n\n } else {\n\n Err(protocol::Error::from_kind(protocol::ErrorKind::NotLoggedIn.into()).into())\n\n }\n\n }\n", "file_path": "src/server/client/state/session.rs", "rank": 69, "score": 12.329359158839843 }, { "content": "extern crate flep;\n\n\n\nuse flep::fs::FileSystem;\n\nuse std::path::Path;\n\n\n\npub struct Server\n\n{\n\n file_system: flep::fs::Memory,\n\n}\n\n\n\nimpl flep::server::Server for Server\n\n{\n\n fn welcome_message(&self) -> String { \"Hello there!\".to_string() }\n\n\n\n fn file_system(&self) -> &flep::fs::FileSystem {\n\n &self.file_system\n\n }\n\n\n\n fn file_system_mut(&mut self) -> &mut flep::fs::FileSystem {\n\n &mut self.file_system\n\n }\n\n}\n\n\n", "file_path": "examples/basic_server.rs", "rank": 70, "score": 12.108182103356077 }, { "content": "\n\n write.write(&[mode_character as u8])?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test\n\n{\n\n use super::*;\n\n use {Command, CommandKind};\n\n use std::io;\n\n\n\n fn read(text: &str) -> MODE {\n\n let command_kind = CommandKind::read(&mut io::Cursor::new(text)).unwrap();\n\n\n\n if let CommandKind::MODE(mode) = command_kind {\n\n mode\n\n } else {\n\n panic!();\n", "file_path": "src/protocol/command/mode.rs", "rank": 71, "score": 11.864247851725395 }, { "content": "\n\n Ok(DataTransfer::Listening {\n\n listener: listener,\n\n token: token,\n\n })\n\n }\n\n\n\n pub fn connect() -> Result<Self, Error> {\n\n unimplemented!();\n\n }\n\n}\n\n\n\nimpl Default for DataTransferMode\n\n{\n\n // FTP defaults to active mode (unless you send 'PASV').\n\n fn default() -> Self { DataTransferMode::Active }\n\n}\n", "file_path": "src/io/connection.rs", "rank": 72, "score": 11.754123849203687 }, { "content": " Ok(PORT { 
host_address: host, port: port })\n\n }\n\n\n\n fn command_name(&self) -> &'static str { \"PORT\" }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test\n\n{\n\n use {CommandKind, Command};\n\n use super::*;\n\n use std::io;\n\n\n\n #[test]\n\n fn correctly_writes_basic_packets() {\n\n let packet = PORT { host_address: [127,0,0,1], port: 22 };\n\n let raw_bytes = packet.bytes();\n\n let text = String::from_utf8(raw_bytes).unwrap();\n\n\n\n assert_eq!(text, \"PORT 127,0,0,1,0,22\");\n", "file_path": "src/protocol/command/port.rs", "rank": 73, "score": 11.69070484946146 }, { "content": "use {Argument, Error};\n\n\n\nuse std::io::prelude::*;\n\n\n\nuse byteorder::ReadBytesExt;\n\n\n\ndefine_command!(MODE {\n\n mode: Mode,\n\n});\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum Mode\n\n{\n\n /// Mode character 'S'.\n\n Stream,\n\n /// Mode character 'B'.\n\n Block,\n\n /// Mode character 'C'.\n\n Compressed,\n\n}\n", "file_path": "src/protocol/command/mode.rs", "rank": 74, "score": 11.316079358382318 }, { "content": " FileType::AsciiText(format) => {\n\n write!(write, \"A\")?;\n\n format.write(write)?;\n\n },\n\n FileType::EbcdicText(format) => {\n\n write!(write, \"E\")?;\n\n format.write(write)?;\n\n },\n\n FileType::Binary => write!(write, \"I\")?,\n\n FileType::LocalFormat { bits_per_byte } => {\n\n assert!(bits_per_byte < 10,\n\n \"bits per byte must be single-digit\");\n\n write!(write, \"L {}\", bits_per_byte)?;\n\n },\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Argument for TextFormat\n", "file_path": "src/protocol/file_type.rs", "rank": 75, "score": 11.26763258818054 }, { "content": " #[cfg(test)]\n\n mod test\n\n {\n\n use super::*;\n\n use {Command, CommandKind};\n\n use std::io;\n\n\n\n #[test]\n\n fn correctly_writes_basic_packets() {\n\n let packet = $name;\n\n let raw_bytes = packet.bytes();\n\n let text = String::from_utf8(raw_bytes).unwrap();\n\n\n\n assert_eq!(text, stringify!($name));\n\n }\n\n\n\n #[test]\n\n fn correctly_reads_basic_packets() {\n\n let raw_text = format!(\"{}\\r\\n\", stringify!($name));\n\n let command = CommandKind::read(&mut io::Cursor::new(raw_text.as_bytes().to_vec())).unwrap();\n", "file_path": "src/protocol/command/basic.rs", "rank": 76, "score": 11.131968130380452 }, { "content": "pub use self::port::PORT;\n\npub use self::mode::{MODE, Mode};\n\npub use self::basic::{ABOR, CDUP, EPSV, FEAT, NOOP, PASV, PWD,\n\n QUIT, REIN, STOU, SYST};\n\npub use self::misc::{ACCT, APPE, CWD, DELE, HELP, LIST, MDTM, MKD, NLST,\n\n RETR, RMD, RNFR, RNTO, SITE, SIZE, STAT, STOR, TYPE,\n\n USER, PASS};\n\npub use self::security::{ADAT, AUTH, CCC, CONF, ENC, MIC, PBSZ, PROT};\n\npub use self::unimplemented::*;\n\n\n\n#[macro_use]\n\npub mod macros;\n\npub mod port;\n\npub mod mode;\n\n/// Commands which take no arguments.\n\npub mod basic;\n\npub mod misc;\n\npub mod security;\n\npub mod unimplemented;\n\n\n\nuse Error;\n\nuse std::io::prelude::*;\n\nuse std::{io, fmt};\n\n\n\n/// An FTP command.\n", "file_path": "src/protocol/command/mod.rs", "rank": 77, "score": 10.999847971671471 }, { "content": "//! The code for the generic `FileSystem` trait.\n\n//!\n\n//! Also contains both physical and in-memory implementations\n\n//! 
of a file system.\n\n\n\npub use self::physical::Physical;\n\npub use self::memory::Memory;\n\n\n\nmod physical;\n\nmod memory;\n\n\n\nuse Error;\n\nuse std::path::Path;\n\n\n\n/// A filesystem mountable as FTP.\n", "file_path": "src/fs/mod.rs", "rank": 78, "score": 10.87272700767371 }, { "content": " self\n\n }\n\n}\n\n\n\nimpl FileSystem for Memory\n\n{\n\n fn list(&self, path: &Path)\n\n -> Result<Vec<String>, Error> {\n\n let parent_node = self.find_node(path)?;\n\n\n\n match parent_node.kind {\n\n NodeKind::Directory(ref dir) => {\n\n Ok(dir.nodes.values().map(|node| node.name.clone()).collect())\n\n },\n\n // FIXME: better error handling\n\n NodeKind::File(..) => panic!(\"this is not a directory\"),\n\n }\n\n }\n\n\n\n fn create_dir(&mut self, path: &Path) -> Result<(), Error> {\n", "file_path": "src/fs/memory.rs", "rank": 79, "score": 10.781889691802423 }, { "content": " let child_parts = parts[1..].to_owned();\n\n\n\n for node in dir.nodes.values_mut() { if let Some(node) = node.find_node_mut(child_parts.clone()) {\n\n return Some(node)\n\n }}\n\n }\n\n None\n\n }\n\n}\n\n\n\nimpl Directory\n\n{\n\n #[cfg(test)]\n\n pub fn new() -> Self {\n\n Directory { nodes: HashMap::new() }\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn add(mut self, node: Node) -> Self {\n\n self.nodes.insert(node.name.clone(), node);\n", "file_path": "src/fs/memory.rs", "rank": 80, "score": 10.457577748073641 }, { "content": "//! FTP server library.\n\n\n\npub extern crate flep_protocol as protocol;\n\n\n\nextern crate mio;\n\nextern crate uuid;\n\n#[macro_use]\n\nextern crate error_chain;\n\n#[macro_use]\n\nextern crate log;\n\n\n\npub use self::credentials::Credentials;\n\npub use self::errors::*;\n\n\n\npub use protocol::FileType;\n\n\n\npub mod server;\n\npub mod io;\n\npub mod fs;\n\npub mod util;\n\nmod credentials;\n\nmod errors;\n", "file_path": "src/lib.rs", "rank": 81, "score": 10.209473579730066 }, { "content": "\n\nimpl Node\n\n{\n\n fn find_node(&self, parts: Vec<&str>) -> Option<&Self> {\n\n if parts == vec![&self.name] { return Some(self) };\n\n\n\n if let NodeKind::Directory(ref dir) = self.kind {\n\n let child_parts = parts[1..].to_owned();\n\n\n\n for node in dir.nodes.values() { if let Some(node) = node.find_node(child_parts.clone()) {\n\n return Some(node)\n\n }}\n\n }\n\n None\n\n }\n\n\n\n fn find_node_mut(&mut self, parts: Vec<&str>) -> Option<&mut Self> {\n\n if parts == vec![&self.name] { return Some(self) };\n\n\n\n if let NodeKind::Directory(ref mut dir) = self.kind {\n", "file_path": "src/fs/memory.rs", "rank": 82, "score": 10.127246171049851 }, { "content": "}\n\n\n\n/// Reply text.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum Text\n\n{\n\n /// The reply is only a single line of text.\n\n SingleLine(String),\n\n /// The reply is multiple lines of text.\n\n MultiLine(Vec<String>),\n\n}\n\n\n\nimpl Reply\n\n{\n\n pub fn new<C,S>(code: C, text: S) -> Self\n\n where C: Into<Code>, S: Into<String> {\n\n let text: String = text.into();\n\n\n\n Reply {\n\n code: code.into(),\n", "file_path": "src/protocol/reply/mod.rs", "rank": 83, "score": 9.91964883042412 }, { "content": " root: root.into(),\n\n }\n\n }\n\n}\n\n\n\nimpl FileSystem for Physical\n\n{\n\n fn list(&self, path: &Path) -> Result<Vec<String>, Error> {\n\n let full_path = self.root.join(path);\n\n\n\n let entries: Result<Vec<fs::DirEntry>, _> = fs::read_dir(&full_path)?.collect();\n\n let entries = entries?;\n\n\n\n let names: Vec<String>= entries.into_iter().map(|entry| {\n\n let base_name = 
entry.path().strip_prefix(&full_path).unwrap().to_owned();\n\n base_name.to_str().unwrap().to_owned()\n\n }).collect();\n\n\n\n Ok(names)\n\n }\n", "file_path": "src/fs/physical.rs", "rank": 84, "score": 9.908487546496925 }, { "content": " DataTransfer::Listening { ref token, .. } => *token == the_token,\n\n DataTransfer::Connecting { ref token, .. } => *token == the_token,\n\n DataTransfer::Connected { ref token, .. } => *token == the_token,\n\n }\n\n }\n\n}\n\n\n\nimpl DataTransfer\n\n{\n\n /// Start listening for a new data transfer on a port.\n\n pub fn bind(port: u16, io: &mut Io) -> Result<Self, Error> {\n\n use std::net::ToSocketAddrs;\n\n\n\n let addr = (\"127.0.0.1\", port).to_socket_addrs()?.next().unwrap();\n\n let listener = TcpListener::bind(&addr)?;\n\n\n\n let token = io.allocate_token();\n\n\n\n io.poll.register(&listener, token, mio::Ready::readable(),\n\n mio::PollOpt::edge())?;\n", "file_path": "src/io/connection.rs", "rank": 85, "score": 9.88854017644076 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Text\n\n{\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Text::SingleLine(ref line) => write!(fmt, \"{}\", line),\n\n Text::MultiLine(ref lines) => {\n\n for line in lines { write!(fmt, \"{}\\n\", line)?; }\n\n Ok(())\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/protocol/reply/mod.rs", "rank": 86, "score": 9.779228757743454 }, { "content": " }\n\n\n\n /// Attempts to progress the state of the client if need be.\n\n pub fn progress(&mut self,\n\n server: &mut Server,\n\n connection: &mut Connection)\n\n -> Result<(), Error> {\n\n let session = std::mem::replace(&mut self.session, Session::default());\n\n\n\n self.session = match session {\n\n Session::PendingWelcome => {\n\n debug!(\"sending welcome to client\");\n\n\n\n let welcome = protocol::Reply::new(protocol::reply::code::OK, server.welcome_message());\n\n welcome.write(&mut connection.pi.stream)?;\n\n\n\n Session::Login(session::Login::WaitingForUsername)\n\n },\n\n session => session,\n\n };\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/server/client/state/mod.rs", "rank": 87, "score": 9.73530074506705 }, { "content": "pub const REQUESTED_ACTION_ABORTED_LOCAL_ERROR_IN_PROCESSING: Code = Code(451);\n\npub const REQUESTED_ACTION_ABORTED_PAGE_TYPE_UNKNOWN: Code = Code(551);\n\npub const REQUESTED_ACTION_NOT_TAKEN_INSUFFICIENT_STORAGE: Code = Code(452);\n\npub const REQUESTED_FILE_ACTION_ABORTED_EXCEEDED_ALLOCATION: Code = Code(552);\n\npub const INVALID_FILE_NAME: Code = Code(553);\n\n\n\nimpl AsReplyCode for ErrorKind {\n\n fn as_reply_code(&self) -> Code {\n\n use ErrorKind::*;\n\n\n\n match *self {\n\n InvalidCommand(..) => INVALID_COMMAND,\n\n // If the othe end sends us invalid text, report it as an\n\n // invalid command.\n\n InvalidUtf8(..) => INVALID_COMMAND,\n\n NotLoggedIn => USER_NOT_LOGGED_IN,\n\n InvalidArgument(..) => SYNTAX_ERROR,\n\n InvalidCommandSequence(..) => BAD_COMMAND_SEQUENCE,\n\n UnimplementedCommand(..) => COMMAND_NOT_IMPLEMENTED,\n\n Msg(..) 
| Io(..)\n", "file_path": "src/protocol/reply/code.rs", "rank": 88, "score": 9.528163629618117 }, { "content": " let (parent, file_name) = self.find_parent_and_name_mut(path)?;\n\n\n\n if let NodeKind::Directory(ref mut dir) = parent.kind {\n\n dir.nodes.insert(file_name.clone(), Node {\n\n name: file_name,\n\n kind: NodeKind::Directory(Directory { nodes: HashMap::new() }),\n\n });\n\n Ok(())\n\n } else {\n\n // FIXME: better error handling\n\n panic!(\"not a dir\")\n\n }\n\n }\n\n\n\n fn write_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), Error> {\n\n let (parent, file_name) = self.find_parent_and_name_mut(path)?;\n\n\n\n if let NodeKind::Directory(ref mut dir) = parent.kind {\n\n dir.nodes.insert(file_name.clone(), Node {\n\n name: file_name,\n", "file_path": "src/fs/memory.rs", "rank": 89, "score": 9.372975160359413 }, { "content": "use Error;\n\nuse super::FileSystem;\n\n\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\n\n\nconst ROOT_DIR_NAME: &'static str = \"\";\n\n\n\n/// An in-memory filesystem.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Memory\n\n{\n\n /// The root directory.\n\n root: Node,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n", "file_path": "src/fs/memory.rs", "rank": 90, "score": 9.323949280092908 }, { "content": "use Error;\n\nuse protocol;\n\nuse super::Io;\n\n\n\nuse mio::tcp::{TcpStream, TcpListener};\n\nuse mio;\n\n\n\n/// An FTP connection\n\npub struct Connection\n\n{\n\n pub pi: Interpreter,\n\n pub dtp: DataTransfer,\n\n}\n\n\n\n/// The protocol interpreter (PI) stream.\n\npub struct Interpreter\n\n{\n\n /// The underlying socket.\n\n pub stream: TcpStream,\n\n /// The token used to listen for events on the PI stream.\n", "file_path": "src/io/connection.rs", "rank": 91, "score": 9.145335433948556 }, { "content": "{\n\n pub fn new<F>(features: F) -> Self\n\n where F: IntoIterator<Item=Feature> {\n\n Features { features: features.into_iter().collect() }\n\n }\n\n}\n\n\n\nimpl Default for Features\n\n{\n\n fn default() -> Self { Features { features: Vec::new() }}\n\n}\n\n\n\nimpl Into<Reply> for Features\n\n{\n\n fn into(self) -> Reply {\n\n if self.features.is_empty() {\n\n Reply::single_line(REPLY_CODE, \"no additional features supported\")\n\n } else {\n\n let mut lines = Vec::new();\n\n lines.push(\"Extensions supported:\".to_owned());\n", "file_path": "src/protocol/reply/feat.rs", "rank": 92, "score": 9.01211138405107 }, { "content": "\n\n fn find_node_mut(&mut self, path: &Path) -> Result<&mut Node, Error> {\n\n let mut parts: Vec<_> = path.iter().map(|s| s.to_str().unwrap()).collect();\n\n\n\n // Skip the '/' if it exists.\n\n if parts.first() == Some(&\"/\") {\n\n parts = parts[1..].to_owned();\n\n }\n\n\n\n if parts.is_empty() {\n\n Ok(&mut self.root)\n\n } else {\n\n let mut the_parts = vec![ROOT_DIR_NAME];\n\n the_parts.extend(parts);\n\n let node = self.root.find_node_mut(the_parts);\n\n\n\n if let Some(node) = node { Ok(node) } else { panic!(\"path does not exist\") }\n\n }\n\n }\n\n}\n", "file_path": "src/fs/memory.rs", "rank": 93, "score": 8.88723700055848 }, { "content": "use {Reply, reply};\n\n\n\nconst REPLY_CODE: reply::Code = reply::code::STATUS_OR_HELP_REPLY;\n\n\n\n/// A single feature supported by the server.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Feature\n\n{\n\n pub name: String,\n\n}\n\n\n\n/// The response to a 'FEAT' command.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Features\n\n{\n\n /// The list of supported features.\n\n pub features: 
Vec<Feature>,\n\n}\n\n\n\nimpl Features\n", "file_path": "src/protocol/reply/feat.rs", "rank": 94, "score": 8.837927848962943 }, { "content": "//! Reply code definitions.\n\n\n\nuse {Error, ErrorKind};\n\n\n\n/// A reply code.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Code(pub u16);\n\n\n\n/// Something which we can derive a reply code from.\n", "file_path": "src/protocol/reply/code.rs", "rank": 95, "score": 8.675984389065357 }, { "content": "use {Credentials, Error, FileType};\n\nuse io::DataTransferMode;\n\nuse {server, protocol};\n\n\n\nuse std::net::SocketAddr;\n\nuse std::path::PathBuf;\n\n\n\n/// The state of a client.\n\n#[derive(Clone, Debug)]\n\npub enum Session\n\n{\n\n /// We need to send them a welcome message.\n\n PendingWelcome,\n\n /// We are waiting for (or in the middle of) the user to login.\n\n Login(Login),\n\n /// We are connected and logged in as a user.\n\n Ready(Ready),\n\n}\n\n\n\n/// The state of a client that is in the login process.\n", "file_path": "src/server/client/state/session.rs", "rank": 96, "score": 8.666982986003223 }, { "content": "//! Client state.\n\n//!\n\n//! This module should be free of *all* network IO.\n\n\n\npub use self::session::Session;\n\n\n\nuse {Error, server, protocol};\n\nuse server::Server;\n\nuse io::Connection;\n\n\n\nuse std;\n\n\n\nuse uuid::Uuid;\n\n\n\nmod handle;\n\nmod session;\n\n\n\n/// An FTP client from the point-of-view of the FTP server.\n\npub struct ClientState\n\n{\n", "file_path": "src/server/client/state/mod.rs", "rank": 97, "score": 8.643367899484083 }, { "content": " }\n\n\n\n mod read_file {\n\n pub use super::*;\n\n use super::super::FileSystem;\n\n use std::path::Path;\n\n\n\n #[test]\n\n fn correctly_reads_a_top_level_file() {\n\n let mut fs = Memory::new();\n\n\n\n fs.write_file(&Path::new(\"/foo.txt\"), vec![1,2,3]).unwrap();\n\n assert_eq!(fs.read_file(&Path::new(\"/foo.txt\")).unwrap(), vec![1,2,3]);\n\n }\n\n }\n\n}\n", "file_path": "src/fs/memory.rs", "rank": 98, "score": 8.632145632549406 }, { "content": "//! The `Transfer` type.\n\n\n\nuse FileType;\n\n\n\n/// A data transfer.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Transfer\n\n{\n\n pub file_type: FileType,\n\n pub data: Vec<u8>,\n\n}\n", "file_path": "src/server/transfer.rs", "rank": 99, "score": 8.603976210438088 } ]
Rust
wincolor/src/winapi_inline.rs
crlf0710/termcolor
1059a1e540ac8ab7a2c99508e1b304fcce192705
#![allow(bad_style, overflowing_literals, unused_macros, unused_imports, dead_code)]

#[macro_use]
pub mod macros {
    macro_rules! STRUCT {
        (#[debug] $($rest:tt)*) => (
            STRUCT!{#[cfg_attr(feature = "impl-debug", derive(Debug))] $($rest)*}
        );
        ($(#[$attrs:meta])* struct $name:ident { $($field:ident: $ftype:ty,)+ }) => (
            #[repr(C)]
            #[derive(Copy)]
            $(#[$attrs])*
            pub struct $name {
                $(pub $field: $ftype,)+
            }
            impl Clone for $name {
                #[inline]
                fn clone(&self) -> $name { *self }
            }
            #[cfg(feature = "impl-default")]
            impl Default for $name {
                #[inline]
                fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
            }
        );
    }
}

pub mod shared {
    pub mod minwindef {
        use self::winapi::ctypes::{
            c_char, c_float, c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, c_void,
        };
        use crate::winapi_inline as winapi;

        pub type DWORD = c_ulong;
        pub type BOOL = c_int;
        pub type WORD = c_ushort;
        pub type LPDWORD = *mut DWORD;

        STRUCT! {#[debug] struct FILETIME {
            dwLowDateTime: DWORD,
            dwHighDateTime: DWORD,
        }}
    }

    pub mod winerror {
        use self::winapi::shared::minwindef::DWORD;
        use crate::winapi_inline as winapi;

        pub const NO_ERROR: DWORD = 0;
    }
}

pub mod um {
    pub mod wincon {
        use self::winapi::shared::minwindef::{BOOL, DWORD, WORD};
        pub use self::winapi::um::wincontypes::{COORD, PCOORD, PSMALL_RECT, SMALL_RECT};
        use self::winapi::um::winnt::HANDLE;
        use crate::winapi_inline as winapi;

        pub const FOREGROUND_BLUE: WORD = 0x0001;
        pub const FOREGROUND_GREEN: WORD = 0x0002;
        pub const FOREGROUND_RED: WORD = 0x0004;
        pub const FOREGROUND_INTENSITY: WORD = 0x0008;
        pub const BACKGROUND_BLUE: WORD = 0x0010;
        pub const BACKGROUND_GREEN: WORD = 0x0020;
        pub const BACKGROUND_RED: WORD = 0x0040;
        pub const BACKGROUND_INTENSITY: WORD = 0x0080;

        STRUCT! {struct CONSOLE_SCREEN_BUFFER_INFO {
            dwSize: COORD,
            dwCursorPosition: COORD,
            wAttributes: WORD,
            srWindow: SMALL_RECT,
            dwMaximumWindowSize: COORD,
        }}
        pub type PCONSOLE_SCREEN_BUFFER_INFO = *mut CONSOLE_SCREEN_BUFFER_INFO;

        pub const ENABLE_PROCESSED_INPUT: DWORD = 0x0001;
        pub const ENABLE_LINE_INPUT: DWORD = 0x0002;
        pub const ENABLE_ECHO_INPUT: DWORD = 0x0004;
        pub const ENABLE_WINDOW_INPUT: DWORD = 0x0008;
        pub const ENABLE_MOUSE_INPUT: DWORD = 0x0010;
        pub const ENABLE_INSERT_MODE: DWORD = 0x0020;
        pub const ENABLE_QUICK_EDIT_MODE: DWORD = 0x0040;
        pub const ENABLE_EXTENDED_FLAGS: DWORD = 0x0080;
        pub const ENABLE_AUTO_POSITION: DWORD = 0x0100;
        pub const ENABLE_VIRTUAL_TERMINAL_INPUT: DWORD = 0x0200;
        pub const ENABLE_PROCESSED_OUTPUT: DWORD = 0x0001;
        pub const ENABLE_WRAP_AT_EOL_OUTPUT: DWORD = 0x0002;
        pub const ENABLE_VIRTUAL_TERMINAL_PROCESSING: DWORD = 0x0004;
        pub const DISABLE_NEWLINE_AUTO_RETURN: DWORD = 0x0008;
        pub const ENABLE_LVB_GRID_WORLDWIDE: DWORD = 0x0010;

        extern "system" {
            pub fn GetConsoleScreenBufferInfo(
                hConsoleOutput: HANDLE,
                lpConsoleScreenBufferInfo: PCONSOLE_SCREEN_BUFFER_INFO,
            ) -> BOOL;
            pub fn SetConsoleTextAttribute(hConsoleOutput: HANDLE, wAttributes: WORD) -> BOOL;
        }
    }

    pub mod wincontypes {
        use self::winapi::um::winnt::SHORT;
        use crate::winapi_inline as winapi;

        STRUCT! {struct COORD {
            X: SHORT,
            Y: SHORT,
        }}
        pub type PCOORD = *mut COORD;

        STRUCT! {struct SMALL_RECT {
            Left: SHORT,
            Top: SHORT,
            Right: SHORT,
            Bottom: SHORT,
        }}
        pub type PSMALL_RECT = *mut SMALL_RECT;
    }

    pub mod consoleapi {
        use self::winapi::shared::minwindef::{BOOL, DWORD, LPDWORD};
        use self::winapi::um::winnt::HANDLE;
        use crate::winapi_inline as winapi;

        extern "system" {
            pub fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL;
            pub fn SetConsoleMode(hConsoleHandle: HANDLE, dwMode: DWORD) -> BOOL;
        }
    }

    pub mod errhandlingapi {
        use self::winapi::shared::minwindef::DWORD;
        use crate::winapi_inline as winapi;

        extern "system" {
            pub fn GetLastError() -> DWORD;
        }
    }

    pub mod fileapi {
        use self::winapi::shared::minwindef::{BOOL, DWORD, FILETIME};
        use self::winapi::um::winnt::HANDLE;
        use crate::winapi_inline as winapi;

        STRUCT! {struct BY_HANDLE_FILE_INFORMATION {
            dwFileAttributes: DWORD,
            ftCreationTime: FILETIME,
            ftLastAccessTime: FILETIME,
            ftLastWriteTime: FILETIME,
            dwVolumeSerialNumber: DWORD,
            nFileSizeHigh: DWORD,
            nFileSizeLow: DWORD,
            nNumberOfLinks: DWORD,
            nFileIndexHigh: DWORD,
            nFileIndexLow: DWORD,
        }}
        pub type PBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION;
        pub type LPBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION;

        extern "system" {
            pub fn GetFileInformationByHandle(
                hFile: HANDLE,
                lpFileInformation: LPBY_HANDLE_FILE_INFORMATION,
            ) -> BOOL;
            pub fn GetFileType(hFile: HANDLE) -> DWORD;
        }
    }

    pub mod winnt {
        use self::winapi::ctypes::{
            __int64, __uint64, c_char, c_int, c_long, c_short, c_uint, c_ulong, c_void, wchar_t,
        };
        use self::winapi::shared::minwindef::DWORD;
        use crate::winapi_inline as winapi;

        pub type SHORT = c_short;
        pub type HANDLE = *mut c_void;

        pub const FILE_ATTRIBUTE_READONLY: DWORD = 0x00000001;
        pub const FILE_ATTRIBUTE_HIDDEN: DWORD = 0x00000002;
        pub const FILE_ATTRIBUTE_SYSTEM: DWORD = 0x00000004;
        pub const FILE_ATTRIBUTE_DIRECTORY: DWORD = 0x00000010;
        pub const FILE_ATTRIBUTE_ARCHIVE: DWORD = 0x00000020;
        pub const FILE_ATTRIBUTE_DEVICE: DWORD = 0x00000040;
        pub const FILE_ATTRIBUTE_NORMAL: DWORD = 0x00000080;
        pub const FILE_ATTRIBUTE_TEMPORARY: DWORD = 0x00000100;
        pub const FILE_ATTRIBUTE_SPARSE_FILE: DWORD = 0x00000200;
        pub const FILE_ATTRIBUTE_REPARSE_POINT: DWORD = 0x00000400;
        pub const FILE_ATTRIBUTE_COMPRESSED: DWORD = 0x00000800;
        pub const FILE_ATTRIBUTE_OFFLINE: DWORD = 0x00001000;
        pub const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: DWORD = 0x00002000;
        pub const FILE_ATTRIBUTE_ENCRYPTED: DWORD = 0x00004000;
        pub const FILE_ATTRIBUTE_INTEGRITY_STREAM: DWORD = 0x00008000;
        pub const FILE_ATTRIBUTE_VIRTUAL: DWORD = 0x00010000;
        pub const FILE_ATTRIBUTE_NO_SCRUB_DATA: DWORD = 0x00020000;
        pub const FILE_ATTRIBUTE_EA: DWORD = 0x00040000;
        pub const FILE_ATTRIBUTE_PINNED: DWORD = 0x00080000;
        pub const FILE_ATTRIBUTE_UNPINNED: DWORD = 0x00100000;
        pub const FILE_ATTRIBUTE_RECALL_ON_OPEN: DWORD = 0x00040000;
        pub const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: DWORD = 0x00400000;
    }

    pub mod winbase {
        use self::winapi::shared::minwindef::DWORD;
        use crate::winapi_inline as winapi;

        pub const FILE_FLAG_WRITE_THROUGH: DWORD = 0x80000000;
        pub const FILE_FLAG_OVERLAPPED: DWORD = 0x40000000;
        pub const FILE_FLAG_NO_BUFFERING: DWORD = 0x20000000;
        pub const FILE_FLAG_RANDOM_ACCESS: DWORD = 0x10000000;
        pub const FILE_FLAG_SEQUENTIAL_SCAN: DWORD = 0x08000000;
        pub const FILE_FLAG_DELETE_ON_CLOSE: DWORD = 0x04000000;
        pub const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000;
        pub const FILE_FLAG_POSIX_SEMANTICS: DWORD = 0x01000000;
        pub const FILE_FLAG_SESSION_AWARE: DWORD = 0x00800000;
        pub const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000;
        pub const FILE_FLAG_OPEN_NO_RECALL: DWORD = 0x00100000;
        pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000;
        pub const FILE_FLAG_OPEN_REQUIRING_OPLOCK: DWORD = 0x00040000;

        pub const FILE_TYPE_UNKNOWN: DWORD = 0x0000;
        pub const FILE_TYPE_DISK: DWORD = 0x0001;
        pub const FILE_TYPE_CHAR: DWORD = 0x0002;
        pub const FILE_TYPE_PIPE: DWORD = 0x0003;
        pub const FILE_TYPE_REMOTE: DWORD = 0x8000;
    }
}

pub mod ctypes {
    pub use std::os::raw::c_void;

    pub type c_char = i8;
    pub type c_schar = i8;
    pub type c_uchar = u8;
    pub type c_short = i16;
    pub type c_ushort = u16;
    pub type c_int = i32;
    pub type c_uint = u32;
    pub type c_long = i32;
    pub type c_ulong = u32;
    pub type c_longlong = i64;
    pub type c_ulonglong = u64;
    pub type c_float = f32;
    pub type c_double = f64;
    pub type __int8 = i8;
    pub type __uint8 = u8;
    pub type __int16 = i16;
    pub type __uint16 = u16;
    pub type __int32 = i32;
    pub type __uint32 = u32;
    pub type __int64 = i64;
    pub type __uint64 = u64;
    pub type wchar_t = u16;
}
#![allow(bad_style, overflowing_literals, unused_macros, unused_imports, dead_code)] #[macro_use] pub mod macros { macro_rules! STRUCT { (#[debug] $($rest:tt)*) => ( STRUCT!{#[cfg_attr(feature = "impl-debug", derive(Debug))] $($rest)*} ); ($(#[$attrs:meta])* struct $name:ident { $($field:ident: $ftype:ty,)+ }) => ( #[repr(C)] #[derive(Copy)] $(#[$attrs])* pub struct $name { $(pub $field: $ftype,)+ } impl Clone for $name { #[inline] fn clone(&self) -> $name { *self } } #[cfg(feature = "impl-default")] impl Default for $name { #[inline] fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } } ); } } pub mod shared { pub mod minwindef { use self::winapi::ctypes::{ c_char, c_float, c_int, c_long, c_uchar, c_uint, c_ulong, c_ushort, c_void, }; use crate::winapi_inline as winapi; pub type DWORD = c_ulong; pub type BOOL = c_int; pub type WORD = c_ushort; pub type LPDWORD = *mut DWORD; STRUCT! {#[debug] struct FILETIME { dwLowDateTime: DWORD, dwHighDateTime: DWORD, }} } pub mod winerror { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const NO_ERROR: DWORD = 0; } } pub mod um { pub mod wincon { use self::winapi::shared::minwindef::{BOOL, DWORD, WORD}; pub use self::winapi::um::wincontypes::{COORD, PCOORD, PSMALL_RECT, SMALL_RECT}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; pub const FOREGROUND_BLUE: WORD = 0x0001; pub const FOREGROUND_GREEN: WORD = 0x0002; pub const FOREGROUND_RED: WORD = 0x0004; pub const FOREGROUND_INTENSITY: WORD = 0x0008; pub const BACKGROUND_BLUE: WORD = 0x0010; pub const BACKGROUND_GREEN: WORD = 0x0020; pub const BACKGROUND_RED: WORD = 0x0040; pub const BACKGROUND_INTENSITY: WORD = 0x0080; STRUCT! {struct CONSOLE_SCREEN_BUFFER_INFO { dwSize: COORD, dwCursorPosition: COORD, wAttributes: WORD, srWindow: SMALL_RECT, dwMaximumWindowSize: COORD, }} pub type PCONSOLE_SCREEN_BUFFER_INFO = *mut CONSOLE_SCREEN_BUFFER_INFO; pub const ENABLE_PROCESSED_INPUT: DWORD = 0x0001; pub const ENABLE_LINE_INPUT: DWORD = 0x0002; pub const ENABLE_ECHO_INPUT: DWORD = 0x0004; pub const ENABLE_WINDOW_INPUT: DWORD = 0x0008; pub const ENABLE_MOUSE_INPUT: DWORD = 0x0010; pub const ENABLE_INSERT_MODE: DWORD = 0x0020; pub const ENABLE_QUICK_EDIT_MODE: DWORD = 0x0040; pub const ENABLE_EXTENDED_FLAGS: DWORD = 0x0080; pub const ENABLE_AUTO_POSITION: DWORD = 0x0100; pub const ENABLE_VIRTUAL_TERMINAL_INPUT: DWORD = 0x0200; pub const ENABLE_PROCESSED_OUTPUT: DWORD = 0x0001; pub const ENABLE_WRAP_AT_EOL_OUTPUT: DWORD = 0x0002; pub const ENABLE_VIRTUAL_TERMINAL_PROCESSING: DWORD = 0x0004; pub const DISABLE_NEWLINE_AUTO_RETURN: DWORD = 0x0008; pub const ENABLE_LVB_GRID_WORLDWIDE: DWORD = 0x0010; extern "system" { pub fn GetConsoleScreenBufferInfo( hConsoleOutput: HANDLE, lpConsoleScreenBufferInfo: PCONSOLE_SCREEN_BUFFER_INFO, ) -> BOOL; pub fn SetConsoleTextAttribute(hConsoleOutput: HANDLE, wAttributes: WORD) -> BOOL; } } pub mod wincontypes { use self::winapi::um::winnt::SHORT; use crate::winapi_inline as winapi; STRUCT! {struct COORD { X: SHORT, Y: SHORT, }} pub type PCOORD = *mut COORD; STRUCT! 
{struct SMALL_RECT { Left: SHORT, Top: SHORT, Right: SHORT, Bottom: SHORT, }} pub type PSMALL_RECT = *mut SMALL_RECT; } pub mod consoleapi { use self::winapi::shared::minwindef::{BOOL, DWORD, LPDWORD}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; extern "system" { pub fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL; pub fn SetConsoleMode(hConsoleHandle: HANDLE, dwMode: DWORD) -> BOOL; } } pub mod errhandlingapi { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; extern "system" { pub fn GetLastError() -> DWORD; } } pub mod fileapi { use self::winapi::shared::minwindef::{BOOL, DWORD, FILETIME}; use self::winapi::um::winnt::HANDLE; use crate::winapi_inline as winapi; STRUCT! {struct BY_HANDLE_FILE_INFORMATION { dwFileAttributes: DWORD, ftCreationTime: FILETIME, ftLastAccessTime: FILETIME, ftLastWriteTime: FILETIME, dwVolumeSerialNumber: DWORD, nFileSizeHigh: DWORD, nFileSizeLow: DWORD, nNumberOfLinks: DWORD, nFileIndexHigh: DWORD, nFileIndexLow: DWORD, }} pub type PBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMATION; pub type LPBY_HANDLE_FILE_INFORMATION = *mut BY_HANDLE_FILE_INFORMA
0x00020000; pub const FILE_ATTRIBUTE_EA: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_PINNED: DWORD = 0x00080000; pub const FILE_ATTRIBUTE_UNPINNED: DWORD = 0x00100000; pub const FILE_ATTRIBUTE_RECALL_ON_OPEN: DWORD = 0x00040000; pub const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: DWORD = 0x00400000; } pub mod winbase { use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub const FILE_FLAG_WRITE_THROUGH: DWORD = 0x80000000; pub const FILE_FLAG_OVERLAPPED: DWORD = 0x40000000; pub const FILE_FLAG_NO_BUFFERING: DWORD = 0x20000000; pub const FILE_FLAG_RANDOM_ACCESS: DWORD = 0x10000000; pub const FILE_FLAG_SEQUENTIAL_SCAN: DWORD = 0x08000000; pub const FILE_FLAG_DELETE_ON_CLOSE: DWORD = 0x04000000; pub const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000; pub const FILE_FLAG_POSIX_SEMANTICS: DWORD = 0x01000000; pub const FILE_FLAG_SESSION_AWARE: DWORD = 0x00800000; pub const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000; pub const FILE_FLAG_OPEN_NO_RECALL: DWORD = 0x00100000; pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000; pub const FILE_FLAG_OPEN_REQUIRING_OPLOCK: DWORD = 0x00040000; pub const FILE_TYPE_UNKNOWN: DWORD = 0x0000; pub const FILE_TYPE_DISK: DWORD = 0x0001; pub const FILE_TYPE_CHAR: DWORD = 0x0002; pub const FILE_TYPE_PIPE: DWORD = 0x0003; pub const FILE_TYPE_REMOTE: DWORD = 0x8000; } } pub mod ctypes { pub use std::os::raw::c_void; pub type c_char = i8; pub type c_schar = i8; pub type c_uchar = u8; pub type c_short = i16; pub type c_ushort = u16; pub type c_int = i32; pub type c_uint = u32; pub type c_long = i32; pub type c_ulong = u32; pub type c_longlong = i64; pub type c_ulonglong = u64; pub type c_float = f32; pub type c_double = f64; pub type __int8 = i8; pub type __uint8 = u8; pub type __int16 = i16; pub type __uint16 = u16; pub type __int32 = i32; pub type __uint32 = u32; pub type __int64 = i64; pub type __uint64 = u64; pub type wchar_t = u16; }
TION; extern "system" { pub fn GetFileInformationByHandle( hFile: HANDLE, lpFileInformation: LPBY_HANDLE_FILE_INFORMATION, ) -> BOOL; pub fn GetFileType(hFile: HANDLE) -> DWORD; } } pub mod winnt { use self::winapi::ctypes::{ __int64, __uint64, c_char, c_int, c_long, c_short, c_uint, c_ulong, c_void, wchar_t, }; use self::winapi::shared::minwindef::DWORD; use crate::winapi_inline as winapi; pub type SHORT = c_short; pub type HANDLE = *mut c_void; pub const FILE_ATTRIBUTE_READONLY: DWORD = 0x00000001; pub const FILE_ATTRIBUTE_HIDDEN: DWORD = 0x00000002; pub const FILE_ATTRIBUTE_SYSTEM: DWORD = 0x00000004; pub const FILE_ATTRIBUTE_DIRECTORY: DWORD = 0x00000010; pub const FILE_ATTRIBUTE_ARCHIVE: DWORD = 0x00000020; pub const FILE_ATTRIBUTE_DEVICE: DWORD = 0x00000040; pub const FILE_ATTRIBUTE_NORMAL: DWORD = 0x00000080; pub const FILE_ATTRIBUTE_TEMPORARY: DWORD = 0x00000100; pub const FILE_ATTRIBUTE_SPARSE_FILE: DWORD = 0x00000200; pub const FILE_ATTRIBUTE_REPARSE_POINT: DWORD = 0x00000400; pub const FILE_ATTRIBUTE_COMPRESSED: DWORD = 0x00000800; pub const FILE_ATTRIBUTE_OFFLINE: DWORD = 0x00001000; pub const FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: DWORD = 0x00002000; pub const FILE_ATTRIBUTE_ENCRYPTED: DWORD = 0x00004000; pub const FILE_ATTRIBUTE_INTEGRITY_STREAM: DWORD = 0x00008000; pub const FILE_ATTRIBUTE_VIRTUAL: DWORD = 0x00010000; pub const FILE_ATTRIBUTE_NO_SCRUB_DATA: DWORD =
random
[ { "content": "/// Returns the file type of the given handle.\n\n///\n\n/// If there was a problem querying the file type, then an error is returned.\n\n///\n\n/// This corresponds to calling [`GetFileType`].\n\n///\n\n/// [`GetFileType`]: https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-getfiletype\n\npub fn typ<H: AsHandleRef>(h: H) -> io::Result<Type> {\n\n unsafe {\n\n let rc = GetFileType(h.as_raw());\n\n if rc == 0 && GetLastError() != NO_ERROR {\n\n return Err(io::Error::last_os_error());\n\n }\n\n Ok(Type(rc))\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 0, "score": 74644.60496642096 }, { "content": "/// Returns true if and only if the given file attributes contain the\n\n/// `FILE_ATTRIBUTE_HIDDEN` attribute.\n\npub fn is_hidden(file_attributes: u64) -> bool {\n\n file_attributes & (winnt::FILE_ATTRIBUTE_HIDDEN as u64) > 0\n\n}\n\n\n\n/// Represents file information such as creation time, file size, etc.\n\n///\n\n/// This wraps a [`BY_HANDLE_FILE_INFORMATION`].\n\n///\n\n/// [`BY_HANDLE_FILE_INFORMATION`]: https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/ns-fileapi-_by_handle_file_information\n\n#[derive(Clone)]\n\npub struct Information(BY_HANDLE_FILE_INFORMATION);\n\n\n\nimpl Information {\n\n /// Returns file attributes.\n\n ///\n\n /// This corresponds to `dwFileAttributes`.\n\n pub fn file_attributes(&self) -> u64 {\n\n self.0.dwFileAttributes as u64\n\n }\n\n\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 1, "score": 71894.69146158801 }, { "content": "/// Return various pieces of information about a file.\n\n///\n\n/// This includes information such as a file's size, unique identifier and\n\n/// time related fields.\n\n///\n\n/// This corresponds to calling [`GetFileInformationByHandle`].\n\n///\n\n/// [`GetFileInformationByHandle`]: https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-getfileinformationbyhandle\n\npub fn information<H: AsHandleRef>(\n\n h: H,\n\n) -> io::Result<Information> {\n\n unsafe {\n\n let mut info: BY_HANDLE_FILE_INFORMATION = mem::zeroed();\n\n let rc = GetFileInformationByHandle(h.as_raw(), &mut info);\n\n if rc == 0 {\n\n return Err(io::Error::last_os_error());\n\n };\n\n Ok(Information(info))\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 2, "score": 69569.26127489419 }, { "content": "/// Query the given handle for information about the console's screen buffer.\n\n///\n\n/// The given handle should represent a console. 
Otherwise, an error is\n\n/// returned.\n\n///\n\n/// This corresponds to calling [`GetConsoleScreenBufferInfo`].\n\n///\n\n/// [`GetConsoleScreenBufferInfo`]: https://docs.microsoft.com/en-us/windows/console/getconsolescreenbufferinfo\n\npub fn screen_buffer_info<H: AsHandleRef>(\n\n h: H,\n\n) -> io::Result<ScreenBufferInfo> {\n\n unsafe {\n\n let mut info: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();\n\n let rc = GetConsoleScreenBufferInfo(h.as_raw(), &mut info);\n\n if rc == 0 {\n\n return Err(io::Error::last_os_error());\n\n }\n\n Ok(ScreenBufferInfo(info))\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 3, "score": 65139.70024694548 }, { "content": "/// Set the text attributes of the console represented by the given handle.\n\n///\n\n/// This corresponds to calling [`SetConsoleTextAttribute`].\n\n///\n\n/// [`SetConsoleTextAttribute`]: https://docs.microsoft.com/en-us/windows/console/setconsoletextattribute\n\npub fn set_text_attributes<H: AsHandleRef>(\n\n h: H,\n\n attributes: u16,\n\n) -> io::Result<()> {\n\n if unsafe { SetConsoleTextAttribute(h.as_raw(), attributes) } == 0 {\n\n Err(io::Error::last_os_error())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 4, "score": 65136.51251271292 }, { "content": "/// Query the mode of the console represented by the given handle.\n\n///\n\n/// This corresponds to calling [`GetConsoleMode`], which describes the return\n\n/// value.\n\n///\n\n/// [`GetConsoleMode`]: https://docs.microsoft.com/en-us/windows/console/getconsolemode\n\npub fn mode<H: AsHandleRef>(h: H) -> io::Result<u32> {\n\n let mut mode = 0;\n\n if unsafe { GetConsoleMode(h.as_raw(), &mut mode) } == 0 {\n\n Err(io::Error::last_os_error())\n\n } else {\n\n Ok(mode)\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 5, "score": 54793.67165243735 }, { "content": "fn filetime_to_u64(t: FILETIME) -> Option<u64> {\n\n let v = ((t.dwHighDateTime as u64) << 32) | (t.dwLowDateTime as u64);\n\n if v == 0 {\n\n None\n\n } else {\n\n Some(v)\n\n }\n\n}\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 6, "score": 51686.56920988021 }, { "content": "/// Construct borrowed and valid Windows handles from file-like objects.\n\npub trait AsHandleRef {\n\n /// A borrowed handle that wraps the raw handle of the `Self` object.\n\n fn as_handle_ref(&self) -> HandleRef;\n\n\n\n /// A convenience routine for extracting a `HandleRef` from `Self`, and\n\n /// then extracting a raw handle from the `HandleRef`.\n\n fn as_raw(&self) -> RawHandle {\n\n self.as_handle_ref().as_raw_handle()\n\n }\n\n}\n\n\n\nimpl<'a, T: AsHandleRef> AsHandleRef for &'a T {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n (**self).as_handle_ref()\n\n }\n\n}\n\n\n\nimpl AsHandleRef for Handle {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 7, "score": 50892.09718699785 }, { "content": "/// Set the mode of the console represented by the given handle.\n\n///\n\n/// This corresponds to calling [`SetConsoleMode`], which describes the format\n\n/// of the mode parameter.\n\n///\n\n/// [`SetConsoleMode`]: https://docs.microsoft.com/en-us/windows/console/setconsolemode\n\npub fn set_mode<H: AsHandleRef>(h: H, mode: u32) -> io::Result<()> {\n\n if unsafe { SetConsoleMode(h.as_raw(), mode) } == 0 {\n\n Err(io::Error::last_os_error())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Represents 
console screen buffer information such as size, cursor position\n\n/// and styling attributes.\n\n///\n\n/// This wraps a [`CONSOLE_SCREEN_BUFFER_INFO`].\n\n///\n\n/// [`CONSOLE_SCREEN_BUFFER_INFO`]: https://docs.microsoft.com/en-us/windows/console/console-screen-buffer-info-str\n\n#[derive(Clone)]\n\npub struct ScreenBufferInfo(CONSOLE_SCREEN_BUFFER_INFO);\n\n\n\nimpl ScreenBufferInfo {\n\n /// Returns the size of the console screen buffer, in character columns and\n\n /// rows.\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 8, "score": 50881.16270536634 }, { "content": "#[derive(Debug)]\n\nstruct HandleRefInner(Option<File>);\n\n\n\nimpl Drop for HandleRefInner {\n\n fn drop(&mut self) {\n\n self.0.take().unwrap().into_raw_handle();\n\n }\n\n}\n\n\n\nimpl AsRawHandle for HandleRef {\n\n fn as_raw_handle(&self) -> RawHandle {\n\n self.as_file().as_raw_handle()\n\n }\n\n}\n\n\n\nimpl Clone for HandleRef {\n\n fn clone(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 22, "score": 45509.00113248994 }, { "content": "#[cfg(windows)]\n\n#[derive(Clone, Debug)]\n\nstruct WindowsBuffer {\n\n /// The actual content that should be printed.\n\n buf: Vec<u8>,\n\n /// A sequence of position oriented color specifications. Namely, each\n\n /// element is a position and a color spec, where the color spec should\n\n /// be applied at the position inside of `buf`.\n\n ///\n\n /// A missing color spec implies the underlying console should be reset.\n\n colors: Vec<(usize, Option<ColorSpec>)>,\n\n}\n\n\n\n#[cfg(windows)]\n\nimpl WindowsBuffer {\n\n /// Create a new empty buffer for Windows console coloring.\n\n fn new() -> WindowsBuffer {\n\n WindowsBuffer {\n\n buf: vec![],\n\n colors: vec![],\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 23, "score": 39291.93793900889 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct TextAttributes {\n\n fg_color: Color,\n\n fg_intense: Intense,\n\n bg_color: Color,\n\n bg_intense: Intense,\n\n}\n\n\n\nimpl TextAttributes {\n\n fn to_word(&self) -> WORD {\n\n let mut w = 0;\n\n w |= self.fg_color.to_fg();\n\n w |= self.fg_intense.to_fg();\n\n w |= self.bg_color.to_bg();\n\n w |= self.bg_intense.to_bg();\n\n w\n\n }\n\n\n\n fn from_word(word: WORD) -> TextAttributes {\n\n TextAttributes {\n\n fg_color: Color::from_fg(word),\n", "file_path": "wincolor/src/win.rs", "rank": 24, "score": 37884.44453832426 }, { "content": "#[cfg(windows)]\n\nfn write_lossy_utf8<W: io::Write>(mut w: W, buf: &[u8]) -> io::Result<usize> {\n\n match ::std::str::from_utf8(buf) {\n\n Ok(s) => w.write(s.as_bytes()),\n\n Err(ref e) if e.valid_up_to() == 0 => {\n\n w.write(b\"\\xEF\\xBF\\xBD\")?;\n\n Ok(1)\n\n }\n\n Err(e) => w.write(&buf[..e.valid_up_to()]),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{\n\n Ansi, Color, ParseColorError, ParseColorErrorKind, StandardStream,\n\n };\n\n\n\n fn assert_is_send<T: Send>() {}\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 25, "score": 36632.61716721131 }, { "content": "struct LossyStandardStream<W> {\n\n wtr: W,\n\n #[cfg(windows)]\n\n is_console: bool,\n\n}\n\n\n\nimpl<W: io::Write> LossyStandardStream<W> {\n\n #[cfg(not(windows))]\n\n fn new(wtr: W) -> LossyStandardStream<W> {\n\n LossyStandardStream { wtr: wtr }\n\n }\n\n\n\n #[cfg(windows)]\n\n fn new(wtr: W) -> LossyStandardStream<W> {\n\n let is_console =\n\n wincolor::Console::stdout().is_ok()\n\n || 
wincolor::Console::stderr().is_ok();\n\n LossyStandardStream { wtr: wtr, is_console: is_console }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 33268.937692490494 }, { "content": "/// This trait describes the behavior of writers that support colored output.\n\npub trait WriteColor: io::Write {\n\n /// Returns true if and only if the underlying writer supports colors.\n\n fn supports_color(&self) -> bool;\n\n\n\n /// Set the color settings of the writer.\n\n ///\n\n /// Subsequent writes to this writer will use these settings until either\n\n /// `reset` is called or new color settings are set.\n\n ///\n\n /// If there was a problem setting the color settings, then an error is\n\n /// returned.\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()>;\n\n\n\n /// Reset the current color settings to their original settings.\n\n ///\n\n /// If there was a problem resetting the color settings, then an error is\n\n /// returned.\n\n fn reset(&mut self) -> io::Result<()>;\n\n\n\n /// Returns true if and only if the underlying writer must synchronously\n", "file_path": "src/lib.rs", "rank": 27, "score": 28434.2311470736 }, { "content": "enum StandardStreamType {\n\n Stdout,\n\n Stderr,\n\n StdoutBuffered,\n\n StderrBuffered,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 23598.776025746025 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum HandleKind {\n\n Stdout,\n\n Stderr,\n\n}\n\n\n\nimpl HandleKind {\n\n fn handle(&self) -> winutil::HandleRef {\n\n match *self {\n\n HandleKind::Stdout => winutil::HandleRef::stdout(),\n\n HandleKind::Stderr => winutil::HandleRef::stderr(),\n\n }\n\n }\n\n}\n\n\n\nimpl Console {\n\n /// Get a console for a standard I/O stream.\n\n fn create_for_stream(kind: HandleKind) -> io::Result<Console> {\n\n let h = kind.handle();\n\n let info = winutil::console::screen_buffer_info(&h)?;\n\n let attr = TextAttributes::from_word(info.attributes());\n", "file_path": "wincolor/src/win.rs", "rank": 29, "score": 23392.225446979024 }, { "content": "use std::io;\n\nuse std::mem;\n\n\n\nuse crate::winapi_inline as winapi;\n\n\n\nuse self::winapi::shared::minwindef::FILETIME;\n\nuse self::winapi::shared::winerror::NO_ERROR;\n\nuse self::winapi::um::errhandlingapi::GetLastError;\n\nuse self::winapi::um::fileapi::{\n\n BY_HANDLE_FILE_INFORMATION,\n\n GetFileInformationByHandle, GetFileType,\n\n};\n\nuse self::winapi::um::winnt;\n\n\n\nuse super::AsHandleRef;\n\n\n\n/// Return various pieces of information about a file.\n\n///\n\n/// This includes information such as a file's size, unique identifier and\n\n/// time related fields.\n\n///\n\n/// This corresponds to calling [`GetFileInformationByHandle`].\n\n///\n\n/// [`GetFileInformationByHandle`]: https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-getfileinformationbyhandle\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 30, "score": 27.44517838946077 }, { "content": "#[derive(Clone)]\n\npub struct Type(u32);\n\n\n\nimpl Type {\n\n /// Returns true if this type represents a character file, which is\n\n /// typically an LPT device or a console.\n\n pub fn is_char(&self) -> bool {\n\n self.0 == self::winapi::um::winbase::FILE_TYPE_CHAR\n\n }\n\n\n\n /// Returns true if this type represents a disk file.\n\n pub fn is_disk(&self) -> bool {\n\n self.0 == self::winapi::um::winbase::FILE_TYPE_DISK\n\n }\n\n\n\n /// Returns true if this type represents a sock, named pipe or an\n\n /// anonymous pipe.\n\n pub fn is_pipe(&self) -> bool {\n\n self.0 == 
self::winapi::um::winbase::FILE_TYPE_PIPE\n\n }\n\n\n\n /// Returns true if this type is not known.\n\n ///\n\n /// Note that this never corresponds to a failure.\n\n pub fn is_unknown(&self) -> bool {\n\n self.0 == self::winapi::um::winbase::FILE_TYPE_UNKNOWN\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/file.rs", "rank": 31, "score": 23.606805112185743 }, { "content": "use std::io;\n\n\n\nuse crate::winapi_inline as winapi;\n\nmod winutil;\n\n\n\nuse self::winapi::shared::minwindef::{WORD};\n\nuse self::winapi::um::wincon::{\n\n self,\n\n FOREGROUND_BLUE as FG_BLUE,\n\n FOREGROUND_GREEN as FG_GREEN,\n\n FOREGROUND_RED as FG_RED,\n\n FOREGROUND_INTENSITY as FG_INTENSITY,\n\n};\n\n\n\nconst FG_CYAN: WORD = FG_BLUE | FG_GREEN;\n\nconst FG_MAGENTA: WORD = FG_BLUE | FG_RED;\n\nconst FG_YELLOW: WORD = FG_GREEN | FG_RED;\n\nconst FG_WHITE: WORD = FG_BLUE | FG_GREEN | FG_RED;\n\n\n\n/// A Windows console.\n", "file_path": "wincolor/src/win.rs", "rank": 32, "score": 22.786515678179825 }, { "content": "use std::io;\n\nuse std::mem;\n\n\n\nuse crate::winapi_inline as winapi;\n\n\n\nuse self::winapi::um::consoleapi::{GetConsoleMode, SetConsoleMode};\n\nuse self::winapi::um::wincon::{\n\n CONSOLE_SCREEN_BUFFER_INFO,\n\n GetConsoleScreenBufferInfo, SetConsoleTextAttribute,\n\n};\n\n\n\nuse super::AsHandleRef;\n\n\n\n/// Query the given handle for information about the console's screen buffer.\n\n///\n\n/// The given handle should represent a console. Otherwise, an error is\n\n/// returned.\n\n///\n\n/// This corresponds to calling [`GetConsoleScreenBufferInfo`].\n\n///\n\n/// [`GetConsoleScreenBufferInfo`]: https://docs.microsoft.com/en-us/windows/console/getconsolescreenbufferinfo\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 33, "score": 20.39617408657312 }, { "content": "use std::fs::File;\n\nuse std::io;\n\nuse std::os::windows::io::{\n\n RawHandle,\n\n AsRawHandle, FromRawHandle, IntoRawHandle,\n\n};\n\nuse std::path::Path;\n\nuse std::process;\n\n\n\nuse crate::winapi_inline as winapi;\n\n\n\n/// A handle represents an owned and valid Windows handle to a file-like\n\n/// object.\n\n///\n\n/// When an owned handle is dropped, then the underlying raw handle is closed.\n\n/// To get a borrowed handle, use `HandleRef`.\n\n#[derive(Debug)]\n\npub struct Handle(File);\n\n\n\nimpl AsRawHandle for Handle {\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 34, "score": 19.074802052347493 }, { "content": " Ok(())\n\n }\n\n\n\n #[inline]\n\n fn reset(&mut self) -> io::Result<()> {\n\n self.push(None);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\n/// A color specification.\n\n#[derive(Clone, Debug, Default, Eq, PartialEq)]\n\npub struct ColorSpec {\n\n fg_color: Option<Color>,\n\n bg_color: Option<Color>,\n", "file_path": "src/lib.rs", "rank": 35, "score": 18.648894671674718 }, { "content": " /// Return this handle as a standard `File` reference.\n\n pub fn as_file(&self) -> &File {\n\n &self.0\n\n }\n\n\n\n /// Return this handle as a standard `File` mutable reference.\n\n pub fn as_file_mut(&mut self) -> &mut File {\n\n &mut self.0\n\n }\n\n}\n\n\n\n/// Represents a borrowed and valid Windows handle to a file-like object, such\n\n/// as stdin/stdout/stderr or an actual file.\n\n///\n\n/// When a borrowed handle is dropped, then the underlying raw handle is\n\n/// **not** closed. 
To get an owned handle, use `Handle`.\n\n#[derive(Debug)]\n\npub struct HandleRef(HandleRefInner);\n\n\n\n/// The representation of a HandleRef, on which we define a custom Drop impl\n\n/// that avoids closing the underlying raw handle.\n\n#[derive(Debug)]\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 36, "score": 17.227187939620876 }, { "content": "/// The set of available colors for use with a Windows console.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Color {\n\n Black,\n\n Blue,\n\n Green,\n\n Red,\n\n Cyan,\n\n Magenta,\n\n Yellow,\n\n White,\n\n}\n\n\n\nimpl Color {\n\n fn to_bg(&self) -> WORD {\n\n self.to_fg() << 4\n\n }\n\n\n\n fn from_bg(word: WORD) -> Color {\n", "file_path": "wincolor/src/win.rs", "rank": 37, "score": 16.234451568949215 }, { "content": " fg_intense: Intense::from_fg(word),\n\n bg_color: Color::from_bg(word),\n\n bg_intense: Intense::from_bg(word),\n\n }\n\n }\n\n}\n\n\n\n/// Whether to use intense colors or not.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Intense {\n\n Yes,\n\n No,\n\n}\n\n\n\nimpl Intense {\n\n fn to_bg(&self) -> WORD {\n\n self.to_fg() << 4\n\n }\n\n\n", "file_path": "wincolor/src/win.rs", "rank": 38, "score": 16.21845783642678 }, { "content": "```\n\n*/\n\n\n\n#![deny(missing_docs)]\n\n\n\n#[cfg(windows)]\n\nmod winapi_inline;\n\n\n\n#[cfg(windows)]\n\npub use win::*;\n\n\n\n#[cfg(windows)]\n\nmod win;\n", "file_path": "wincolor/src/lib.rs", "rank": 39, "score": 15.711053891713036 }, { "content": " fn reset(&mut self) -> io::Result<()> { (&mut **self).reset() }\n\n fn is_synchronous(&self) -> bool { (&**self).is_synchronous() }\n\n}\n\n\n\nimpl<T: ?Sized + WriteColor> WriteColor for Box<T> {\n\n fn supports_color(&self) -> bool { (&**self).supports_color() }\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n (&mut **self).set_color(spec)\n\n }\n\n fn reset(&mut self) -> io::Result<()> { (&mut **self).reset() }\n\n fn is_synchronous(&self) -> bool { (&**self).is_synchronous() }\n\n}\n\n\n\n/// ColorChoice represents the color preferences of an end user.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum ColorChoice {\n\n /// Try very hard to emit colors. This includes emitting ANSI colors\n\n /// on Windows if the console API is unavailable.\n\n Always,\n\n /// AlwaysAnsi is like Always, except it never tries to use anything other\n", "file_path": "src/lib.rs", "rank": 40, "score": 15.15223146405056 }, { "content": " #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.buf.extend_from_slice(buf);\n\n Ok(buf.len())\n\n }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(windows)]\n\nimpl WriteColor for WindowsBuffer {\n\n #[inline]\n\n fn supports_color(&self) -> bool { true }\n\n\n\n #[inline]\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n self.push(Some(spec.clone()));\n", "file_path": "src/lib.rs", "rank": 41, "score": 14.027410432595548 }, { "content": "///\n\n/// This represents a very limited set of functionality available to a Windows\n\n/// console. In particular, it can only change text attributes such as color\n\n/// and intensity.\n\n///\n\n/// There is no way to \"write\" to this console. 
Simply write to\n\n/// stdout or stderr instead, while interleaving instructions to the console\n\n/// to change text attributes.\n\n///\n\n/// A common pitfall when using a console is to forget to flush writes to\n\n/// stdout before setting new text attributes.\n\n#[derive(Debug)]\n\npub struct Console {\n\n kind: HandleKind,\n\n start_attr: TextAttributes,\n\n cur_attr: TextAttributes,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "wincolor/src/win.rs", "rank": 42, "score": 14.019934335615249 }, { "content": " /// this constructor.\n\n pub unsafe fn from_raw_handle(handle: RawHandle) -> HandleRef {\n\n HandleRef(HandleRefInner(Some(File::from_raw_handle(handle))))\n\n }\n\n\n\n /// Return this handle as a standard `File` reference.\n\n pub fn as_file(&self) -> &File {\n\n (self.0).0.as_ref().unwrap()\n\n }\n\n\n\n /// Return this handle as a standard `File` mutable reference.\n\n pub fn as_file_mut(&mut self) -> &mut File {\n\n (self.0).0.as_mut().unwrap()\n\n }\n\n}\n\n\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 43, "score": 13.77984388186929 }, { "content": " pub fn from_path<P: AsRef<Path>>(path: P) -> io::Result<Handle> {\n\n Ok(Handle::from_file(File::open(path)?))\n\n }\n\n\n\n /// Like `from_path`, but supports opening directory handles as well.\n\n ///\n\n /// If you use `from_path` on a directory, then subsequent queries using\n\n /// that handle will fail.\n\n pub fn from_path_any<P: AsRef<Path>>(path: P) -> io::Result<Handle> {\n\n use std::fs::OpenOptions;\n\n use std::os::windows::fs::OpenOptionsExt;\n\n use self::winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS;\n\n\n\n let file = OpenOptions::new()\n\n .read(true)\n\n .custom_flags(FILE_FLAG_BACKUP_SEMANTICS)\n\n .open(path)?;\n\n Ok(Handle::from_file(file))\n\n }\n\n\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 44, "score": 13.243413818271705 }, { "content": " }\n\n}\n\n\n\nimpl AsHandleRef for HandleRef {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n self.clone()\n\n }\n\n}\n\n\n\nimpl AsHandleRef for File {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n HandleRef::from_file(self)\n\n }\n\n}\n\n\n\nimpl AsHandleRef for io::Stdin {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 45, "score": 13.198357893447927 }, { "content": " }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.0.flush()\n\n }\n\n}\n\n\n\nimpl<W: io::Write> WriteColor for NoColor<W> {\n\n #[inline]\n\n fn supports_color(&self) -> bool { false }\n\n\n\n #[inline]\n\n fn set_color(&mut self, _: &ColorSpec) -> io::Result<()> { Ok(()) }\n\n\n\n #[inline]\n\n fn reset(&mut self) -> io::Result<()> { Ok(()) }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool { false }\n", "file_path": "src/lib.rs", "rank": 46, "score": 13.114214569425208 }, { "content": "impl WriteColor for StandardStream {\n\n #[inline]\n\n fn supports_color(&self) -> bool { self.wtr.supports_color() }\n\n\n\n #[inline]\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n self.wtr.set_color(spec)\n\n }\n\n\n\n #[inline]\n\n fn reset(&mut self) -> io::Result<()> { self.wtr.reset() }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool { self.wtr.is_synchronous() }\n\n}\n\n\n\nimpl<'a> io::Write for StandardStreamLock<'a> {\n\n #[inline]\n\n fn write(&mut self, b: &[u8]) -> io::Result<usize> { self.wtr.write(b) }\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 
13.036349908251285 }, { "content": "\n\n /// Applies the current text attributes.\n\n fn set(&mut self) -> io::Result<()> {\n\n winutil::console::set_text_attributes(\n\n self.kind.handle(),\n\n self.cur_attr.to_word(),\n\n )\n\n }\n\n\n\n /// Apply the given intensity and color attributes to the console\n\n /// foreground.\n\n ///\n\n /// If there was a problem setting attributes on the console, then an error\n\n /// is returned.\n\n pub fn fg(&mut self, intense: Intense, color: Color) -> io::Result<()> {\n\n self.cur_attr.fg_color = color;\n\n self.cur_attr.fg_intense = intense;\n\n self.set()\n\n }\n\n\n", "file_path": "wincolor/src/win.rs", "rank": 48, "score": 12.942460559751414 }, { "content": " Ok(Console {\n\n kind: kind,\n\n start_attr: attr,\n\n cur_attr: attr,\n\n })\n\n }\n\n\n\n /// Create a new Console to stdout.\n\n ///\n\n /// If there was a problem creating the console, then an error is returned.\n\n pub fn stdout() -> io::Result<Console> {\n\n Self::create_for_stream(HandleKind::Stdout)\n\n }\n\n\n\n /// Create a new Console to stderr.\n\n ///\n\n /// If there was a problem creating the console, then an error is returned.\n\n pub fn stderr() -> io::Result<Console> {\n\n Self::create_for_stream(HandleKind::Stderr)\n\n }\n", "file_path": "wincolor/src/win.rs", "rank": 49, "score": 12.937233912952893 }, { "content": "impl HandleRef {\n\n /// Create a borrowed handle to stdin.\n\n ///\n\n /// When the returned handle is dropped, stdin is not closed.\n\n pub fn stdin() -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(io::stdin().as_raw_handle()) }\n\n }\n\n\n\n /// Create a handle to stdout.\n\n ///\n\n /// When the returned handle is dropped, stdout is not closed.\n\n pub fn stdout() -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(io::stdout().as_raw_handle()) }\n\n }\n\n\n\n /// Create a handle to stderr.\n\n ///\n\n /// When the returned handle is dropped, stderr is not closed.\n\n pub fn stderr() -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(io::stderr().as_raw_handle()) }\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 50, "score": 12.771957483394218 }, { "content": " #[inline]\n\n fn flush(&mut self) -> io::Result<()> { self.wtr.flush() }\n\n}\n\n\n\nimpl<'a> WriteColor for StandardStreamLock<'a> {\n\n #[inline]\n\n fn supports_color(&self) -> bool { self.wtr.supports_color() }\n\n\n\n #[inline]\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n self.wtr.set_color(spec)\n\n }\n\n\n\n #[inline]\n\n fn reset(&mut self) -> io::Result<()> { self.wtr.reset() }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool { self.wtr.is_synchronous() }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 12.65123178584275 }, { "content": "#![allow(dead_code)]\n\n\n\n#[cfg(windows)]\n\npub use self::win::*;\n\n\n\n/// Safe routines for dealing with the Windows console.\n\n#[cfg(windows)]\n\npub mod console;\n\n/// Safe routines for dealing with files and handles on Windows.\n\n#[cfg(windows)]\n\npub mod file;\n\n#[cfg(windows)]\n\nmod win;\n", "file_path": "wincolor/src/win/winutil.rs", "rank": 52, "score": 12.395137094186884 }, { "content": "impl<W: io::Write> io::Write for Ansi<W> {\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.0.write(buf)\n\n }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.0.flush()\n\n }\n\n}\n\n\n\nimpl<W: io::Write> WriteColor for Ansi<W> {\n\n #[inline]\n\n fn supports_color(&self) -> bool { true }\n\n\n\n #[inline]\n\n fn set_color(&mut 
self, spec: &ColorSpec) -> io::Result<()> {\n\n self.reset()?;\n\n if spec.bold {\n", "file_path": "src/lib.rs", "rank": 53, "score": 12.220173823518058 }, { "content": "impl io::Write for BufferedStandardStream {\n\n #[inline]\n\n fn write(&mut self, b: &[u8]) -> io::Result<usize> { self.wtr.write(b) }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> { self.wtr.flush() }\n\n}\n\n\n\nimpl WriteColor for BufferedStandardStream {\n\n #[inline]\n\n fn supports_color(&self) -> bool { self.wtr.supports_color() }\n\n\n\n #[inline]\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n if self.is_synchronous() {\n\n self.wtr.flush()?;\n\n }\n\n self.wtr.set_color(spec)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 12.097109259328283 }, { "content": " ///\n\n /// On Windows systems, this will output the ANSI escape sequence\n\n /// that will print a brighter version of the color specified.\n\n pub fn intense(&self) -> bool { self.intense }\n\n\n\n /// Set whether the text is intense or not.\n\n ///\n\n /// On Unix-like systems, this will output the ANSI escape sequence\n\n /// that will print a high-intensity version of the color\n\n /// specified.\n\n ///\n\n /// On Windows systems, this will output the ANSI escape sequence\n\n /// that will print a brighter version of the color specified.\n\n pub fn set_intense(&mut self, yes: bool) -> &mut ColorSpec {\n\n self.intense = yes;\n\n self\n\n }\n\n\n\n /// Returns true if this color specification has no colors or styles.\n\n pub fn is_none(&self) -> bool {\n", "file_path": "src/lib.rs", "rank": 55, "score": 12.090602095560282 }, { "content": " bold: bool,\n\n intense: bool,\n\n underline: bool,\n\n}\n\n\n\nimpl ColorSpec {\n\n /// Create a new color specification that has no colors or styles.\n\n pub fn new() -> ColorSpec {\n\n ColorSpec::default()\n\n }\n\n\n\n /// Get the foreground color.\n\n pub fn fg(&self) -> Option<&Color> { self.fg_color.as_ref() }\n\n\n\n /// Set the foreground color.\n\n pub fn set_fg(&mut self, color: Option<Color>) -> &mut ColorSpec {\n\n self.fg_color = color;\n\n self\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 56, "score": 11.83623096181228 }, { "content": "\n\nimpl AsHandleRef for io::Stdout {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n\n\n\nimpl AsHandleRef for io::Stderr {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n\n\n\nimpl AsHandleRef for process::ChildStdin {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n\n\n\nimpl AsHandleRef for process::ChildStdout {\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 57, "score": 11.62856046586435 }, { "content": "}\n\n\n\n/// Satisfies `WriteColor` using standard ANSI escape sequences.\n\npub struct Ansi<W>(W);\n\n\n\nimpl<W: Write> Ansi<W> {\n\n /// Create a new writer that satisfies `WriteColor` using standard ANSI\n\n /// escape sequences.\n\n pub fn new(wtr: W) -> Ansi<W> { Ansi(wtr) }\n\n\n\n /// Consume this `Ansi` value and return the inner writer.\n\n pub fn into_inner(self) -> W { self.0 }\n\n\n\n /// Return a reference to the inner writer.\n\n pub fn get_ref(&self) -> &W { &self.0 }\n\n\n\n /// Return a mutable reference to the inner writer.\n\n pub fn get_mut(&mut self) -> &mut W { &mut self.0 }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 11.558866321561945 }, { 
"content": " }\n\n\n\n #[inline]\n\n fn reset(&mut self) -> io::Result<()> {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut w) => w.reset(),\n\n BufferInner::Ansi(ref mut w) => w.reset(),\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref mut w) => w.reset(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\n/// Satisfies `WriteColor` but ignores all color options.\n\npub struct NoColor<W>(W);\n", "file_path": "src/lib.rs", "rank": 59, "score": 11.512435524322601 }, { "content": "\n\nimpl<W: Write> NoColor<W> {\n\n /// Create a new writer that satisfies `WriteColor` but drops all color\n\n /// information.\n\n pub fn new(wtr: W) -> NoColor<W> { NoColor(wtr) }\n\n\n\n /// Consume this `NoColor` value and return the inner writer.\n\n pub fn into_inner(self) -> W { self.0 }\n\n\n\n /// Return a reference to the inner writer.\n\n pub fn get_ref(&self) -> &W { &self.0 }\n\n\n\n /// Return a mutable reference to the inner writer.\n\n pub fn get_mut(&mut self) -> &mut W { &mut self.0 }\n\n}\n\n\n\nimpl<W: io::Write> io::Write for NoColor<W> {\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.0.write(buf)\n", "file_path": "src/lib.rs", "rank": 60, "score": 11.463908360478161 }, { "content": " #[inline]\n\n fn reset(&mut self) -> io::Result<()> { self.wtr.reset() }\n\n\n\n #[inline]\n\n fn is_synchronous(&self) -> bool { self.wtr.is_synchronous() }\n\n}\n\n\n\nimpl<W: io::Write> io::Write for WriterInner<W> {\n\n #[inline(always)]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n match *self {\n\n WriterInner::NoColor(ref mut wtr) => wtr.write(buf),\n\n WriterInner::Ansi(ref mut wtr) => wtr.write(buf),\n\n #[cfg(windows)]\n\n WriterInner::Windows { ref mut wtr, .. } => wtr.write(buf),\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn flush(&mut self) -> io::Result<()> {\n", "file_path": "src/lib.rs", "rank": 61, "score": 11.048716978576124 }, { "content": " fn is_synchronous(&self) -> bool { false }\n\n}\n\n\n\nimpl<W: io::Write> Ansi<W> {\n\n fn write_str(&mut self, s: &str) -> io::Result<()> {\n\n self.write_all(s.as_bytes())\n\n }\n\n\n\n fn write_color(\n\n &mut self,\n\n fg: bool,\n\n c: &Color,\n\n intense: bool,\n\n ) -> io::Result<()> {\n\n macro_rules! write_intense {\n\n ($clr:expr) => {\n\n if fg {\n\n self.write_str(concat!(\"\\x1B[38;5;\", $clr, \"m\"))\n\n } else {\n\n self.write_str(concat!(\"\\x1B[48;5;\", $clr, \"m\"))\n", "file_path": "src/lib.rs", "rank": 62, "score": 10.999029470645315 }, { "content": " self.set()\n\n }\n\n\n\n /// Toggle virtual terminal processing.\n\n ///\n\n /// This method attempts to toggle virtual terminal processing for this\n\n /// console. 
If there was a problem toggling it, then an error returned.\n\n /// On success, the caller may assume that toggling it was successful.\n\n ///\n\n /// When virtual terminal processing is enabled, characters emitted to the\n\n /// console are parsed for VT100 and similar control character sequences\n\n /// that control color and other similar operations.\n\n pub fn set_virtual_terminal_processing(\n\n &mut self,\n\n yes: bool,\n\n ) -> io::Result<()> {\n\n let vt = wincon::ENABLE_VIRTUAL_TERMINAL_PROCESSING;\n\n\n\n let handle = self.kind.handle();\n\n let old_mode = winutil::console::mode(&handle)?;\n", "file_path": "wincolor/src/win.rs", "rank": 63, "score": 10.998857556842529 }, { "content": " }\n\n\n\n /// Get whether this is underline or not.\n\n ///\n\n /// Note that the underline setting has no effect in a Windows console.\n\n pub fn underline(&self) -> bool { self.underline }\n\n\n\n /// Set whether the text is underlined or not.\n\n ///\n\n /// Note that the underline setting has no effect in a Windows console.\n\n pub fn set_underline(&mut self, yes: bool) -> &mut ColorSpec {\n\n self.underline = yes;\n\n self\n\n }\n\n\n\n /// Get whether this is intense or not.\n\n ///\n\n /// On Unix-like systems, this will output the ANSI escape sequence\n\n /// that will print a high-intensity version of the color\n\n /// specified.\n", "file_path": "src/lib.rs", "rank": 64, "score": 10.864980945883477 }, { "content": " fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n\n\n\nimpl AsHandleRef for process::ChildStderr {\n\n fn as_handle_ref(&self) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(self.as_raw_handle()) }\n\n }\n\n}\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 65, "score": 10.717599836865435 }, { "content": " fn as_raw_handle(&self) -> RawHandle {\n\n self.0.as_raw_handle()\n\n }\n\n}\n\n\n\nimpl FromRawHandle for Handle {\n\n unsafe fn from_raw_handle(handle: RawHandle) -> Handle {\n\n Handle(File::from_raw_handle(handle))\n\n }\n\n}\n\n\n\nimpl IntoRawHandle for Handle {\n\n fn into_raw_handle(self) -> RawHandle {\n\n self.0.into_raw_handle()\n\n }\n\n}\n\n\n\nimpl Handle {\n\n /// Create an owned handle to the given file.\n\n ///\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 66, "score": 10.660342677348225 }, { "content": "\n\nimpl WriteColor for Buffer {\n\n #[inline]\n\n fn supports_color(&self) -> bool {\n\n match self.0 {\n\n BufferInner::NoColor(_) => false,\n\n BufferInner::Ansi(_) => true,\n\n #[cfg(windows)]\n\n BufferInner::Windows(_) => true,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut w) => w.set_color(spec),\n\n BufferInner::Ansi(ref mut w) => w.set_color(spec),\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref mut w) => w.set_color(spec),\n\n }\n", "file_path": "src/lib.rs", "rank": 67, "score": 10.345737165229957 }, { "content": " /// interact with an end user's device in order to control colors. By\n\n /// default, this always returns `false`.\n\n ///\n\n /// In practice, this should return `true` if the underlying writer is\n\n /// manipulating colors using the Windows console APIs.\n\n ///\n\n /// This is useful for writing generic code (such as a buffered writer)\n\n /// that can perform certain optimizations when the underlying writer\n\n /// doesn't rely on synchronous APIs. 
For example, ANSI escape sequences\n\n /// can be passed through to the end user's device as is.\n\n fn is_synchronous(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized + WriteColor> WriteColor for &'a mut T {\n\n fn supports_color(&self) -> bool { (&**self).supports_color() }\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n\n (&mut **self).set_color(spec)\n\n }\n", "file_path": "src/lib.rs", "rank": 68, "score": 10.063310014113585 }, { "content": "use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};\n\n\n\nlet mut bufwtr = BufferWriter::stderr(ColorChoice::Always);\n\nlet mut buffer = bufwtr.buffer();\n\nbuffer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;\n\nwriteln!(&mut buffer, \"green text!\")?;\n\nbufwtr.print(&buffer)?;\n\n# Ok(()) }\n\n```\n\n*/\n\n\n\n#![deny(missing_docs)]\n\n\n\n#[cfg(windows)]\n\nextern crate wincolor;\n\n\n\nuse std::env;\n\nuse std::error;\n\nuse std::fmt;\n\nuse std::io::{self, Write};\n\nuse std::str::FromStr;\n\n#[cfg(windows)]\n\nuse std::sync::{Mutex, MutexGuard};\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\n\n\n/// This trait describes the behavior of writers that support colored output.\n", "file_path": "src/lib.rs", "rank": 69, "score": 9.94406426808699 }, { "content": " /// Get the background color.\n\n pub fn bg(&self) -> Option<&Color> { self.bg_color.as_ref() }\n\n\n\n /// Set the background color.\n\n pub fn set_bg(&mut self, color: Option<Color>) -> &mut ColorSpec {\n\n self.bg_color = color;\n\n self\n\n }\n\n\n\n /// Get whether this is bold or not.\n\n ///\n\n /// Note that the bold setting has no effect in a Windows console.\n\n pub fn bold(&self) -> bool { self.bold }\n\n\n\n /// Set whether the text is bolded or not.\n\n ///\n\n /// Note that the bold setting has no effect in a Windows console.\n\n pub fn set_bold(&mut self, yes: bool) -> &mut ColorSpec {\n\n self.bold = yes;\n\n self\n", "file_path": "src/lib.rs", "rank": 70, "score": 9.643548220234676 }, { "content": " kind: ParseColorErrorKind::InvalidRgb,\n\n given: s.to_string(),\n\n }\n\n } else {\n\n ParseColorError {\n\n kind: ParseColorErrorKind::InvalidName,\n\n given: s.to_string(),\n\n }\n\n })\n\n }\n\n }\n\n}\n\n\n\n/// An error from parsing an invalid color specification.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct ParseColorError {\n\n kind: ParseColorErrorKind,\n\n given: String,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 71, "score": 9.426621953293052 }, { "content": "}\n\n\n\nimpl fmt::Display for ParseColorError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use self::ParseColorErrorKind::*;\n\n match self.kind {\n\n InvalidName => {\n\n write!(f, \"unrecognized color name '{}'. 
Choose from: \\\n\n black, blue, green, red, cyan, magenta, yellow, \\\n\n white\",\n\n self.given)\n\n }\n\n InvalidAnsi256 => {\n\n write!(f, \"unrecognized ansi256 color number, \\\n\n should be '[0-255]' (or a hex number), but is '{}'\",\n\n self.given)\n\n }\n\n InvalidRgb => {\n\n write!(f, \"unrecognized RGB color triple, \\\n\n should be '[0-255],[0-255],[0-255]' (or a hex \\\n", "file_path": "src/lib.rs", "rank": 72, "score": 9.292671562535677 }, { "content": " }\n\n\n\n /// Create a borrowed handle to the given file.\n\n ///\n\n /// When the returned handle is dropped, the file is not closed.\n\n pub fn from_file(file: &File) -> HandleRef {\n\n unsafe { HandleRef::from_raw_handle(file.as_raw_handle()) }\n\n }\n\n\n\n /// Create a borrowed handle from the given raw handle.\n\n ///\n\n /// Note that unlike the `FromRawHandle` trait, this constructor does\n\n /// **not** consume ownership of the given handle. That is, when the\n\n /// borrowed handle created by this constructor is dropped, the underlying\n\n /// handle will not be closed.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because there is no guarantee that the given raw handle\n\n /// is a valid handle. The caller must ensure this is true before invoking\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 73, "score": 9.192643377766446 }, { "content": " /// Apply the given intensity and color attributes to the console\n\n /// background.\n\n ///\n\n /// If there was a problem setting attributes on the console, then an error\n\n /// is returned.\n\n pub fn bg(&mut self, intense: Intense, color: Color) -> io::Result<()> {\n\n self.cur_attr.bg_color = color;\n\n self.cur_attr.bg_intense = intense;\n\n self.set()\n\n }\n\n\n\n /// Reset the console text attributes to their original settings.\n\n ///\n\n /// The original settings correspond to the text attributes on the console\n\n /// when this `Console` value was created.\n\n ///\n\n /// If there was a problem setting attributes on the console, then an error\n\n /// is returned.\n\n pub fn reset(&mut self) -> io::Result<()> {\n\n self.cur_attr = self.start_attr;\n", "file_path": "wincolor/src/win.rs", "rank": 74, "score": 9.054167332428921 }, { "content": " /// Returns true if and only if this buffer is empty.\n\n pub fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n /// Returns the length of this buffer in bytes.\n\n pub fn len(&self) -> usize {\n\n match self.0 {\n\n BufferInner::NoColor(ref b) => b.0.len(),\n\n BufferInner::Ansi(ref b) => b.0.len(),\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref b) => b.buf.len(),\n\n }\n\n }\n\n\n\n /// Clears this buffer.\n\n pub fn clear(&mut self) {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut b) => b.0.clear(),\n\n BufferInner::Ansi(ref mut b) => b.0.clear(),\n", "file_path": "src/lib.rs", "rank": 75, "score": 8.63022934351275 }, { "content": "/*!\n\nThis crate provides a smattering of safe routines for parts of winapi. The\n\nprimary purpose of this crate is to serve as a dumping ground for various\n\nutility functions that make interactions with winapi safe. This permits the\n\ncentralization of `unsafe` when dealing with Windows APIs, and thus makes it\n\neasier to audit.\n\n\n\nA key abstraction in this crate is the combination of the\n\n[`Handle`](struct.Handle.html)\n\nand\n\n[`HandleRef`](struct.HandleRef.html)\n\ntypes. 
Both represent a valid Windows handle to an I/O-like object, where\n\n`Handle` is owned (the resource is closed when the handle is dropped) and\n\n`HandleRef` is borrowed (the resource is not closed when the handle is\n\ndropped). Many of the routines in this crate work on handles and accept\n\nanything that can be safely converted into a `HandleRef`. This includes\n\nstandard library types such as `File`, `Stdin`, `Stdout` and `Stderr`.\n\n\n\nNote that this crate is completely empty on non-Windows platforms.\n\n*/\n", "file_path": "wincolor/src/win/winutil.rs", "rank": 76, "score": 8.502247549737962 }, { "content": "///\n\n/// This type has a `FromStr` impl that can parse colors from their human\n\n/// readable form. The format is as follows:\n\n///\n\n/// 1. Any of the explicitly listed colors in English. They are matched\n\n/// case insensitively.\n\n/// 2. A single 8-bit integer, in either decimal or hexadecimal format.\n\n/// 3. A triple of 8-bit integers separated by a comma, where each integer is\n\n/// in decimal or hexadecimal format.\n\n///\n\n/// Hexadecimal numbers are written with a `0x` prefix.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Color {\n\n Black,\n\n Blue,\n\n Green,\n\n Red,\n\n Cyan,\n\n Magenta,\n", "file_path": "src/lib.rs", "rank": 77, "score": 8.485120986649402 }, { "content": " match self.0 {\n\n BufferInner::NoColor(ref b) => &b.0,\n\n BufferInner::Ansi(ref b) => &b.0,\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref b) => &b.buf,\n\n }\n\n }\n\n\n\n /// Return the underlying data of the buffer as a mutable slice.\n\n pub fn as_mut_slice(&mut self) -> &mut [u8] {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut b) => &mut b.0,\n\n BufferInner::Ansi(ref mut b) => &mut b.0,\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref mut b) => &mut b.buf,\n\n }\n\n }\n\n}\n\n\n\nimpl io::Write for Buffer {\n", "file_path": "src/lib.rs", "rank": 78, "score": 8.127518820689163 }, { "content": " wtr: IoStandardStream::new(sty),\n\n console: Mutex::new(console),\n\n }\n\n } else {\n\n WriterInner::Ansi(Ansi(IoStandardStream::new(sty)))\n\n }\n\n } else {\n\n WriterInner::NoColor(NoColor(IoStandardStream::new(sty)))\n\n }\n\n }\n\n}\n\n\n\nimpl io::Write for StandardStream {\n\n #[inline]\n\n fn write(&mut self, b: &[u8]) -> io::Result<usize> { self.wtr.write(b) }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> { self.wtr.flush() }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 79, "score": 8.119358458321265 }, { "content": " /// When the returned handle is dropped, the file is closed.\n\n ///\n\n /// Note that if the given file represents a handle to a directory, then\n\n /// it is generally required that it have been opened with the\n\n /// [`FILE_FLAG_BACKUP_SEMANTICS`] flag in order to use it in various\n\n /// calls such as `information` or `typ`. 
To have this done automatically\n\n /// for you, use the `from_path_any` constructor.\n\n ///\n\n /// [`FILE_FLAG_BACKUP_SEMANTICS`]: https://docs.microsoft.com/en-us/windows/desktop/api/FileAPI/nf-fileapi-createfilea\n\n pub fn from_file(file: File) -> Handle {\n\n Handle(file)\n\n }\n\n\n\n /// Open a file to the given file path, and return an owned handle to that\n\n /// file.\n\n ///\n\n /// When the returned handle is dropped, the file is closed.\n\n ///\n\n /// If there was a problem opening the file, then the corresponding error\n\n /// is returned.\n", "file_path": "wincolor/src/win/winutil/win.rs", "rank": 80, "score": 8.110787450276174 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl io::Write for IoStandardStream {\n\n #[inline(always)]\n\n fn write(&mut self, b: &[u8]) -> io::Result<usize> {\n\n match *self {\n\n IoStandardStream::Stdout(ref mut s) => s.write(b),\n\n IoStandardStream::Stderr(ref mut s) => s.write(b),\n\n IoStandardStream::StdoutBuffered(ref mut s) => s.write(b),\n\n IoStandardStream::StderrBuffered(ref mut s) => s.write(b),\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn flush(&mut self) -> io::Result<()> {\n\n match *self {\n\n IoStandardStream::Stdout(ref mut s) => s.flush(),\n\n IoStandardStream::Stderr(ref mut s) => s.flush(),\n\n IoStandardStream::StdoutBuffered(ref mut s) => s.flush(),\n\n IoStandardStream::StderrBuffered(ref mut s) => s.flush(),\n\n }\n\n }\n\n}\n\n\n\n// Same rigmarole for the locked variants of the standard streams.\n\n\n", "file_path": "src/lib.rs", "rank": 81, "score": 8.039606924508725 }, { "content": " fn reset(&mut self) -> io::Result<()> { self.wtr.reset() }\n\n fn is_synchronous(&self) -> bool { self.wtr.is_synchronous() }\n\n}\n\n\n\nimpl<W: io::Write> io::Write for LossyStandardStream<W> {\n\n #[cfg(not(windows))]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.wtr.write(buf)\n\n }\n\n\n\n #[cfg(windows)]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n if self.is_console {\n\n write_lossy_utf8(&mut self.wtr, buf)\n\n } else {\n\n self.wtr.write(buf)\n\n }\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.wtr.flush()\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 82, "score": 7.713631344365609 }, { "content": " }\n\n }\n\n };\n\n StandardStreamLock { wtr: stream.wtr.wrap(locked) }\n\n }\n\n}\n\n\n\nimpl BufferedStandardStream {\n\n /// Create a new `BufferedStandardStream` with the given color preferences\n\n /// that writes to standard output via a buffered writer.\n\n ///\n\n /// On Windows, if coloring is desired and a Windows console could not be\n\n /// found, then ANSI escape sequences are used instead.\n\n ///\n\n /// The specific color/style settings can be configured when writing via\n\n /// the `WriteColor` trait.\n\n pub fn stdout(choice: ColorChoice) -> BufferedStandardStream {\n\n let wtr = WriterInner::create(\n\n StandardStreamType::StdoutBuffered,\n\n choice,\n", "file_path": "src/lib.rs", "rank": 83, "score": 7.616174681940339 }, { "content": " /// ANSI escape sequences are used instead.\n\n #[cfg(windows)]\n\n fn create(\n\n sty: StandardStreamType,\n\n choice: ColorChoice,\n\n ) -> WriterInner<IoStandardStream> {\n\n let mut con = match sty {\n\n StandardStreamType::Stdout => wincolor::Console::stdout(),\n\n StandardStreamType::Stderr => wincolor::Console::stderr(),\n\n StandardStreamType::StdoutBuffered => wincolor::Console::stdout(),\n\n StandardStreamType::StderrBuffered => wincolor::Console::stderr(),\n\n };\n\n let is_console_virtual = 
con.as_mut().map(|con| {\n\n con.set_virtual_terminal_processing(true).is_ok()\n\n }).unwrap_or(false);\n\n if choice.should_attempt_color() {\n\n if choice.should_ansi() || is_console_virtual {\n\n WriterInner::Ansi(Ansi(IoStandardStream::new(sty)))\n\n } else if let Ok(console) = con {\n\n WriterInner::Windows {\n", "file_path": "src/lib.rs", "rank": 84, "score": 7.481804117964294 }, { "content": " ///\n\n /// The default value is `None`.\n\n pub fn separator(&mut self, sep: Option<Vec<u8>>) {\n\n self.separator = sep;\n\n }\n\n\n\n /// Creates a new `Buffer` with the current color preferences.\n\n ///\n\n /// A `Buffer` satisfies both `io::Write` and `WriteColor`. A `Buffer` can\n\n /// be printed using the `print` method.\n\n #[cfg(not(windows))]\n\n pub fn buffer(&self) -> Buffer {\n\n Buffer::new(self.color_choice)\n\n }\n\n\n\n /// Creates a new `Buffer` with the current color preferences.\n\n ///\n\n /// A `Buffer` satisfies both `io::Write` and `WriteColor`. A `Buffer` can\n\n /// be printed using the `print` method.\n\n #[cfg(windows)]\n", "file_path": "src/lib.rs", "rank": 85, "score": 7.44344683990408 }, { "content": "\n\n /// Push the given color specification into this buffer.\n\n ///\n\n /// This has the effect of setting the given color information at the\n\n /// current position in the buffer.\n\n fn push(&mut self, spec: Option<ColorSpec>) {\n\n let pos = self.buf.len();\n\n self.colors.push((pos, spec));\n\n }\n\n\n\n /// Print the contents to the given stream handle, and use the console\n\n /// for coloring.\n\n fn print(\n\n &self,\n\n console: &mut wincolor::Console,\n\n stream: &mut LossyStandardStream<IoStandardStreamLock>,\n\n ) -> io::Result<()> {\n\n let mut last = 0;\n\n for &(pos, ref spec) in &self.colors {\n\n stream.write_all(&self.buf[last..pos])?;\n", "file_path": "src/lib.rs", "rank": 86, "score": 7.35382972106051 }, { "content": " /// Create a new `BufferWriter` that writes to a standard stream with the\n\n /// given color preferences.\n\n ///\n\n /// If coloring is desired and a Windows console could not be found, then\n\n /// ANSI escape sequences are used instead.\n\n ///\n\n /// The specific color/style settings can be configured when writing to\n\n /// the buffers themselves.\n\n #[cfg(windows)]\n\n fn create(sty: StandardStreamType, choice: ColorChoice) -> BufferWriter {\n\n let mut con = match sty {\n\n StandardStreamType::Stdout => wincolor::Console::stdout(),\n\n StandardStreamType::Stderr => wincolor::Console::stderr(),\n\n StandardStreamType::StdoutBuffered => wincolor::Console::stdout(),\n\n StandardStreamType::StderrBuffered => wincolor::Console::stderr(),\n\n }.ok();\n\n let is_console_virtual = con.as_mut().map(|con| {\n\n con.set_virtual_terminal_processing(true).is_ok()\n\n }).unwrap_or(false);\n\n // If we can enable ANSI on Windows, then we don't need the console\n", "file_path": "src/lib.rs", "rank": 87, "score": 7.336802215926758 }, { "content": " match *self {\n\n WriterInner::NoColor(ref mut wtr) => wtr.flush(),\n\n WriterInner::Ansi(ref mut wtr) => wtr.flush(),\n\n #[cfg(windows)]\n\n WriterInner::Windows { ref mut wtr, .. } => wtr.flush(),\n\n }\n\n }\n\n}\n\n\n\nimpl<W: io::Write> WriteColor for WriterInner<W> {\n\n fn supports_color(&self) -> bool {\n\n match *self {\n\n WriterInner::NoColor(_) => false,\n\n WriterInner::Ansi(_) => true,\n\n #[cfg(windows)]\n\n WriterInner::Windows { .. 
} => true,\n\n }\n\n }\n\n\n\n fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> {\n", "file_path": "src/lib.rs", "rank": 88, "score": 7.3231541978590915 }, { "content": "### Example: using `StandardStream`\n\n\n\nThe `StandardStream` type in this crate works similarly to `std::io::Stdout`,\n\nexcept it is augmented with methods for coloring by the `WriteColor` trait.\n\nFor example, to write some green text:\n\n\n\n```rust\n\nuse std::io::Write;\n\nuse termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};\n\n\n\nlet mut stdout = StandardStream::stdout(ColorChoice::Always);\n\nstdout.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;\n\nwriteln!(&mut stdout, \"green text!\")?;\n\n```\n\n\n\n### Example: using `BufferWriter`\n\n\n\nA `BufferWriter` can create buffers and write buffers to stdout or stderr. It\n\ndoes *not* implement `io::Write` or `WriteColor` itself. Instead, `Buffer`\n\nimplements `io::Write` and `termcolor::WriteColor`.\n\n\n\nThis example shows how to print some green text to stderr.\n\n\n\n```rust\n\nuse std::io::Write;\n\nuse termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};\n\n\n\nlet mut bufwtr = BufferWriter::stderr(ColorChoice::Always);\n\nlet mut buffer = bufwtr.buffer();\n\nbuffer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;\n\nwriteln!(&mut buffer, \"green text!\")?;\n\nbufwtr.print(&buffer)?;\n\n```\n", "file_path": "README.md", "rank": 89, "score": 7.319213398088029 }, { "content": " WriterInnerLock::Ansi(ref mut wtr) => wtr.write(buf),\n\n #[cfg(windows)]\n\n WriterInnerLock::Windows { ref mut wtr, .. } => wtr.write(buf),\n\n }\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n match *self {\n\n WriterInnerLock::Unreachable(_) => unreachable!(),\n\n WriterInnerLock::NoColor(ref mut wtr) => wtr.flush(),\n\n WriterInnerLock::Ansi(ref mut wtr) => wtr.flush(),\n\n #[cfg(windows)]\n\n WriterInnerLock::Windows { ref mut wtr, .. 
} => wtr.flush(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, W: io::Write> WriteColor for WriterInnerLock<'a, W> {\n\n fn supports_color(&self) -> bool {\n\n match *self {\n", "file_path": "src/lib.rs", "rank": 90, "score": 7.18497825895272 }, { "content": " Ok(())\n\n }\n\n }\n\n }\n\n\n\n fn is_synchronous(&self) -> bool {\n\n match *self {\n\n WriterInner::NoColor(_) => false,\n\n WriterInner::Ansi(_) => false,\n\n #[cfg(windows)]\n\n WriterInner::Windows {..} => true,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, W: io::Write> io::Write for WriterInnerLock<'a, W> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n match *self {\n\n WriterInnerLock::Unreachable(_) => unreachable!(),\n\n WriterInnerLock::NoColor(ref mut wtr) => wtr.write(buf),\n", "file_path": "src/lib.rs", "rank": 91, "score": 7.138251333886238 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nenum ParseColorErrorKind {\n\n InvalidName,\n\n InvalidAnsi256,\n\n InvalidRgb,\n\n}\n\n\n\nimpl ParseColorError {\n\n /// Return the string that couldn't be parsed as a valid color.\n\n pub fn invalid(&self) -> &str { &self.given }\n\n}\n\n\n\nimpl error::Error for ParseColorError {\n\n fn description(&self) -> &str {\n\n use self::ParseColorErrorKind::*;\n\n match self.kind {\n\n InvalidName => \"unrecognized color name\",\n\n InvalidAnsi256 => \"invalid ansi256 color number\",\n\n InvalidRgb => \"invalid RGB color triple\",\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 92, "score": 7.063455360912811 }, { "content": " /// The specific color/style settings can be configured when writing to\n\n /// the buffers themselves.\n\n pub fn stdout(choice: ColorChoice) -> BufferWriter {\n\n BufferWriter::create(StandardStreamType::Stdout, choice)\n\n }\n\n\n\n /// Create a new `BufferWriter` that writes to stderr with the given\n\n /// color preferences.\n\n ///\n\n /// On Windows, if coloring is desired and a Windows console could not be\n\n /// found, then ANSI escape sequences are used instead.\n\n ///\n\n /// The specific color/style settings can be configured when writing to\n\n /// the buffers themselves.\n\n pub fn stderr(choice: ColorChoice) -> BufferWriter {\n\n BufferWriter::create(StandardStreamType::Stderr, choice)\n\n }\n\n\n\n /// If set, the separator given is printed between buffers. 
By default, no\n\n /// separator is printed.\n", "file_path": "src/lib.rs", "rank": 93, "score": 7.0252885586289615 }, { "content": " Yellow,\n\n White,\n\n Ansi256(u8),\n\n Rgb(u8, u8, u8),\n\n #[doc(hidden)]\n\n __Nonexhaustive,\n\n}\n\n\n\nimpl Color {\n\n /// Translate this color to a wincolor::Color.\n\n #[cfg(windows)]\n\n fn to_windows(\n\n self,\n\n intense: bool,\n\n ) -> Option<(wincolor::Intense, wincolor::Color)> {\n\n use wincolor::Intense::{Yes, No};\n\n\n\n let color = match self {\n\n Color::Black => wincolor::Color::Black,\n\n Color::Blue => wincolor::Color::Blue,\n", "file_path": "src/lib.rs", "rank": 94, "score": 6.895343425706871 }, { "content": " #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut w) => w.write(buf),\n\n BufferInner::Ansi(ref mut w) => w.write(buf),\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref mut w) => w.write(buf),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n match self.0 {\n\n BufferInner::NoColor(ref mut w) => w.flush(),\n\n BufferInner::Ansi(ref mut w) => w.flush(),\n\n #[cfg(windows)]\n\n BufferInner::Windows(ref mut w) => w.flush(),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 95, "score": 6.838531823364444 }, { "content": " /// than emitting ANSI color codes.\n\n AlwaysAnsi,\n\n /// Try to use colors, but don't force the issue. If the console isn't\n\n /// available on Windows, or if TERM=dumb, for example, then don't use\n\n /// colors.\n\n Auto,\n\n /// Never emit colors.\n\n Never,\n\n}\n\n\n\nimpl ColorChoice {\n\n /// Returns true if we should attempt to write colored output.\n\n #[cfg(not(windows))]\n\n fn should_attempt_color(&self) -> bool {\n\n match *self {\n\n ColorChoice::Always => true,\n\n ColorChoice::AlwaysAnsi => true,\n\n ColorChoice::Never => false,\n\n ColorChoice::Auto => {\n\n match env::var(\"TERM\") {\n", "file_path": "src/lib.rs", "rank": 96, "score": 6.800874284463161 }, { "content": " /// The specific color/style settings can be configured when writing via\n\n /// the `WriteColor` trait.\n\n pub fn stdout(choice: ColorChoice) -> StandardStream {\n\n let wtr = WriterInner::create(StandardStreamType::Stdout, choice);\n\n StandardStream { wtr: LossyStandardStream::new(wtr) }\n\n }\n\n\n\n /// Create a new `StandardStream` with the given color preferences that\n\n /// writes to standard error.\n\n ///\n\n /// On Windows, if coloring is desired and a Windows console could not be\n\n /// found, then ANSI escape sequences are used instead.\n\n ///\n\n /// The specific color/style settings can be configured when writing via\n\n /// the `WriteColor` trait.\n\n pub fn stderr(choice: ColorChoice) -> StandardStream {\n\n let wtr = WriterInner::create(StandardStreamType::Stderr, choice);\n\n StandardStream { wtr: LossyStandardStream::new(wtr) }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 97, "score": 6.431895954230795 }, { "content": " ///\n\n /// This corresponds to `dwSize`.\n\n pub fn size(&self) -> (i16, i16) {\n\n (self.0.dwSize.X, self.0.dwSize.Y)\n\n }\n\n\n\n /// Returns the position of the cursor in terms of column and row\n\n /// coordinates of the console screen buffer.\n\n ///\n\n /// This corresponds to `dwCursorPosition`.\n\n pub fn cursor_position(&self) -> (i16, i16) {\n\n (self.0.dwCursorPosition.X, self.0.dwCursorPosition.Y)\n\n }\n\n\n\n /// Returns the character attributes associated with this console.\n\n ///\n\n /// This corresponds to `wAttributes`.\n\n ///\n\n /// See [`char info`] for 
more details.\n\n ///\n", "file_path": "wincolor/src/win/winutil/console.rs", "rank": 98, "score": 5.995313990804625 }, { "content": " #[cfg(windows)]\n\n console: Option<Mutex<wincolor::Console>>,\n\n}\n\n\n\nimpl BufferWriter {\n\n /// Create a new `BufferWriter` that writes to a standard stream with the\n\n /// given color preferences.\n\n ///\n\n /// The specific color/style settings can be configured when writing to\n\n /// the buffers themselves.\n\n #[cfg(not(windows))]\n\n fn create(sty: StandardStreamType, choice: ColorChoice) -> BufferWriter {\n\n BufferWriter {\n\n stream: LossyStandardStream::new(IoStandardStream::new(sty)),\n\n printed: AtomicBool::new(false),\n\n separator: None,\n\n color_choice: choice,\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 99, "score": 5.98765277052156 } ]
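Taken together, the wincolor excerpts above describe a small, synchronous console API: obtain a `Console` for stdout or stderr, change text attributes with `fg`/`bg`, restore the startup attributes with `reset`, and flush any buffered writes before each attribute change so text that was already written keeps its color. The sketch below uses only signatures that appear verbatim in those excerpts and assumes the `wincolor` crate from this repository is available as a dependency; the `demo` function name is our own, and everything is gated to Windows because the console APIs are Windows-only. It is an illustration of the call pattern, not code taken from the crate.

```rust
// Illustrative only: every wincolor call below matches a signature quoted in
// the excerpts above; `demo` and the printed strings are our own additions.
#[cfg(windows)]
fn demo() -> std::io::Result<()> {
    use std::io::Write;
    use wincolor::{Color, Console, Intense};

    // Capture the console's current attributes so `reset` can restore them.
    let mut console = Console::stdout()?;

    // Flush buffered output before changing attributes so already-written
    // text keeps its current color (the documented pitfall).
    print!("plain ");
    std::io::stdout().flush()?;

    console.fg(Intense::Yes, Color::Green)?;
    print!("green ");
    std::io::stdout().flush()?;

    // Restore the attributes captured when the Console was created.
    console.reset()?;
    println!("back to normal");
    Ok(())
}

#[cfg(windows)]
fn main() -> std::io::Result<()> {
    demo()
}

#[cfg(not(windows))]
fn main() {}
```

In the crate itself this flow is wrapped by termcolor's `WriterInner::Windows` variant, which interleaves buffered writes with console attribute changes, as the surrounding snippets show.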
Rust
src/mimc_hash.rs
oskarth/semaphore-rs
d462a4372f1fd9c27610f2acfe4841fab1d396aa
use crate::util::keccak256;
use once_cell::sync::Lazy;
use zkp_u256::U256;

// Number of Feistel rounds in the permutation.
const NUM_ROUNDS: usize = 220;

// Order of the BN254 scalar field; all arithmetic below is reduced modulo this prime.
static MODULUS: Lazy<U256> = Lazy::new(|| {
    U256::from_decimal_str(
        "21888242871839275222246405745257275088548364400416034343698204186575808495617",
    )
    .unwrap()
});

// Round constants derived by repeatedly applying keccak256 to the seed
// "mimcsponge"; the first and last constants remain zero.
static ROUND_CONSTANTS: Lazy<[U256; NUM_ROUNDS]> = Lazy::new(|| {
    const SEED: &str = "mimcsponge";
    let mut result = [U256::ZERO; NUM_ROUNDS];
    let mut bytes = keccak256(SEED.as_bytes());
    for constant in result[1..NUM_ROUNDS - 1].iter_mut() {
        bytes = keccak256(&bytes);
        *constant = U256::from_bytes_be(&bytes);
        *constant %= &*MODULUS;
    }
    result
});

/// One pass of the MiMC-Feistel permutation: each round adds a round constant
/// to the left half, applies the x^5 S-box, adds the result into the right
/// half, and swaps the halves; the final swap after the loop undoes the last one.
fn mix(left: &mut U256, right: &mut U256) {
    debug_assert!(*left < *MODULUS);
    debug_assert!(*right < *MODULUS);
    for round_constant in &*ROUND_CONSTANTS {
        let t = (&*left + round_constant) % &*MODULUS;
        let t2 = t.mulmod(&t, &*MODULUS);
        let t4 = t2.mulmod(&t2, &*MODULUS);
        let t5 = t.mulmod(&t4, &*MODULUS);
        *right += t5;
        *right %= &*MODULUS;
        std::mem::swap(left, right);
    }
    std::mem::swap(left, right);
}

/// Sponge-style hash: each input is reduced and absorbed into the left half,
/// then the permutation is applied.
#[must_use]
pub fn hash(values: &[U256]) -> U256 {
    let mut left = U256::ZERO;
    let mut right = U256::ZERO;
    for value in values {
        let value = value % &*MODULUS;
        left += value;
        left %= &*MODULUS;
        mix(&mut left, &mut right);
    }
    left
}

#[cfg(test)]
pub mod test {
    use super::*;
    use hex_literal::hex;

    #[test]
    fn test_round_constants() {
        assert_eq!(ROUND_CONSTANTS[0], U256::ZERO);
        assert_eq!(
            ROUND_CONSTANTS[1],
            U256::from_decimal_str(
                "7120861356467848435263064379192047478074060781135320967663101236819528304084"
            )
            .unwrap()
        );
        assert_eq!(
            ROUND_CONSTANTS[2],
            U256::from_decimal_str(
                "5024705281721889198577876690145313457398658950011302225525409148828000436681"
            )
            .unwrap()
        );
        assert_eq!(
            ROUND_CONSTANTS[218],
            U256::from_decimal_str(
                "2119542016932434047340813757208803962484943912710204325088879681995922344971"
            )
            .unwrap()
        );
        assert_eq!(ROUND_CONSTANTS[219], U256::ZERO);
    }

    #[test]
    fn test_mix() {
        let mut left = U256::ONE;
        let mut right = U256::ZERO;
        mix(&mut left, &mut right);
        assert_eq!(
            left,
            U256::from_decimal_str(
                "8792246410719720074073794355580855662772292438409936688983564419486782556587"
            )
            .unwrap()
        );
        assert_eq!(
            right,
            U256::from_decimal_str(
                "7326554092124867281481480523863654579712861994895051796475958890524736238844"
            )
            .unwrap()
        );
        left += U256::from(2);
        mix(&mut left, &mut right);
        assert_eq!(
            left,
            U256::from_decimal_str(
                "19814528709687996974327303300007262407299502847885145507292406548098437687919"
            )
            .unwrap()
        );
        assert_eq!(
            right,
            U256::from_decimal_str(
                "3888906192024793285683241274210746486868893421288515595586335488978789653213"
            )
            .unwrap()
        );
    }

    #[test]
    fn test_hash() {
        assert_eq!(
            hash(&[U256::from(1_u64), U256::from(2_u64)]),
            U256::from_bytes_be(&hex!(
                "2bcea035a1251603f1ceaf73cd4ae89427c47075bb8e3a944039ff1e3d6d2a6f"
            ))
        );
        assert_eq!(
            hash(&[
                U256::from(1_u64),
                U256::from(2_u64),
                U256::from(3_u64),
                U256::from(4_u64)
            ]),
            U256::from_bytes_be(&hex!(
                "03e86bdc4eac70bd601473c53d8233b145fe8fd8bf6ef25f0b217a1da305665c"
            ))
        );
    }
}

#[cfg(feature = "bench")]
pub mod bench {
    #[allow(clippy::wildcard_imports)]
    use super::*;
    use criterion::Criterion;

    pub fn group(criterion: &mut Criterion) {
        bench_mix(criterion);
    }

    fn bench_mix(criterion: &mut Criterion) {
        let mut left = U256::ONE;
        let mut right = U256::ZERO;
        criterion.bench_function("mimc_mix", move |bencher| {
            bencher.iter(|| mix(&mut left, &mut right));
        });
    }
}
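For orientation, `mix` above is one application of the MiMC-Feistel permutation: each round adds a round constant to the left half, pushes it through the x^5 S-box modulo the BN254 scalar field, adds the result into the right half, and swaps the halves, with a final swap undoing the last one. The sketch below is not part of the crate; it reproduces only that round structure over a toy 61-bit prime with plain `u128` arithmetic, so the modulus, constants, and outputs are illustrative placeholders rather than anything interoperable with the real circuit.

```rust
// Toy illustration only: same Feistel/x^5 round shape as `mix` above, but
// over a small Mersenne prime with u128 arithmetic instead of the BN254 field.
const TOY_MODULUS: u128 = (1u128 << 61) - 1;

fn pow5(x: u128) -> u128 {
    // x^5 = x * (x^2)^2, reducing after every multiplication.
    let x2 = (x * x) % TOY_MODULUS;
    let x4 = (x2 * x2) % TOY_MODULUS;
    (x4 * x) % TOY_MODULUS
}

fn toy_mix(left: &mut u128, right: &mut u128, round_constants: &[u128]) {
    for rc in round_constants {
        let t = (*left + *rc) % TOY_MODULUS; // add the round constant to the left half
        *right = (*right + pow5(t)) % TOY_MODULUS; // S-box output feeds the right half
        std::mem::swap(left, right); // Feistel swap
    }
    std::mem::swap(left, right); // undo the final swap, as `mix` does
}

fn main() {
    let mut left = 1u128;
    let mut right = 0u128;
    // Placeholder round constants; the real permutation uses 220 constants.
    toy_mix(&mut left, &mut right, &[0, 7, 13, 42]);
    println!("left = {left}, right = {right}");
}
```

In the real implementation the 220 round constants come from iterating keccak256 over the seed "mimcsponge", with the first and last constants fixed to zero, as the tests above check.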
function_block-function_prefixed
[ { "content": "// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>\n\nfn absolute(path: &str) -> Result<PathBuf> {\n\n let path = Path::new(path);\n\n let mut absolute = if path.is_absolute() {\n\n PathBuf::new()\n\n } else {\n\n std::env::current_dir()?\n\n };\n\n for component in path.components() {\n\n match component {\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n absolute.pop();\n\n }\n\n component => absolute.push(component.as_os_str()),\n\n }\n\n }\n\n Ok(absolute)\n\n}\n\n\n", "file_path": "build.rs", "rank": 2, "score": 95148.59797065894 }, { "content": "#[cfg(not(feature = \"dylib\"))]\n\n#[must_use]\n\npub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {\n\n WITNESS_CALCULATOR.get_or_init(|| {\n\n let store = Store::default();\n\n let module = Module::from_binary(&store, WASM).expect(\"wasm should be valid\");\n\n let result =\n\n WitnessCalculator::from_module(module).expect(\"Failed to create witness calculator\");\n\n Mutex::new(result)\n\n })\n\n}\n", "file_path": "src/circuit.rs", "rank": 3, "score": 87367.52891184934 }, { "content": "/// Helper function to remove optionally `0x` prefix from hex strings.\n\nfn trim_hex_prefix(str: &str) -> &str {\n\n if str.len() >= 2 && (&str[..2] == \"0x\" || &str[..2] == \"0X\") {\n\n &str[2..]\n\n } else {\n\n str\n\n }\n\n}\n\n\n\n/// Helper to deserialize byte arrays.\n\npub(crate) fn deserialize_bytes<'de, const N: usize, D: Deserializer<'de>>(\n\n deserializer: D,\n\n) -> Result<[u8; N], D::Error> {\n\n if deserializer.is_human_readable() {\n\n struct StrVisitor<const N: usize>;\n\n impl<'de, const N: usize> Visitor<'de> for StrVisitor<N> {\n\n type Value = [u8; N];\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> FmtResult {\n\n write!(formatter, \"a {N} byte hex string\")\n\n }\n", "file_path": "src/util.rs", "rank": 4, "score": 80283.26147395442 }, { "content": "#[must_use]\n\npub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {\n\n &*ZKEY\n\n}\n\n\n", "file_path": "src/circuit.rs", "rank": 5, "score": 79244.85626844458 }, { "content": "fn main() -> Result<()> {\n\n build_circuit()?;\n\n #[cfg(feature = \"dylib\")]\n\n build_dylib()?;\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 6, "score": 73914.56776165657 }, { "content": "#[cfg(feature = \"dylib\")]\n\nfn build_dylib() -> Result<()> {\n\n use enumset::enum_set;\n\n use std::{env, str::FromStr};\n\n use wasmer::{Module, Store, Target, Triple};\n\n use wasmer_compiler_cranelift::Cranelift;\n\n use wasmer_engine_dylib::Dylib;\n\n\n\n let wasm_file = absolute(WASM_FILE)?;\n\n assert!(wasm_file.exists());\n\n\n\n let out_dir = env::var(\"OUT_DIR\")?;\n\n let out_dir = Path::new(&out_dir).to_path_buf();\n\n let dylib_file = out_dir.join(\"semaphore.dylib\");\n\n println!(\n\n \"cargo:rustc-env=CIRCUIT_WASM_DYLIB={}\",\n\n dylib_file.display()\n\n );\n\n\n\n if dylib_file.exists() {\n\n return Ok(());\n", "file_path": "build.rs", "rank": 7, "score": 71281.92788582305 }, { "content": "fn build_circuit() -> Result<()> {\n\n println!(\"cargo:rerun-if-changed=./semaphore\");\n\n let run = |cmd: &[&str]| -> Result<()> {\n\n // TODO: Use ExitCode::exit_ok() when stable.\n\n Command::new(cmd[0])\n\n .args(cmd[1..].iter())\n\n .current_dir(\"./semaphore\")\n\n .status()?\n\n .success()\n\n .then(|| ())\n\n .ok_or(eyre!(\"procees returned failure\"))?;\n\n Ok(())\n\n };\n\n\n\n // Compute absolute paths\n\n let zkey_file = absolute(ZKEY_FILE)?;\n\n let wasm_file = absolute(WASM_FILE)?;\n\n\n\n // Build 
circuits if not exists\n\n // TODO: This does not rebuild if the semaphore submodule is changed.\n", "file_path": "build.rs", "rank": 8, "score": 71281.92788582305 }, { "content": "fn seed_hex(seed: &[u8]) -> [u8; 64] {\n\n let mut hasher = Sha256::new();\n\n hasher.update(seed);\n\n let bytes: [u8; 32] = hasher.finalize().into();\n\n let mut result = [0_u8; 64];\n\n hex::encode_to_slice(&bytes, &mut result[..]).expect(\"output buffer is correctly sized\");\n\n result\n\n}\n\n\n\nimpl Identity {\n\n #[must_use]\n\n pub fn from_seed(seed: &[u8]) -> Self {\n\n let seed_hex = seed_hex(seed);\n\n Self {\n\n trapdoor: derive_field(&seed_hex, b\"identity_trapdoor\"),\n\n nullifier: derive_field(&seed_hex, b\"identity_nullifier\"),\n\n }\n\n }\n\n\n\n #[must_use]\n\n pub fn secret_hash(&self) -> Field {\n\n poseidon_hash(&[self.nullifier, self.trapdoor])\n\n }\n\n\n\n #[must_use]\n\n pub fn commitment(&self) -> Field {\n\n poseidon_hash(&[self.secret_hash()])\n\n }\n\n}\n", "file_path": "src/identity.rs", "rank": 9, "score": 68428.09128624882 }, { "content": "/// Verifies a given semaphore proof\n\n///\n\n/// # Errors\n\n///\n\n/// Returns a [`ProofError`] if verifying fails. Verification failure does not\n\n/// necessarily mean the proof is incorrect.\n\npub fn verify_proof(\n\n root: Field,\n\n nullifier_hash: Field,\n\n signal_hash: Field,\n\n external_nullifier_hash: Field,\n\n proof: &Proof,\n\n) -> Result<bool, ProofError> {\n\n let zkey = zkey();\n\n let pvk = prepare_verifying_key(&zkey.0.vk);\n\n\n\n let public_inputs = [\n\n root.into(),\n\n nullifier_hash.into(),\n\n signal_hash.into(),\n\n external_nullifier_hash.into(),\n\n ];\n\n let ark_proof = (*proof).into();\n\n let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;\n\n Ok(result)\n\n}\n", "file_path": "src/protocol.rs", "rank": 10, "score": 65931.91867583939 }, { "content": "/// Generates a semaphore proof\n\n///\n\n/// # Errors\n\n///\n\n/// Returns a [`ProofError`] if proving fails.\n\npub fn generate_proof(\n\n identity: &Identity,\n\n merkle_proof: &merkle_tree::Proof<PoseidonHash>,\n\n external_nullifier_hash: Field,\n\n signal_hash: Field,\n\n) -> Result<Proof, ProofError> {\n\n generate_proof_rng(\n\n identity,\n\n merkle_proof,\n\n external_nullifier_hash,\n\n signal_hash,\n\n &mut thread_rng(),\n\n )\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 11, "score": 65931.91867583939 }, { "content": "/// Generates a semaphore proof from entropy\n\n///\n\n/// # Errors\n\n///\n\n/// Returns a [`ProofError`] if proving fails.\n\npub fn generate_proof_rng(\n\n identity: &Identity,\n\n merkle_proof: &merkle_tree::Proof<PoseidonHash>,\n\n external_nullifier_hash: Field,\n\n signal_hash: Field,\n\n rng: &mut impl Rng,\n\n) -> Result<Proof, ProofError> {\n\n generate_proof_rs(\n\n identity,\n\n merkle_proof,\n\n external_nullifier_hash,\n\n signal_hash,\n\n ark_bn254::Fr::rand(rng),\n\n ark_bn254::Fr::rand(rng),\n\n )\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 12, "score": 63806.40816736694 }, { "content": "#[cfg(feature = \"dylib\")]\n\npub fn initialize(dylib_path: &Path) {\n\n WITNESS_CALCULATOR\n\n .set(from_dylib(dylib_path))\n\n .expect(\"Failed to initialize witness calculator\");\n\n\n\n // Force init of ZKEY\n\n Lazy::force(&ZKEY);\n\n}\n\n\n", "file_path": "src/circuit.rs", "rank": 13, "score": 58684.21931106747 }, { "content": "#[must_use]\n\n#[allow(clippy::module_name_repetitions)]\n\npub fn hash_to_field(data: &[u8]) -> Field {\n\n let hash = keccak256(data);\n\n // Shift right 
one byte to make it fit in the field\n\n let mut bytes = [0_u8; 32];\n\n bytes[1..].copy_from_slice(&hash[..31]);\n\n Field(bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use ark_ff::Field as _;\n\n\n\n #[test]\n\n fn test_modulus_identical() {\n\n assert_eq!(PosField::char().0, ArkField::characteristic());\n\n }\n\n\n\n #[test]\n\n fn test_field_serde() {\n", "file_path": "src/field.rs", "rank": 14, "score": 55662.415256121356 }, { "content": "#[must_use]\n\npub fn poseidon_hash(input: &[Field]) -> Field {\n\n let input = input.iter().copied().map(Into::into).collect::<Vec<_>>();\n\n\n\n POSEIDON\n\n .hash(input)\n\n .map(Into::into)\n\n .expect(\"hash with fixed input size can't fail\")\n\n}\n", "file_path": "src/poseidon_hash.rs", "rank": 15, "score": 54044.04788096291 }, { "content": "#[must_use]\n\npub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {\n\n poseidon_hash(&[external_nullifier, identity.nullifier])\n\n}\n\n\n\n#[derive(Error, Debug)]\n\npub enum ProofError {\n\n #[error(\"Error reading circuit key: {0}\")]\n\n CircuitKeyError(#[from] std::io::Error),\n\n #[error(\"Error producing witness: {0}\")]\n\n WitnessError(color_eyre::Report),\n\n #[error(\"Error producing proof: {0}\")]\n\n SynthesisError(#[from] SynthesisError),\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 16, "score": 47851.84599125506 }, { "content": "/// Implements the private key derivation function from zk-kit.\n\n///\n\n/// See <https://github.com/appliedzkp/zk-kit/blob/1ea410456fc2b95877efa7c671bc390ffbfb5d36/packages/identity/src/identity.ts#L58>\n\nfn derive_field(seed_hex: &[u8; 64], suffix: &[u8]) -> Field {\n\n let mut hasher = Sha256::new();\n\n hasher.update(seed_hex);\n\n hasher.update(suffix);\n\n Field::from_be_bytes_mod_order(hasher.finalize().as_ref())\n\n}\n\n\n", "file_path": "src/identity.rs", "rank": 17, "score": 40588.273651792544 }, { "content": "/// Hash types, values and algorithms for a Merkle tree\n\npub trait Hasher {\n\n /// Type of the leaf and node hashes\n\n type Hash: Clone + Eq + Serialize;\n\n\n\n /// Compute the hash of an intermediate node\n\n fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash;\n\n}\n\n\n\n/// Merkle tree with all leaf and intermediate hashes stored\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub struct MerkleTree<H: Hasher> {\n\n /// Depth of the tree, # of layers including leaf layer\n\n depth: usize,\n\n\n\n /// Hash value of empty subtrees of given depth, starting at leaf level\n\n empty: Vec<H::Hash>,\n\n\n\n /// Hash values of tree nodes and leaves, breadth first order\n\n nodes: Vec<H::Hash>,\n\n}\n", "file_path": "src/merkle_tree.rs", "rank": 18, "score": 37619.390317198166 }, { "content": "fn main() {\n\n let mut criterion = criterion::Criterion::default().configure_from_args();\n\n lib::bench::group(&mut criterion);\n\n criterion.final_summary();\n\n}\n", "file_path": "criterion.rs", "rank": 19, "score": 32803.50162363658 }, { "content": "fn generate_proof_rs(\n\n identity: &Identity,\n\n merkle_proof: &merkle_tree::Proof<PoseidonHash>,\n\n external_nullifier_hash: Field,\n\n signal_hash: Field,\n\n r: ark_bn254::Fr,\n\n s: ark_bn254::Fr,\n\n) -> Result<Proof, ProofError> {\n\n let inputs = [\n\n (\"identityNullifier\", vec![identity.nullifier]),\n\n (\"identityTrapdoor\", vec![identity.trapdoor]),\n\n (\"treePathIndices\", merkle_proof.path_index()),\n\n (\"treeSiblings\", merkle_proof_to_vec(merkle_proof)),\n\n (\"externalNullifier\", 
vec![external_nullifier_hash]),\n\n (\"signalHash\", vec![signal_hash]),\n\n ];\n\n let inputs = inputs.into_iter().map(|(name, values)| {\n\n (\n\n name.to_string(),\n\n values.iter().copied().map(Into::into).collect::<Vec<_>>(),\n", "file_path": "src/protocol.rs", "rank": 20, "score": 28923.595281394548 }, { "content": "#[cfg(feature = \"dylib\")]\n\nfn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {\n\n let store = Store::new(&Dylib::headless().engine());\n\n // The module must be exported using [`Module::serialize`].\n\n let module = unsafe {\n\n Module::deserialize_from_file(&store, path).expect(\"Failed to load wasm dylib module\")\n\n };\n\n let result =\n\n WitnessCalculator::from_module(module).expect(\"Failed to create witness calculator\");\n\n Mutex::new(result)\n\n}\n\n\n", "file_path": "src/circuit.rs", "rank": 21, "score": 24132.292383202912 }, { "content": "/// Helper to merkle proof into a bigint vector\n\n/// TODO: we should create a From trait for this\n\nfn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {\n\n proof\n\n .0\n\n .iter()\n\n .map(|x| match x {\n\n Branch::Left(value) | Branch::Right(value) => *value,\n\n })\n\n .collect()\n\n}\n\n\n\n/// Generates the nullifier hash\n", "file_path": "src/protocol.rs", "rank": 22, "score": 20105.32447307234 }, { "content": " fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {\n\n let left = U256::from_bytes_be(left.as_bytes_be());\n\n let right = U256::from_bytes_be(right.as_bytes_be());\n\n Hash::from_bytes_be(hash(&[left, right]).to_bytes_be())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n use hex_literal::hex;\n\n\n\n #[test]\n\n fn test_tree_4() {\n\n const LEAF: Hash = Hash::from_bytes_be(hex!(\n\n \"1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"\n\n ));\n\n let tree = MimcTree::new(3, LEAF);\n\n assert_eq!(tree.num_leaves(), 4);\n\n assert_eq!(\n", "file_path": "src/mimc_tree.rs", "rank": 25, "score": 21.658860607178532 }, { "content": " E: DeError,\n\n {\n\n if value.len() != N {\n\n return Err(E::invalid_length(value.len(), &self));\n\n }\n\n let mut result = [0_u8; N];\n\n result.copy_from_slice(value);\n\n Ok(result)\n\n }\n\n }\n\n deserializer.deserialize_bytes(ByteVisitor)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_serialize_bytes_hex() {\n", "file_path": "src/util.rs", "rank": 26, "score": 19.93558828044643 }, { "content": " poseidon_hash(&[*left, *right])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n\n\n // TODO: proptest\n\n // #[test]\n\n // fn test_ark_hash_ark_roundtrip() {\n\n // let mut rng = ChaChaRng::seed_from_u64(123);\n\n // for _ in 0..1000 {\n\n // let n = Field::rand(&mut rng);\n\n // let m = Hash::from(n).into();\n\n // assert_eq!(n, m);\n\n // }\n\n // }\n\n\n\n // TODO: Const constructor\n\n // #[test]\n", "file_path": "src/poseidon_tree.rs", "rank": 29, "score": 18.142436003008413 }, { "content": "/// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix\n\n/// but they must always be exactly 32 bytes.\n\nimpl<'de> Deserialize<'de> for Hash {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n\n let bytes = deserialize_bytes::<32, _>(deserializer)?;\n\n Ok(Self(bytes))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n use hex_literal::hex;\n\n use serde_json::{from_str, to_string};\n\n\n\n #[test]\n\n fn test_serialize() {\n\n let hash = Hash([0; 32]);\n\n 
assert_eq!(\n\n to_string(&hash).unwrap(),\n", "file_path": "src/hash.rs", "rank": 30, "score": 17.44044936006099 }, { "content": "use core::{\n\n fmt::{Formatter, Result as FmtResult},\n\n str,\n\n};\n\nuse serde::{\n\n de::{Error as DeError, Visitor},\n\n Deserializer, Serializer,\n\n};\n\nuse tiny_keccak::{Hasher as _, Keccak};\n\n\n\npub(crate) fn keccak256(bytes: &[u8]) -> [u8; 32] {\n\n let mut output = [0; 32];\n\n let mut hasher = Keccak::v256();\n\n hasher.update(bytes);\n\n hasher.finalize(&mut output);\n\n output\n\n}\n\n\n\npub(crate) fn bytes_to_hex<const N: usize, const M: usize>(bytes: &[u8; N]) -> [u8; M] {\n\n // TODO: Replace `M` with a const expression once it's stable.\n", "file_path": "src/util.rs", "rank": 31, "score": 16.27252360155884 }, { "content": "\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: DeError,\n\n {\n\n bytes_from_hex(value).map_err(|e| E::custom(format!(\"Error in hex: {}\", e)))\n\n }\n\n }\n\n deserializer.deserialize_str(StrVisitor)\n\n } else {\n\n struct ByteVisitor<const N: usize>;\n\n impl<'de, const N: usize> Visitor<'de> for ByteVisitor<N> {\n\n type Value = [u8; N];\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> FmtResult {\n\n write!(formatter, \"{N} bytes of binary data\")\n\n }\n\n\n\n fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E>\n\n where\n", "file_path": "src/util.rs", "rank": 32, "score": 15.620362352043282 }, { "content": "use crate::util::{bytes_from_hex, bytes_to_hex, deserialize_bytes, serialize_bytes};\n\nuse core::{\n\n fmt::{Debug, Display},\n\n str,\n\n str::FromStr,\n\n};\n\nuse ethers_core::types::U256;\n\nuse num_bigint::{BigInt, Sign};\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n/// Container for 256-bit hash values.\n\n#[derive(Clone, Copy, PartialEq, Eq, Default)]\n\npub struct Hash(pub [u8; 32]);\n\n\n\nimpl Hash {\n\n #[must_use]\n\n pub const fn from_bytes_be(bytes: [u8; 32]) -> Self {\n\n Self(bytes)\n\n }\n\n\n", "file_path": "src/hash.rs", "rank": 33, "score": 14.801697168784457 }, { "content": "where\n\n H: Hasher,\n\n H::Hash: Debug,\n\n{\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"Proof\").field(&self.0).finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n use hex_literal::hex;\n\n use tiny_keccak::{Hasher as _, Keccak};\n\n\n\n struct Keccak256;\n\n\n\n impl Hasher for Keccak256 {\n\n type Hash = [u8; 32];\n\n\n", "file_path": "src/merkle_tree.rs", "rank": 34, "score": 14.6811920577781 }, { "content": " } else {\n\n // Write as bytes directly\n\n serializer.serialize_bytes(&bytes[..])\n\n }\n\n}\n\n\n\n/// Helper to deserialize byte arrays from hex strings\n\n///\n\n/// TODO: How does it handle strings that are to short?\n\npub(crate) fn bytes_from_hex<const N: usize>(s: &str) -> Result<[u8; N], hex::FromHexError> {\n\n let str = trim_hex_prefix(s);\n\n let mut result = [0_u8; N];\n\n hex::decode_to_slice(str, &mut result)?;\n\n Ok(result)\n\n}\n\n\n\n/// Helper function to remove optionally `0x` prefix from hex strings.\n", "file_path": "src/util.rs", "rank": 35, "score": 14.500003892711309 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{hash_to_field, poseidon_tree::PoseidonTree};\n\n use rand::SeedableRng as _;\n\n use rand_chacha::ChaChaRng;\n\n use serde_json::json;\n\n\n\n fn arb_proof(seed: u64) -> Proof {\n\n // Deterministic randomness for testing\n\n let mut rng = 
ChaChaRng::seed_from_u64(seed);\n\n\n\n // generate identity\n\n let seed: [u8; 16] = rng.gen();\n\n let id = Identity::from_seed(&seed);\n\n\n\n // generate merkle tree\n\n let leaf = Field::from(0);\n\n let mut tree = PoseidonTree::new(21, leaf);\n", "file_path": "src/protocol.rs", "rank": 36, "score": 13.959257108530563 }, { "content": "impl Field {\n\n /// Construct a field element from a big-endian byte vector.\n\n #[must_use]\n\n pub fn from_be_bytes_mod_order(bytes: &[u8]) -> Self {\n\n ArkField::from_be_bytes_mod_order(bytes).into()\n\n }\n\n\n\n /// Convert to big-endian 32-byte array.\n\n #[must_use]\n\n pub const fn to_be_bytes(&self) -> [u8; 32] {\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<u64> for Field {\n\n fn from(value: u64) -> Self {\n\n ArkField::from(value).into()\n\n }\n\n}\n\n\n", "file_path": "src/field.rs", "rank": 37, "score": 13.591506562784652 }, { "content": " #[must_use]\n\n pub const fn as_bytes_be(&self) -> &[u8; 32] {\n\n &self.0\n\n }\n\n}\n\n\n\n/// Conversion from Ether U256\n\nimpl From<&Hash> for U256 {\n\n fn from(hash: &Hash) -> Self {\n\n Self::from_big_endian(hash.as_bytes_be())\n\n }\n\n}\n\n\n\n/// Conversion to Ether U256\n\nimpl From<U256> for Hash {\n\n fn from(u256: U256) -> Self {\n\n let mut bytes = [0_u8; 32];\n\n u256.to_big_endian(&mut bytes);\n\n Self::from_bytes_be(bytes)\n\n }\n", "file_path": "src/hash.rs", "rank": 38, "score": 13.344411382663077 }, { "content": " debug_assert_eq!(M, 2 * N + 2);\n\n let mut result = [0u8; M];\n\n result[0] = b'0';\n\n result[1] = b'x';\n\n hex::encode_to_slice(&bytes[..], &mut result[2..]).expect(\"the buffer is correctly sized\");\n\n result\n\n}\n\n\n\n/// Helper to serialize byte arrays\n\npub(crate) fn serialize_bytes<const N: usize, const M: usize, S: Serializer>(\n\n serializer: S,\n\n bytes: &[u8; N],\n\n) -> Result<S::Ok, S::Error> {\n\n // TODO: Replace `M` with a const expression once it's stable.\n\n debug_assert_eq!(M, 2 * N + 2);\n\n if serializer.is_human_readable() {\n\n // Write as a 0x prefixed lower-case hex string\n\n let buffer = bytes_to_hex::<N, M>(bytes);\n\n let string = str::from_utf8(&buffer).expect(\"the buffer is valid UTF-8\");\n\n serializer.serialize_str(string)\n", "file_path": "src/util.rs", "rank": 40, "score": 13.17668651997064 }, { "content": " let value = Field::from(0x1234_5678);\n\n let serialized = serde_json::to_value(value).unwrap();\n\n let deserialized = serde_json::from_value(serialized).unwrap();\n\n assert_eq!(value, deserialized);\n\n }\n\n\n\n // #[test]\n\n // fn test_ark_pos_ark_roundtrip() {\n\n // let mut rng = ChaChaRng::seed_from_u64(123);\n\n // for _ in 0..1000 {\n\n // let n = Field::rand(&mut rng);\n\n // let m = poseidon_to_ark(ark_to_poseidon(n));\n\n // assert_eq!(n, m);\n\n // }\n\n // }\n\n}\n", "file_path": "src/field.rs", "rank": 41, "score": 12.945662713599143 }, { "content": " }\n\n }\n\n\n\n #[must_use]\n\n pub fn num_leaves(&self) -> usize {\n\n self.depth\n\n .checked_sub(1)\n\n .map(|n| 1 << n)\n\n .unwrap_or_default()\n\n }\n\n\n\n #[must_use]\n\n pub fn root(&self) -> H::Hash {\n\n self.nodes[0].clone()\n\n }\n\n\n\n pub fn set(&mut self, leaf: usize, hash: H::Hash) {\n\n self.set_range(leaf, once(hash));\n\n }\n\n\n", "file_path": "src/merkle_tree.rs", "rank": 42, "score": 12.526505631390753 }, { "content": "/// For binary formats a byte array is used.\n\nimpl Serialize for Field {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n serialize_bytes::<32, 66, S>(serializer, &self.0)\n\n 
}\n\n}\n\n\n\n/// Parse Hash from hex string.\n\n///\n\n/// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix\n\n/// but they must always be exactly 32 bytes.\n\n///\n\n/// Too large values are reduced modulo the field prime.\n\nimpl FromStr for Field {\n\n type Err = hex::FromHexError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let bytes = bytes_from_hex::<32>(s)?;\n\n Ok(Self::from_be_bytes_mod_order(&bytes[..]))\n\n }\n", "file_path": "src/field.rs", "rank": 43, "score": 12.297475729001546 }, { "content": "\n\n/// Element of a Merkle proof\n\n#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Branch<H: Hasher> {\n\n /// Left branch taken, value is the right sibling hash.\n\n Left(H::Hash),\n\n\n\n /// Right branch taken, value is the left sibling hash.\n\n Right(H::Hash),\n\n}\n\n\n\n/// Merkle proof path, bottom to top.\n\n#[derive(Clone, PartialEq, Eq, Serialize)]\n\npub struct Proof<H: Hasher>(pub Vec<Branch<H>>);\n\n\n\n/// For a given node index, return the parent node index\n\n/// Returns None if there is no parent (root node)\n\nconst fn parent(index: usize) -> Option<usize> {\n\n if index == 0 {\n\n None\n", "file_path": "src/merkle_tree.rs", "rank": 44, "score": 11.872870075170265 }, { "content": " // loom. See <https://github.com/tokio-rs/loom>\n\n let a = spawn(|| test_end_to_end(b\"hello\", b\"appId\", b\"xxx\"));\n\n let b = spawn(|| test_end_to_end(b\"secret\", b\"test\", b\"signal\"));\n\n a.join().unwrap();\n\n b.join().unwrap();\n\n }\n\n}\n\n\n\n#[cfg(feature = \"bench\")]\n\npub mod bench {\n\n use crate::{\n\n hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::generate_proof,\n\n Field,\n\n };\n\n use criterion::Criterion;\n\n\n\n pub fn group(criterion: &mut Criterion) {\n\n #[cfg(feature = \"mimc\")]\n\n crate::mimc_hash::bench::group(criterion);\n\n #[cfg(feature = \"mimc\")]\n", "file_path": "src/lib.rs", "rank": 46, "score": 11.666494412140239 }, { "content": " #[allow(clippy::wildcard_imports)]\n\n use super::*;\n\n use criterion::{black_box, Criterion};\n\n use hex_literal::hex;\n\n\n\n // TODO: Randomize trees and indices\n\n // TODO: Bench over a range of depths\n\n\n\n const DEPTH: usize = 20;\n\n const LEAF: Hash = Hash::from_bytes_be(hex!(\n\n \"352aa0818e138060d93b80393828ef8cdc104f331799b3ea647907481e51cce9\"\n\n ));\n\n\n\n pub fn group(criterion: &mut Criterion) {\n\n bench_set(criterion);\n\n bench_proof(criterion);\n\n bench_verify(criterion);\n\n }\n\n\n\n fn bench_set(criterion: &mut Criterion) {\n", "file_path": "src/mimc_tree.rs", "rank": 47, "score": 11.653514608506388 }, { "content": " poseidon_tree::PoseidonTree,\n\n protocol::{generate_nullifier_hash, generate_proof, verify_proof},\n\n Field,\n\n };\n\n use std::thread::spawn;\n\n\n\n #[test]\n\n fn test_field_serde() {\n\n let value = Field::from(0x1234_5678);\n\n let serialized = serde_json::to_value(value).unwrap();\n\n let deserialized = serde_json::from_value(serialized).unwrap();\n\n assert_eq!(value, deserialized);\n\n }\n\n\n\n fn test_end_to_end(identity: &[u8], external_nullifier: &[u8], signal: &[u8]) {\n\n // const LEAF: Hash = Hash::from_bytes_be(hex!(\n\n // \"0000000000000000000000000000000000000000000000000000000000000000\"\n\n // ));\n\n let leaf = Field::from(0);\n\n\n", "file_path": "src/lib.rs", "rank": 48, "score": 11.289904779802745 }, { "content": " let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let mut ser = serde_json::Serializer::new(Vec::new());\n\n 
serialize_bytes::<16, 34, _>(&mut ser, &bytes).unwrap();\n\n let json = ser.into_inner();\n\n assert_eq!(json, b\"\\\"0x0102030405060708090a0b0c0d0e0f10\\\"\");\n\n }\n\n\n\n #[test]\n\n fn test_serialize_bytes_bin() {\n\n let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];\n\n let mut bin: Vec<u8> = Vec::new();\n\n {\n\n let mut ser = bincode::Serializer::new(&mut bin, bincode::options());\n\n serialize_bytes::<16, 34, _>(&mut ser, &bytes).unwrap();\n\n }\n\n // Bincode appears to prefix with a length.\n\n assert_eq!(bin, [\n\n 16, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16\n\n ]);\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 49, "score": 11.269193259183425 }, { "content": "impl From<ArkField> for Field {\n\n fn from(value: ArkField) -> Self {\n\n let mut bytes = [0_u8; 32];\n\n let byte_vec = value.into_repr().to_bytes_be();\n\n bytes.copy_from_slice(&byte_vec[..]);\n\n Self(bytes)\n\n }\n\n}\n\n\n\nimpl From<Field> for ArkField {\n\n fn from(value: Field) -> Self {\n\n Self::from_be_bytes_mod_order(&value.0[..])\n\n }\n\n}\n\n\n\nimpl From<PosField> for Field {\n\n fn from(value: PosField) -> Self {\n\n let mut bytes = [0u8; 32];\n\n value\n\n .into_repr()\n", "file_path": "src/field.rs", "rank": 51, "score": 11.172267444606932 }, { "content": "\n\nimpl Debug for Field {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let hex = bytes_to_hex::<32, 66>(&self.0);\n\n let hex_str = str::from_utf8(&hex).expect(\"hex is always valid utf8\");\n\n write!(f, \"Field({})\", hex_str)\n\n }\n\n}\n\n\n\nimpl Display for Field {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let hex = bytes_to_hex::<32, 66>(&self.0);\n\n let hex_str = str::from_utf8(&hex).expect(\"hex is always valid utf8\");\n\n write!(f, \"{}\", hex_str)\n\n }\n\n}\n\n\n\n/// Serialize a field element.\n\n///\n\n/// For human readable formats a `0x` prefixed lower case hex string is used.\n", "file_path": "src/field.rs", "rank": 52, "score": 10.907702168347104 }, { "content": " Self::from_bytes_be(Sign::Plus, hash.as_bytes_be())\n\n }\n\n}\n\n\n\nimpl Debug for Hash {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let hex = bytes_to_hex::<32, 66>(&self.0);\n\n let hex_str = str::from_utf8(&hex).expect(\"hex is always valid utf8\");\n\n write!(f, \"Field({})\", hex_str)\n\n }\n\n}\n\n\n\nimpl Display for Hash {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let hex = bytes_to_hex::<32, 66>(&self.0);\n\n let hex_str = str::from_utf8(&hex).expect(\"hex is always valid utf8\");\n\n write!(f, \"{}\", hex_str)\n\n }\n\n}\n\n\n", "file_path": "src/hash.rs", "rank": 53, "score": 10.608909423616941 }, { "content": "use crate::util::{bytes_from_hex, bytes_to_hex, deserialize_bytes, keccak256, serialize_bytes};\n\nuse ark_bn254::Fr as ArkField;\n\nuse ark_ff::{BigInteger as _, PrimeField as _};\n\nuse core::{\n\n fmt::{Debug, Display},\n\n str,\n\n str::FromStr,\n\n};\n\nuse ff::{PrimeField as _, PrimeFieldRepr as _};\n\nuse num_bigint::{BigInt, Sign};\n\nuse poseidon_rs::Fr as PosField;\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n/// An element of the BN254 scalar field Fr.\n\n///\n\n/// Represented as a big-endian byte vector without Montgomery reduction.\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\n// TODO: Make sure value is always reduced.\n\npub struct Field([u8; 32]);\n\n\n", "file_path": "src/field.rs", "rank": 54, "score": 
10.525962554662541 }, { "content": " self.update_nodes(start, end);\n\n }\n\n }\n\n\n\n #[must_use]\n\n pub fn proof(&self, leaf: usize) -> Option<Proof<H>> {\n\n if leaf >= self.num_leaves() {\n\n return None;\n\n }\n\n let mut index = self.num_leaves() + leaf - 1;\n\n let mut path = Vec::with_capacity(self.depth);\n\n while let Some(parent) = parent(index) {\n\n // Add proof for node at index to parent\n\n path.push(match index & 1 {\n\n 1 => Branch::Left(self.nodes[index + 1].clone()),\n\n 0 => Branch::Right(self.nodes[index - 1].clone()),\n\n _ => unreachable!(),\n\n });\n\n index = parent;\n\n }\n", "file_path": "src/merkle_tree.rs", "rank": 55, "score": 10.509755675334503 }, { "content": "use ark_bn254::Parameters;\n\nuse ark_ec::bn::Bn;\n\n\n\n// Export types\n\npub use crate::{\n\n field::{hash_to_field, Field},\n\n poseidon_hash::poseidon_hash,\n\n};\n\n\n\n#[cfg(feature = \"dylib\")]\n\npub use circuit::initialize;\n\n\n\npub type Groth16Proof = ark_groth16::Proof<Bn<Parameters>>;\n\npub type EthereumGroth16Proof = ark_circom::ethereum::Proof;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{\n\n hash_to_field,\n\n identity::Identity,\n", "file_path": "src/lib.rs", "rank": 56, "score": 9.987810675876608 }, { "content": " fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {\n\n let mut output = [0; 32];\n\n let mut hasher = Keccak::v256();\n\n hasher.update(left);\n\n hasher.update(right);\n\n hasher.finalize(&mut output);\n\n output\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_index_calculus() {\n\n assert_eq!(parent(0), None);\n\n assert_eq!(parent(1), Some(0));\n\n assert_eq!(parent(2), Some(0));\n\n assert_eq!(parent(3), Some(1));\n\n assert_eq!(parent(4), Some(1));\n\n assert_eq!(parent(5), Some(2));\n\n assert_eq!(parent(6), Some(2));\n\n assert_eq!(first_child(0), 1);\n", "file_path": "src/merkle_tree.rs", "rank": 57, "score": 9.590215883437988 }, { "content": " #[test]\n\n fn test_proof_cast_roundtrip() {\n\n let proof = arb_proof(123);\n\n let ark_proof: ArkProof<Bn<Parameters>> = proof.into();\n\n let result: Proof = ark_proof.into();\n\n assert_eq!(proof, result);\n\n }\n\n\n\n #[test]\n\n fn test_proof_serialize() {\n\n let proof = arb_proof(456);\n\n let json = serde_json::to_value(&proof).unwrap();\n\n assert_eq!(\n\n json,\n\n json!([\n\n [\n\n \"0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2\",\n\n \"0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8\"\n\n ],\n\n [\n", "file_path": "src/protocol.rs", "rank": 58, "score": 9.501300507549452 }, { "content": " tree.root(),\n\n Hash::from_bytes_be(hex!(\n\n \"250de92bd4bcf4fb684fdf64923cb3b20ef4118b41c6ffb8c36b606468d6be57\"\n\n ))\n\n );\n\n let proof = tree.proof(3).expect(\"proof should exist\");\n\n assert_eq!(\n\n proof,\n\n crate::merkle_tree::Proof(vec![\n\n Branch::Right(LEAF),\n\n Branch::Right(Hash::from_bytes_be(hex!(\n\n \"19f1cba77f27301df4ce3391f9b0d766cfd304d0f069cec6c0e55dfda6aba924\"\n\n ))),\n\n ])\n\n );\n\n }\n\n}\n\n\n\n#[cfg(feature = \"bench\")]\n\npub mod bench {\n", "file_path": "src/mimc_tree.rs", "rank": 59, "score": 9.349721572597987 }, { "content": "use ark_bn254::{Bn254, Fr};\n\nuse ark_circom::{read_zkey, WitnessCalculator};\n\nuse ark_groth16::ProvingKey;\n\nuse ark_relations::r1cs::ConstraintMatrices;\n\nuse core::include_bytes;\n\nuse once_cell::sync::{Lazy, OnceCell};\n\nuse std::{io::Cursor, sync::Mutex};\n\nuse wasmer::{Module, Store};\n\n\n\n#[cfg(feature = \"dylib\")]\n\nuse std::{env, path::Path};\n\n#[cfg(feature = \"dylib\")]\n\nuse 
wasmer::Dylib;\n\n\n\nconst ZKEY_BYTES: &[u8] = include_bytes!(env!(\"BUILD_RS_ZKEY_FILE\"));\n\n\n\n#[cfg(not(feature = \"dylib\"))]\n\nconst WASM: &[u8] = include_bytes!(env!(\"BUILD_RS_WASM_FILE\"));\n\n\n\nstatic ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {\n\n let mut reader = Cursor::new(ZKEY_BYTES);\n\n read_zkey(&mut reader).expect(\"zkey should be valid\")\n\n});\n\n\n\nstatic WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();\n\n\n\n/// Initialize the library.\n\n#[cfg(feature = \"dylib\")]\n", "file_path": "src/circuit.rs", "rank": 60, "score": 8.859507755230318 }, { "content": " Branch::Right(_) => (index << 1) + 1,\n\n })\n\n }\n\n\n\n /// Compute path index (TODO: do we want to keep this here?)\n\n #[must_use]\n\n pub fn path_index(&self) -> Vec<Field> {\n\n self.0\n\n .iter()\n\n .map(|branch| match branch {\n\n Branch::Left(_) => Field::from(0),\n\n Branch::Right(_) => Field::from(1),\n\n })\n\n .collect()\n\n }\n\n\n\n /// Compute the Merkle root given a leaf hash\n\n #[must_use]\n\n pub fn root(&self, hash: H::Hash) -> H::Hash {\n\n self.0.iter().fold(hash, |hash, branch| match branch {\n", "file_path": "src/merkle_tree.rs", "rank": 61, "score": 8.785483036714105 }, { "content": " Some(Proof(path))\n\n }\n\n\n\n #[must_use]\n\n pub fn verify(&self, hash: H::Hash, proof: &Proof<H>) -> bool {\n\n proof.root(hash) == self.root()\n\n }\n\n\n\n #[must_use]\n\n pub fn leaves(&self) -> &[H::Hash] {\n\n &self.nodes[(self.num_leaves() - 1)..]\n\n }\n\n}\n\n\n\nimpl<H: Hasher> Proof<H> {\n\n /// Compute the leaf index for this proof\n\n #[must_use]\n\n pub fn leaf_index(&self) -> usize {\n\n self.0.iter().rev().fold(0, |index, branch| match branch {\n\n Branch::Left(_) => index << 1,\n", "file_path": "src/merkle_tree.rs", "rank": 62, "score": 8.771233588995717 }, { "content": " Branch::Left(sibling) => H::hash_node(&hash, sibling),\n\n Branch::Right(sibling) => H::hash_node(sibling, &hash),\n\n })\n\n }\n\n}\n\n\n\nimpl<H> Debug for Branch<H>\n\nwhere\n\n H: Hasher,\n\n H::Hash: Debug,\n\n{\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Left(arg0) => f.debug_tuple(\"Left\").field(arg0).finish(),\n\n Self::Right(arg0) => f.debug_tuple(\"Right\").field(arg0).finish(),\n\n }\n\n }\n\n}\n\n\n\nimpl<H> Debug for Proof<H>\n", "file_path": "src/merkle_tree.rs", "rank": 63, "score": 8.582624475542449 }, { "content": "#![doc = include_str!(\"../README.md\")]\n\n#![warn(clippy::all, clippy::pedantic, clippy::cargo, clippy::nursery)]\n\n// TODO: ark-circom and ethers-core pull in a lot of dependencies, some duplicate.\n\n#![allow(clippy::multiple_crate_versions)]\n\n\n\nmod circuit;\n\nmod field;\n\npub mod hash;\n\npub mod identity;\n\npub mod merkle_tree;\n\nmod poseidon_hash;\n\npub mod poseidon_tree;\n\npub mod protocol;\n\npub mod util;\n\n\n\n#[cfg(feature = \"mimc\")]\n\npub mod mimc_hash;\n\n#[cfg(feature = \"mimc\")]\n\npub mod mimc_tree;\n\n\n", "file_path": "src/lib.rs", "rank": 64, "score": 8.569161217681057 }, { "content": " \"\\\"0x0000000000000000000000000000000000000000000000000000000000000000\\\"\"\n\n );\n\n let hash = Hash(hex!(\n\n \"1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"\n\n ));\n\n assert_eq!(\n\n to_string(&hash).unwrap(),\n\n \"\\\"0x1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\\\"\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_deserialize() {\n\n assert_eq!(\n\n from_str::<Hash>(\n\n 
\"\\\"0x1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\\\"\"\n\n )\n\n .unwrap(),\n\n Hash(hex!(\n\n \"1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"\n", "file_path": "src/hash.rs", "rank": 65, "score": 8.328018247765854 }, { "content": "}\n\n\n\n/// Deserialize human readable hex strings or byte arrays into hashes.\n\n/// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix\n\n/// but they must always be exactly 32 bytes.\n\nimpl<'de> Deserialize<'de> for Field {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n\n let bytes = deserialize_bytes::<32, _>(deserializer)?;\n\n Ok(Self::from_be_bytes_mod_order(&bytes))\n\n }\n\n}\n\n\n\n/// Hash arbitrary data to a field element.\n\n///\n\n/// This is used to create `signal_hash` and `external_nullifier_hash`.\n\n#[must_use]\n\n#[allow(clippy::module_name_repetitions)]\n", "file_path": "src/field.rs", "rank": 66, "score": 8.224185433830712 }, { "content": " .write_be(&mut bytes[..])\n\n .expect(\"write to correctly sized slice always succeeds\");\n\n Self(bytes)\n\n }\n\n}\n\n\n\nimpl From<Field> for PosField {\n\n fn from(value: Field) -> Self {\n\n let mut repr = <Self as ff::PrimeField>::Repr::default();\n\n repr.read_be(&value.0[..])\n\n .expect(\"read from correctly sized slice always succeeds\");\n\n Self::from_repr(repr).expect(\"value is always in range\")\n\n }\n\n}\n\n\n\nimpl From<Field> for BigInt {\n\n fn from(value: Field) -> Self {\n\n Self::from_bytes_be(Sign::Plus, &value.0[..])\n\n }\n\n}\n", "file_path": "src/field.rs", "rank": 67, "score": 8.173255605814962 }, { "content": "/// Parse Hash from hex string.\n\n/// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix\n\n/// but they must always be exactly 32 bytes.\n\nimpl FromStr for Hash {\n\n type Err = hex::FromHexError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n bytes_from_hex::<32>(s).map(Self)\n\n }\n\n}\n\n\n\n/// Serialize hashes into human readable hex strings or byte arrays.\n\n/// Hex strings are lower case without prefix and always 32 bytes.\n\nimpl Serialize for Hash {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n serialize_bytes::<32, 66, S>(serializer, &self.0)\n\n }\n\n}\n\n\n\n/// Deserialize human readable hex strings or byte arrays into hashes.\n", "file_path": "src/hash.rs", "rank": 68, "score": 7.657153793643776 }, { "content": "use crate::{\n\n hash::Hash,\n\n merkle_tree::{self, Hasher, MerkleTree},\n\n mimc_hash::hash,\n\n};\n\nuse serde::Serialize;\n\nuse zkp_u256::U256;\n\n\n\npub type MimcTree = MerkleTree<MimcHash>;\n\n#[allow(dead_code)]\n\npub type Branch = merkle_tree::Branch<MimcHash>;\n\n#[allow(dead_code)]\n\npub type Proof = merkle_tree::Proof<MimcHash>;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Serialize)]\n\npub struct MimcHash;\n\n\n\nimpl Hasher for MimcHash {\n\n type Hash = Hash;\n\n\n", "file_path": "src/mimc_tree.rs", "rank": 69, "score": 7.628041856482137 }, { "content": "use std::time::Instant;\n\nuse thiserror::Error;\n\n\n\n// Matches the private G1Tup type in ark-circom.\n\npub type G1 = (U256, U256);\n\n\n\n// Matches the private G2Tup type in ark-circom.\n\npub type G2 = ([U256; 2], [U256; 2]);\n\n\n\n/// Wrap a proof object so we have serde support\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct Proof(G1, G2, G1);\n\n\n\nimpl From<ArkProof<Bn<Parameters>>> for Proof {\n\n fn from(proof: 
ArkProof<Bn<Parameters>>) -> Self {\n\n let proof = ark_circom::ethereum::Proof::from(proof);\n\n let (a, b, c) = proof.as_tuple();\n\n Self(a, b, c)\n\n }\n\n}\n", "file_path": "src/protocol.rs", "rank": 70, "score": 7.5997267742578485 }, { "content": "use crate::{\n\n merkle_tree::{self, Hasher, MerkleTree},\n\n poseidon_hash, Field,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[allow(dead_code)]\n\npub type PoseidonTree = MerkleTree<PoseidonHash>;\n\n#[allow(dead_code)]\n\npub type Branch = merkle_tree::Branch<PoseidonHash>;\n\n#[allow(dead_code)]\n\npub type Proof = merkle_tree::Proof<PoseidonHash>;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct PoseidonHash;\n\n\n\nimpl Hasher for PoseidonHash {\n\n type Hash = Field;\n\n\n\n fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {\n", "file_path": "src/poseidon_tree.rs", "rank": 71, "score": 7.539527657326433 }, { "content": "# 🦀 semaphore-rs\n\n\n\nRust support library for using [semaphore](https://github.com/appliedzkp/semaphore). It's mostly a Rust rewrite of [zk-kit](https://github.com/appliedzkp/zk-kit), but just focuses on semaphore (for now) and still covers a much smaller scope. It's using [ark-circom](https://github.com/gakonst/ark-circom) under the hood for generating the groth16 proofs.\n\n\n\n## Usage\n\n\n\nAdd this line to your `cargo.toml`:\n\n\n\n```toml\n\nsemaphore = { git = \"https://github.com/worldcoin/semaphore-rs\" }\n\n```\n\n\n\n## Building semaphore circuits\n\n\n\n1. Check out submodule (if not done before already): `git submodule update --init --recursive`\n\n1. Install semaphore dependencies `cd semaphore && npm install`\n\n1. Compile circuits `npm exec ts-node ./scripts/compile-circuits.ts`\n\n1. You'll find the `zkey` and `wasm` file in `semaphore/build/snark`\n\n\n\n## Example\n\n\n\nExample as in `src/lib.rs`, run with `cargo test`.\n\n\n\n```rust\n\nuse semaphore::{hash_to_field, Field, identity::Identity, poseidon_tree::PoseidonTree,\n\n protocol::* };\n\nuse num_bigint::BigInt;\n\n\n\n// generate identity\n\nlet id = Identity::from_seed(b\"secret\");\n\n\n\n// generate merkle tree\n\nlet leaf = Field::from(0);\n\nlet mut tree = PoseidonTree::new(21, leaf);\n\ntree.set(0, id.commitment());\n\n\n\nlet merkle_proof = tree.proof(0).expect(\"proof should exist\");\n\nlet root = tree.root();\n\n\n\n// change signal and external_nullifier here\n\nlet signal_hash = hash_to_field(b\"xxx\");\n\nlet external_nullifier_hash = hash_to_field(b\"appId\");\n\n\n\nlet nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);\n\n\n\nlet proof = generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();\n\nlet success = verify_proof(root, nullifier_hash, signal_hash, external_nullifier_hash, &proof).unwrap();\n\n\n\nassert!(success);\n\n```\n", "file_path": "README.md", "rank": 72, "score": 7.4986019547108445 }, { "content": " root,\n\n nullifier_hash,\n\n signal_hash,\n\n external_nullifier_hash,\n\n &proof,\n\n )\n\n .unwrap();\n\n assert!(success);\n\n }\n\n }\n\n #[test]\n\n fn test_single() {\n\n // Note that rust will still run tests in parallel\n\n test_end_to_end(b\"hello\", b\"appId\", b\"xxx\");\n\n }\n\n\n\n #[test]\n\n fn test_parallel() {\n\n // Note that this does not guarantee a concurrency issue will be detected.\n\n // For that we need much more sophisticated static analysis tooling like\n", "file_path": "src/lib.rs", "rank": 73, "score": 7.051914573186032 }, { "content": " // fn test_tree_4() {\n\n // const 
LEAF: Hash = Hash::from_bytes_be(hex!(\n\n // \"0000000000000000000000000000000000000000000000000000000000000000\"\n\n // ));\n\n\n\n // let tree = PoseidonTree::new(3, LEAF);\n\n // assert_eq!(tree.num_leaves(), 4);\n\n // assert_eq!(\n\n // tree.root(),\n\n // Hash::from_bytes_be(hex!(\n\n //\n\n // \"1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1\"\n\n // ))\n\n // );\n\n // let proof = tree.proof(3).expect(\"proof should exist\");\n\n // assert_eq!(\n\n // proof,\n\n // crate::merkle_tree::Proof(vec![\n\n // Branch::Right(LEAF),\n\n // Branch::Right(Hash::from_bytes_be(hex!(\n\n //\n\n // \"2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864\"\n\n // ))),\n\n // ])\n\n // );\n\n // }\n\n}\n", "file_path": "src/poseidon_tree.rs", "rank": 74, "score": 7.013737864663834 }, { "content": "use color_eyre::eyre::{eyre, Result};\n\nuse std::{\n\n path::{Component, Path, PathBuf},\n\n process::Command,\n\n};\n\n\n\nconst ZKEY_FILE: &str = \"./semaphore/build/snark/semaphore_final.zkey\";\n\nconst WASM_FILE: &str = \"./semaphore/build/snark/semaphore.wasm\";\n\n\n\n// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>\n", "file_path": "build.rs", "rank": 75, "score": 6.8491533241393725 }, { "content": " pub fn set_range<I: IntoIterator<Item = H::Hash>>(&mut self, start: usize, hashes: I) {\n\n let index = self.num_leaves() + start - 1;\n\n let mut count = 0;\n\n // TODO: Error/panic when hashes is longer than available leafs\n\n for (leaf, hash) in self.nodes[index..].iter_mut().zip(hashes) {\n\n *leaf = hash;\n\n count += 1;\n\n }\n\n if count != 0 {\n\n self.update_nodes(index, index + (count - 1));\n\n }\n\n }\n\n\n\n fn update_nodes(&mut self, start: usize, end: usize) {\n\n debug_assert_eq!(depth(start), depth(end));\n\n if let (Some(start), Some(end)) = (parent(start), parent(end)) {\n\n for parent in start..=end {\n\n let child = first_child(parent);\n\n self.nodes[parent] = H::hash_node(&self.nodes[child], &self.nodes[child + 1]);\n\n }\n", "file_path": "src/merkle_tree.rs", "rank": 76, "score": 6.605306373414516 }, { "content": "use crate::{\n\n circuit::{witness_calculator, zkey},\n\n identity::Identity,\n\n merkle_tree::{self, Branch},\n\n poseidon_hash,\n\n poseidon_tree::PoseidonHash,\n\n Field,\n\n};\n\nuse ark_bn254::{Bn254, Parameters};\n\nuse ark_circom::CircomReduction;\n\nuse ark_ec::bn::Bn;\n\nuse ark_groth16::{\n\n create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,\n\n};\n\nuse ark_relations::r1cs::SynthesisError;\n\nuse ark_std::UniformRand;\n\nuse color_eyre::Result;\n\nuse primitive_types::U256;\n\nuse rand::{thread_rng, Rng};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "src/protocol.rs", "rank": 77, "score": 6.10938091027664 }, { "content": "use crate::Field;\n\nuse once_cell::sync::Lazy;\n\nuse poseidon_rs::Poseidon;\n\n\n\nstatic POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);\n\n\n\n#[must_use]\n", "file_path": "src/poseidon_hash.rs", "rank": 78, "score": 5.806468785025497 }, { "content": " pub fn new(depth: usize, initial_leaf: H::Hash) -> Self {\n\n // Compute empty node values, leaf to root\n\n let empty = successors(Some(initial_leaf), |prev| Some(H::hash_node(prev, prev)))\n\n .take(depth)\n\n .collect::<Vec<_>>();\n\n\n\n // Compute node values\n\n let nodes = empty\n\n .iter()\n\n .rev()\n\n .enumerate()\n\n .flat_map(|(depth, hash)| repeat(hash).take(1 << depth))\n\n .cloned()\n\n .collect::<Vec<_>>();\n\n debug_assert!(nodes.len() == (1 << depth) - 
1);\n\n\n\n Self {\n\n depth,\n\n empty,\n\n nodes,\n", "file_path": "src/merkle_tree.rs", "rank": 79, "score": 5.462710337612831 }, { "content": " ))\n\n );\n\n assert_eq!(\n\n from_str::<Hash>(\n\n \"\\\"0X1C4823575d154474EE3e5ac838d002456a815181437afd14f126da58a9912bbe\\\"\"\n\n )\n\n .unwrap(),\n\n Hash(hex!(\n\n \"1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"\n\n ))\n\n );\n\n }\n\n}\n", "file_path": "src/hash.rs", "rank": 80, "score": 5.432579624267301 }, { "content": "\n\n #[test]\n\n fn test_position() {\n\n let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);\n\n tree.set(\n\n 0,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000001\"),\n\n );\n\n tree.set(\n\n 1,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000002\"),\n\n );\n\n tree.set(\n\n 2,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n );\n\n tree.set(\n\n 3,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000004\"),\n\n );\n\n }\n\n}\n", "file_path": "src/merkle_tree.rs", "rank": 81, "score": 5.30050525882899 }, { "content": "use crate::{poseidon_hash, Field};\n\nuse sha2::{Digest, Sha256};\n\n\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub struct Identity {\n\n pub trapdoor: Field,\n\n pub nullifier: Field,\n\n}\n\n\n\n/// Implements the private key derivation function from zk-kit.\n\n///\n\n/// See <https://github.com/appliedzkp/zk-kit/blob/1ea410456fc2b95877efa7c671bc390ffbfb5d36/packages/identity/src/identity.ts#L58>\n", "file_path": "src/identity.rs", "rank": 82, "score": 5.262916284404641 }, { "content": " 3,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000004\"),\n\n );\n\n assert_eq!(\n\n tree.root(),\n\n hex!(\"a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36\")\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_proof() {\n\n let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);\n\n tree.set(\n\n 0,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000001\"),\n\n );\n\n tree.set(\n\n 1,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000002\"),\n\n );\n", "file_path": "src/merkle_tree.rs", "rank": 83, "score": 5.2411596820040085 }, { "content": "//! Implements basic binary Merkle trees\n\n//!\n\n//! # To do\n\n//!\n\n//! 
* Disk based storage backend (using mmaped files should be easy)\n\n\n\nuse crate::Field;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n\n fmt::Debug,\n\n iter::{once, repeat, successors},\n\n};\n\n\n\n/// Hash types, values and algorithms for a Merkle tree\n", "file_path": "src/merkle_tree.rs", "rank": 84, "score": 5.2337495360396105 }, { "content": "}\n\n\n\n/// Conversion from vec\n\nimpl From<Vec<u8>> for Hash {\n\n fn from(vec: Vec<u8>) -> Self {\n\n let mut bytes = [0_u8; 32];\n\n bytes.copy_from_slice(&vec[0..32]);\n\n Self::from_bytes_be(bytes)\n\n }\n\n}\n\n\n\n/// Conversion to `BigInt`\n\nimpl From<Hash> for BigInt {\n\n fn from(hash: Hash) -> Self {\n\n Self::from_bytes_be(Sign::Plus, hash.as_bytes_be())\n\n }\n\n}\n\n\n\nimpl From<&Hash> for BigInt {\n\n fn from(hash: &Hash) -> Self {\n", "file_path": "src/hash.rs", "rank": 85, "score": 5.1643387921304615 }, { "content": " } else {\n\n Some(((index + 1) >> 1) - 1)\n\n }\n\n}\n\n\n\n/// For a given node index, return index of the first (left) child.\n\nconst fn first_child(index: usize) -> usize {\n\n (index << 1) + 1\n\n}\n\n\n\nconst fn depth(index: usize) -> usize {\n\n // `n.next_power_of_two()` will return `n` iff `n` is a power of two.\n\n // The extra offset corrects this.\n\n (index + 2).next_power_of_two().trailing_zeros() as usize - 1\n\n}\n\n\n\nimpl<H: Hasher> MerkleTree<H> {\n\n /// Creates a new `MerkleTree`\n\n /// * `depth` - The depth of the tree, including the root. This is 1 greater\n\n /// than the `treeLevels` argument to the Semaphore contract.\n", "file_path": "src/merkle_tree.rs", "rank": 86, "score": 4.936380115985478 }, { "content": " crate::mimc_tree::bench::group(criterion);\n\n bench_proof(criterion);\n\n }\n\n\n\n fn bench_proof(criterion: &mut Criterion) {\n\n let leaf = Field::from(0);\n\n\n\n // Create tree\n\n let id = Identity::from_seed(b\"hello\");\n\n let mut tree = PoseidonTree::new(21, leaf);\n\n tree.set(0, id.commitment());\n\n let merkle_proof = tree.proof(0).expect(\"proof should exist\");\n\n\n\n // change signal and external_nullifier here\n\n let signal_hash = hash_to_field(b\"xxx\");\n\n let external_nullifier_hash = hash_to_field(b\"appId\");\n\n\n\n criterion.bench_function(\"proof\", move |b| {\n\n b.iter(|| {\n\n generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();\n\n });\n\n });\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 87, "score": 4.7831341205232745 }, { "content": " assert_eq!(first_child(2), 5);\n\n assert_eq!(depth(0), 0);\n\n assert_eq!(depth(1), 1);\n\n assert_eq!(depth(2), 1);\n\n assert_eq!(depth(3), 2);\n\n assert_eq!(depth(6), 2);\n\n }\n\n\n\n #[test]\n\n fn test_root() {\n\n let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);\n\n assert_eq!(\n\n tree.root(),\n\n hex!(\"b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30\")\n\n );\n\n tree.set(\n\n 0,\n\n hex!(\"0000000000000000000000000000000000000000000000000000000000000001\"),\n\n );\n\n assert_eq!(\n", "file_path": "src/merkle_tree.rs", "rank": 88, "score": 4.408595863510054 }, { "content": " // generate identity\n\n let id = Identity::from_seed(identity);\n\n\n\n // generate merkle tree\n\n let mut tree = PoseidonTree::new(21, leaf);\n\n tree.set(0, id.commitment());\n\n\n\n let merkle_proof = tree.proof(0).expect(\"proof should exist\");\n\n let root = tree.root();\n\n dbg!(root);\n\n\n\n let signal_hash = hash_to_field(signal);\n\n let external_nullifier_hash = hash_to_field(external_nullifier);\n\n let nullifier_hash = 
generate_nullifier_hash(&id, external_nullifier_hash);\n\n\n\n let proof =\n\n generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();\n\n\n\n for _ in 0..5 {\n\n let success = verify_proof(\n", "file_path": "src/lib.rs", "rank": 89, "score": 4.278055654950087 }, { "content": " let mut tree = MimcTree::new(DEPTH, LEAF);\n\n let index = 354_184;\n\n let hash = Hash::from_bytes_be([0_u8; 32]);\n\n criterion.bench_function(\"mimc_tree_set\", move |bencher| {\n\n bencher.iter(|| tree.set(index, black_box(hash)));\n\n });\n\n }\n\n\n\n fn bench_proof(criterion: &mut Criterion) {\n\n let tree = MimcTree::new(DEPTH, LEAF);\n\n let index = 354_184;\n\n criterion.bench_function(\"mimc_tree_proof\", move |bencher| {\n\n bencher.iter(|| tree.proof(black_box(index)));\n\n });\n\n }\n\n\n\n fn bench_verify(criterion: &mut Criterion) {\n\n let tree = MimcTree::new(DEPTH, LEAF);\n\n let index = 354_184;\n\n let proof = tree.proof(index).expect(\"proof should exist\");\n\n let hash = Hash::from_bytes_be([0_u8; 32]);\n\n criterion.bench_function(\"mimc_verfiy\", move |bencher| {\n\n bencher.iter(|| proof.root(black_box(hash)));\n\n });\n\n }\n\n}\n", "file_path": "src/mimc_tree.rs", "rank": 90, "score": 3.9939921305594868 }, { "content": " tree.set(0, id.commitment());\n\n\n\n let merkle_proof = tree.proof(0).expect(\"proof should exist\");\n\n\n\n let external_nullifier: [u8; 16] = rng.gen();\n\n let external_nullifier_hash = hash_to_field(&external_nullifier);\n\n\n\n let signal: [u8; 16] = rng.gen();\n\n let signal_hash = hash_to_field(&signal);\n\n\n\n generate_proof_rng(\n\n &id,\n\n &merkle_proof,\n\n external_nullifier_hash,\n\n signal_hash,\n\n &mut rng,\n\n )\n\n .unwrap()\n\n }\n\n\n", "file_path": "src/protocol.rs", "rank": 91, "score": 3.413160329701985 }, { "content": "# The MIT License (MIT)\n\n\n\nCopyright © 2021 Worldcoin\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n\n**The Software is provided “as is”, without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose and noninfringement. 
In no event shall the authors or copyright holders be liable for any claim, damages or other liability, whether in an action of contract, tort or otherwise, arising from, out of or in connection with the Software or the use or other dealings in the Software.**\n", "file_path": "mit-license.md", "rank": 92, "score": 2.9925712698179643 }, { "content": "use semaphore as lib;\n\n\n", "file_path": "criterion.rs", "rank": 93, "score": 2.9448526384695395 }, { "content": " }\n\n\n\n // Create a WASM engine for the target that can compile\n\n let triple = Triple::from_str(&env::var(\"TARGET\")?).map_err(|e| eyre!(e))?;\n\n let cpu_features = enum_set!();\n\n let target = Target::new(triple, cpu_features);\n\n let compiler_config = Cranelift::default();\n\n let engine = Dylib::new(compiler_config).target(target).engine();\n\n\n\n // Compile the WASM module\n\n let store = Store::new(&engine);\n\n let module = Module::from_file(&store, &wasm_file)?;\n\n module.serialize_to_file(&dylib_file)?;\n\n assert!(dylib_file.exists());\n\n println!(\"cargo:warning=Circuit dylib is in {}\", dylib_file.display());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 94, "score": 1.4778528532489306 } ]
Rust
src/player.rs
ttempleton/rust-battleship
48d45f8c5d8c73ec399e1b6781418cdb5774fdc0
use crate::{direction::Direction, ship::Ship, space::Space};
use std::cmp;

pub struct Player {
    is_cpu: bool,
    spaces: Vec<Space>,
    ships: Vec<Ship>,
    grid_size: [u8; 2],
    grid_cursor: [u8; 2],
}

impl Player {
    pub fn new(grid_size: [u8; 2], ship_count: usize, is_cpu: bool) -> Player {
        Player {
            is_cpu: is_cpu,
            spaces: Space::all_grid_spaces(&grid_size),
            ships: Vec::with_capacity(ship_count),
            grid_size: grid_size,
            grid_cursor: [0, 0],
        }
    }

    pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {
        let space_index = self.space_index(pos);
        let ship_hit = self.ships.iter().position(|s| s.pos().contains(pos));

        self.spaces[space_index].set_checked(ship_hit.is_some())?;

        Ok(())
    }

    pub fn sink_ship_if_all_hit(&mut self, pos: &[u8; 2]) -> Result<bool, &'static str> {
        if let Some(index) = self.ships.iter().position(|s| s.pos().contains(pos)) {
            let sunk = self.ships[index]
                .pos()
                .iter()
                .all(|p| self.space(p).is_hit());

            if sunk {
                self.ships[index].set_sunk()?;
            }

            Ok(sunk)
        } else {
            Err("no ship at the given position")
        }
    }

    pub fn all_ships_sunk(&self) -> bool {
        self.ships.iter().all(|ship| ship.is_sunk())
    }

    pub fn suggested_checks(&self) -> Vec<[u8; 2]> {
        let mut select = vec![];
        let directions = Direction::all();
        let hit_spaces = self
            .spaces
            .iter()
            .filter(|s| s.is_hit() && self.ship(s.pos()).unwrap().is_active())
            .collect::<Vec<&Space>>();

        for space in &hit_spaces {
            for direction in &directions {
                let unchecked = self.find_unchecked_space(space.pos(), *direction, true);

                if let Some(pos) = unchecked {
                    if !select.contains(&pos) {
                        select.push(pos);
                    }
                }
            }
        }

        if hit_spaces.len() > 0 && select.is_empty() {
            for direction in &directions {
                let unchecked = self.find_unchecked_space(hit_spaces[0].pos(), *direction, false);

                if let Some(pos) = unchecked {
                    select.push(pos);
                }
            }
        }

        if select.is_empty() {
            select = self
                .spaces
                .iter()
                .filter(|space| space.is_unchecked())
                .map(|space| *space.pos())
                .collect::<Vec<[u8; 2]>>();
        }

        select
    }

    pub fn add_ship(
        &mut self,
        head: [u8; 2],
        direction: Direction,
        length: u8,
        placement: bool,
    ) -> Result<(), &'static str> {
        if self.ships.len() == self.ships.capacity() {
            Err("tried to add ship to a player with all ships already added")
        } else {
            let pos = self
                .get_ship_position(head, direction, length)
                .ok_or("tried to place a ship partially out of bounds")?;

            if !placement && !self.valid_ship_position(&pos) {
                Err("tried to place a ship in an invalid position")
            } else {
                let mut ship = Ship::new(pos)?;
                if !placement {
                    ship.set_active()?;
                }
                self.ships.push(ship);

                Ok(())
            }
        }
    }

    pub fn move_placement_ship(&mut self, direction: Direction) -> Result<(), &'static str> {
        let index = self.ships.len() - 1;
        let old_head = self.ships[index].pos()[0];
        let new_head = self
            .movement(&old_head, direction)
            .ok_or("movement not possible without going out of bounds")?;
        let ship_pos = self
            .get_ship_position(
                new_head,
                self.ships[index].dir(),
                self.ships[index].len() as u8,
            )
            .ok_or("movement not possible without going out of bounds")?;

        self.ships[index].set_pos(ship_pos)?;

        Ok(())
    }

    pub fn place_placement_ship(&mut self) -> Result<(), &'static str> {
        let index = self.ships.len() - 1;

        if !self.valid_ship_position(&self.ships[index].pos()) {
            Err("placement ship overlaps with another ship")
        } else {
            self.ships[index].set_active()?;

            Ok(())
        }
    }

    pub fn rotate_placement_ship(&mut self) -> Result<(), &'static str> {
        let index = self.ships.len() - 1;
        let ship_len = self.ships[index].len() as u8;
        let dir = self.ships[index].dir().rotated();
        let old_head = self.ships[index].pos()[0];
        let new_head = match dir {
            Direction::North => [
                old_head[0],
                cmp::min(old_head[1], self.grid_size[1] - ship_len),
            ],
            Direction::East => [cmp::max(old_head[0], ship_len - 1), old_head[1]],
            Direction::South => [old_head[0], cmp::max(old_head[1], ship_len - 1)],
            Direction::West => [
                cmp::min(old_head[0], self.grid_size[0] - ship_len),
                old_head[1],
            ],
        };
        let ship_pos = self.get_ship_position(new_head, dir, ship_len).unwrap();

        self.ships[index].set_pos(ship_pos)?;

        Ok(())
    }

    pub fn get_ship_position(
        &self,
        head: [u8; 2],
        direction: Direction,
        length: u8,
    ) -> Option<Vec<[u8; 2]>> {
        let valid = match direction {
            Direction::North => head[1] + length <= self.grid_size[1],
            Direction::East => head[0] >= length - 1,
            Direction::South => head[1] >= length - 1,
            Direction::West => head[0] + length <= self.grid_size[0],
        };

        if valid {
            let mut ship = Vec::with_capacity(length as usize);

            for pos in 0..length {
                let pos_u8 = pos as u8;
                ship.push(match direction {
                    Direction::North => [head[0], head[1] + pos_u8],
                    Direction::East => [head[0] - pos_u8, head[1]],
                    Direction::South => [head[0], head[1] - pos_u8],
                    Direction::West => [head[0] + pos_u8, head[1]],
                });
            }

            Some(ship)
        } else {
            None
        }
    }

    fn valid_ship_position(&self, new_ship: &[[u8; 2]]) -> bool {
        new_ship.iter().all(|s| {
            self.valid_space(s)
                && !self.ship_is_in_space(s)
                && !(self.ship_is_next_to(s) && self.is_cpu)
        })
    }

    pub fn ships(&self) -> &[Ship] {
        &self.ships
    }

    fn ship(&self, pos: &[u8; 2]) -> Option<&Ship> {
        self.ships.iter().find(|s| s.pos().contains(pos))
    }

    pub fn ship_is_in_space(&self, pos: &[u8; 2]) -> bool {
        self.ships
            .iter()
            .any(|s| s.pos().contains(pos) && !s.is_placement())
    }

    fn ship_is_next_to(&self, pos: &[u8; 2]) -> bool {
        let &[x, y] = pos;

        x > 0 && self.ship_is_in_space(&[x - 1, y])
            || x < self.grid_size[0] - 1 && self.ship_is_in_space(&[x + 1, y])
            || y > 0 && self.ship_is_in_space(&[x, y - 1])
            || y < self.grid_size[1] - 1 && self.ship_is_in_space(&[x, y + 1])
    }

    pub fn spaces(&self) -> &[Space] {
        &self.spaces
    }

    fn valid_space(&self, pos: &[u8; 2]) -> bool {
        pos[0] < self.grid_size[0] && pos[1] < self.grid_size[1]
    }

    pub fn space(&self, pos: &[u8; 2]) -> &Space {
        self.spaces.get(self.space_index(pos)).unwrap()
    }

    fn space_index(&self, pos: &[u8; 2]) -> usize {
        self.grid_size[0] as usize * pos[0] as usize + pos[1] as usize
    }

    fn movement(&self, pos: &[u8; 2], direction: Direction) -> Option<[u8; 2]> {
        let valid = match direction {
            Direction::North => pos[1] > 0,
            Direction::East => pos[0] < self.grid_size[0] - 1,
            Direction::South => pos[1] < self.grid_size[1] - 1,
            Direction::West => pos[0] > 0,
        };

        match valid {
            true => Some(match direction {
                Direction::North => [pos[0], pos[1] - 1],
                Direction::East => [pos[0] + 1, pos[1]],
                Direction::South => [pos[0], pos[1] + 1],
                Direction::West => [pos[0] - 1, pos[1]],
            }),
            false => None,
        }
    }

    fn find_unchecked_space(
        &self,
        pos: &[u8; 2],
        direction: Direction,
        check_for_line: bool,
    ) -> Option<[u8; 2]> {
        let mut check_pos = self.movement(pos, direction);

        while let Some(next_pos) = check_pos {
            let next_space = self.space(&next_pos);

            match next_space.is_hit() {
                true => check_pos = self.movement(&next_pos, direction),
                false => {
                    if !next_space.is_unchecked() {
                        check_pos = None;
                    }
                    break;
                }
            };
        }

        if check_for_line && check_pos.is_some() {
            let unchecked = check_pos.unwrap();
            let opposite_dir = direction.opposite();
            let prev_pos = self.movement(&unchecked, opposite_dir).unwrap();

            if &prev_pos == pos {
                check_pos = None;
            }
        }

        check_pos
    }

    pub fn grid_cursor(&self) -> &[u8; 2] {
        &self.grid_cursor
    }

    pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> {
        if let Some(new_cursor) = self.movement(&self.grid_cursor, direction) {
            self.set_grid_cursor(&new_cursor)?;

            Ok(())
        } else {
            Err("tried to move grid cursor out of bounds")
        }
    }

    pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {
        if self.space_index(pos) < self.spaces.len() {
            self.grid_cursor = *pos;

            Ok(())
        } else {
            Err("tried to set the grid cursor to a nonexistent space")
        }
    }

    pub fn is_cpu(&self) -> bool {
        self.is_cpu
    }

    pub fn placement_ship(&self) -> Result<&Ship, &'static str> {
        let ships_len = self.ships.len();

        if ships_len == 0 {
            Err("player has no ships")
        } else if !self.ships[ships_len - 1].is_placement() {
            Err("player has no placement ship")
        } else {
            Ok(&self.ships[self.ships.len() - 1])
        }
    }

    pub fn placement_ship_mut(&mut self) -> Result<&mut Ship, &'static str> {
        let ships_len = self.ships.len();

        if ships_len == 0 {
            Err("player has no ships")
        } else if !self.ships[ships_len - 1].is_placement() {
            Err("player has no placement ship")
        } else {
            Ok(&mut self.ships[ships_len - 1])
        }
    }
}
use crate::{direction::Direction, ship::Ship, space::Space}; use std::cmp; pub struct Player { is_cpu: bool, spaces: Vec<Space>, ships: Vec<Ship>, grid_size: [u8; 2], grid_cursor: [u8; 2], } impl Player { pub fn new(grid_size: [u8; 2], ship_count: usize, is_cpu: bool) -> Player { Player { is_cpu: is_cpu, spaces: Space::all_grid_spaces(&grid_size), ships: Vec::with_capacity(ship_count), grid_size: grid_size, grid_cursor: [0, 0], } } pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { let space_index = self.space_index(pos); let ship_hit = self.ships.iter().position(|s| s.pos().contains(pos)); self.spaces[space_index].set_checked(ship_hit.is_some())?; Ok(()) } pub fn sink_ship_if_all_hit(&mut self, pos: &[u8; 2]) -> Result<bool, &'static str> { if let Some(index) = self.ships.iter().position(|s| s.pos().contains(pos)) { let sunk = self.ships[index] .pos() .iter() .all(|p| self.space(p).is_hit()); if sunk { self.ships[index].set_sunk()?; } Ok(sunk) } else { Err("no ship at the given position") } } pub fn all_ships_sunk(&self) -> bool { self.ships.iter().all(|ship| ship.is_sunk()) } pub fn suggested_checks(&self) -> Vec<[u8; 2]> { let mut select = vec![]; let directions = Direction::all(); let hit_spaces = self .spaces .iter() .filter(|s| s.is_hit() && self.ship(s.pos()).unwrap().is_active()) .collect::<Vec<&Space>>(); for space in &hit_spaces { for direction in &directions { let unchecked = self.find_unchecked_space(space.pos(), *direction, true); if let Some(pos) = unchecked { if !select.contains(&pos) { select.push(pos); } } } } if hit_spaces.len() > 0 && select.is_empty() { for direction in &directions { let unchecked = self.find_unchecked_space(hit_spaces[0].pos(), *direction, false); if let Some(pos) = unchecked { select.push(pos); } } } if select.is_empty() { select = self .spaces .iter() .filter(|space| space.is_unchecked()) .map(|space| *space.pos()) .collect::<Vec<[u8; 2]>>(); } select }
pub fn move_placement_ship(&mut self, direction: Direction) -> Result<(), &'static str> { let index = self.ships.len() - 1; let old_head = self.ships[index].pos()[0]; let new_head = self .movement(&old_head, direction) .ok_or("movement not possible without going out of bounds")?; let ship_pos = self .get_ship_position( new_head, self.ships[index].dir(), self.ships[index].len() as u8, ) .ok_or("movement not possible without going out of bounds")?; self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn place_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; if !self.valid_ship_position(&self.ships[index].pos()) { Err("placement ship overlaps with another ship") } else { self.ships[index].set_active()?; Ok(()) } } pub fn rotate_placement_ship(&mut self) -> Result<(), &'static str> { let index = self.ships.len() - 1; let ship_len = self.ships[index].len() as u8; let dir = self.ships[index].dir().rotated(); let old_head = self.ships[index].pos()[0]; let new_head = match dir { Direction::North => [ old_head[0], cmp::min(old_head[1], self.grid_size[1] - ship_len), ], Direction::East => [cmp::max(old_head[0], ship_len - 1), old_head[1]], Direction::South => [old_head[0], cmp::max(old_head[1], ship_len - 1)], Direction::West => [ cmp::min(old_head[0], self.grid_size[0] - ship_len), old_head[1], ], }; let ship_pos = self.get_ship_position(new_head, dir, ship_len).unwrap(); self.ships[index].set_pos(ship_pos)?; Ok(()) } pub fn get_ship_position( &self, head: [u8; 2], direction: Direction, length: u8, ) -> Option<Vec<[u8; 2]>> { let valid = match direction { Direction::North => head[1] + length <= self.grid_size[1], Direction::East => head[0] >= length - 1, Direction::South => head[1] >= length - 1, Direction::West => head[0] + length <= self.grid_size[0], }; if valid { let mut ship = Vec::with_capacity(length as usize); for pos in 0..length { let pos_u8 = pos as u8; ship.push(match direction { Direction::North => [head[0], head[1] + pos_u8], Direction::East => [head[0] - pos_u8, head[1]], Direction::South => [head[0], head[1] - pos_u8], Direction::West => [head[0] + pos_u8, head[1]], }); } Some(ship) } else { None } } fn valid_ship_position(&self, new_ship: &[[u8; 2]]) -> bool { new_ship.iter().all(|s| { self.valid_space(s) && !self.ship_is_in_space(s) && !(self.ship_is_next_to(s) && self.is_cpu) }) } pub fn ships(&self) -> &[Ship] { &self.ships } fn ship(&self, pos: &[u8; 2]) -> Option<&Ship> { self.ships.iter().find(|s| s.pos().contains(pos)) } pub fn ship_is_in_space(&self, pos: &[u8; 2]) -> bool { self.ships .iter() .any(|s| s.pos().contains(pos) && !s.is_placement()) } fn ship_is_next_to(&self, pos: &[u8; 2]) -> bool { let &[x, y] = pos; x > 0 && self.ship_is_in_space(&[x - 1, y]) || x < self.grid_size[0] - 1 && self.ship_is_in_space(&[x + 1, y]) || y > 0 && self.ship_is_in_space(&[x, y - 1]) || y < self.grid_size[1] - 1 && self.ship_is_in_space(&[x, y + 1]) } pub fn spaces(&self) -> &[Space] { &self.spaces } fn valid_space(&self, pos: &[u8; 2]) -> bool { pos[0] < self.grid_size[0] && pos[1] < self.grid_size[1] } pub fn space(&self, pos: &[u8; 2]) -> &Space { self.spaces.get(self.space_index(pos)).unwrap() } fn space_index(&self, pos: &[u8; 2]) -> usize { self.grid_size[0] as usize * pos[0] as usize + pos[1] as usize } fn movement(&self, pos: &[u8; 2], direction: Direction) -> Option<[u8; 2]> { let valid = match direction { Direction::North => pos[1] > 0, Direction::East => pos[0] < self.grid_size[0] - 1, Direction::South => pos[1] < self.grid_size[1] - 1, 
Direction::West => pos[0] > 0, }; match valid { true => Some(match direction { Direction::North => [pos[0], pos[1] - 1], Direction::East => [pos[0] + 1, pos[1]], Direction::South => [pos[0], pos[1] + 1], Direction::West => [pos[0] - 1, pos[1]], }), false => None, } } fn find_unchecked_space( &self, pos: &[u8; 2], direction: Direction, check_for_line: bool, ) -> Option<[u8; 2]> { let mut check_pos = self.movement(pos, direction); while let Some(next_pos) = check_pos { let next_space = self.space(&next_pos); match next_space.is_hit() { true => check_pos = self.movement(&next_pos, direction), false => { if !next_space.is_unchecked() { check_pos = None; } break; } }; } if check_for_line && check_pos.is_some() { let unchecked = check_pos.unwrap(); let opposite_dir = direction.opposite(); let prev_pos = self.movement(&unchecked, opposite_dir).unwrap(); if &prev_pos == pos { check_pos = None; } } check_pos } pub fn grid_cursor(&self) -> &[u8; 2] { &self.grid_cursor } pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> { if let Some(new_cursor) = self.movement(&self.grid_cursor, direction) { self.set_grid_cursor(&new_cursor)?; Ok(()) } else { Err("tried to move grid cursor out of bounds") } } pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { if self.space_index(pos) < self.spaces.len() { self.grid_cursor = *pos; Ok(()) } else { Err("tried to set the grid cursor to a nonexistent space") } } pub fn is_cpu(&self) -> bool { self.is_cpu } pub fn placement_ship(&self) -> Result<&Ship, &'static str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&self.ships[self.ships.len() - 1]) } } pub fn placement_ship_mut(&mut self) -> Result<&mut Ship, &'static str> { let ships_len = self.ships.len(); if ships_len == 0 { Err("player has no ships") } else if !self.ships[ships_len - 1].is_placement() { Err("player has no placement ship") } else { Ok(&mut self.ships[ships_len - 1]) } } }
pub fn add_ship( &mut self, head: [u8; 2], direction: Direction, length: u8, placement: bool, ) -> Result<(), &'static str> { if self.ships.len() == self.ships.capacity() { Err("tried to add ship to a player with all ships already added") } else { let pos = self .get_ship_position(head, direction, length) .ok_or("tried to place a ship partially out of bounds")?; if !placement && !self.valid_ship_position(&pos) { Err("tried to place a ship in an invalid position") } else { let mut ship = Ship::new(pos)?; if !placement { ship.set_active()?; } self.ships.push(ship); Ok(()) } } }
function_block-full_function
[ { "content": " 0 => Direction::North,\n\n 1 => Direction::East,\n\n 2 => Direction::South,\n\n 3 => Direction::West,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n /// Returns the direction travelled from `pos1` to `pos2` if the positions\n\n /// represent travel in exactly north, south, east or west direction, or\n\n /// returns an error.\n\n pub fn from_positions(pos1: &[u8; 2], pos2: &[u8; 2]) -> Result<Direction, &'static str> {\n\n let x_diff = pos1[0] as i16 - pos2[0] as i16;\n\n let y_diff = pos1[1] as i16 - pos2[1] as i16;\n\n\n\n if x_diff == 0 && y_diff > 0 {\n\n Ok(Direction::North)\n\n } else if x_diff == 0 && y_diff < 0 {\n\n Ok(Direction::South)\n\n } else if x_diff > 0 && y_diff == 0 {\n", "file_path": "src/direction.rs", "rank": 0, "score": 48144.22849332017 }, { "content": "use rand::{thread_rng, Rng};\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Direction {\n\n North,\n\n East,\n\n South,\n\n West,\n\n}\n\n\n\nimpl Direction {\n\n pub fn opposite(&self) -> Direction {\n\n match *self {\n\n Direction::North => Direction::South,\n\n Direction::East => Direction::West,\n\n Direction::South => Direction::North,\n\n Direction::West => Direction::East,\n\n }\n\n }\n\n\n", "file_path": "src/direction.rs", "rank": 1, "score": 48138.36352614102 }, { "content": " Direction::from_positions(&[0, 2], &[0, 0]),\n\n Ok(Direction::North)\n\n );\n\n assert_eq!(\n\n Direction::from_positions(&[2, 0], &[0, 0]),\n\n Ok(Direction::West)\n\n );\n\n assert!(Direction::from_positions(&[0, 0], &[0, 0]).is_err());\n\n }\n\n}\n", "file_path": "src/direction.rs", "rank": 2, "score": 48138.24104443069 }, { "content": " Ok(Direction::West)\n\n } else if x_diff < 0 && y_diff == 0 {\n\n Ok(Direction::East)\n\n } else {\n\n Err(\"positions do not represent a supported direction\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn opposite() {\n\n assert_eq!(Direction::North.opposite(), Direction::South);\n\n assert_eq!(Direction::South.opposite(), Direction::North);\n\n assert_eq!(Direction::East.opposite(), Direction::West);\n\n assert_eq!(Direction::West.opposite(), Direction::East);\n\n }\n", "file_path": "src/direction.rs", "rank": 3, "score": 48137.59954434497 }, { "content": "\n\n #[test]\n\n fn rotated() {\n\n assert_eq!(Direction::North.rotated(), Direction::East);\n\n assert_eq!(Direction::East.rotated(), Direction::South);\n\n assert_eq!(Direction::South.rotated(), Direction::West);\n\n assert_eq!(Direction::West.rotated(), Direction::North);\n\n }\n\n\n\n #[test]\n\n fn from_positions() {\n\n assert_eq!(\n\n Direction::from_positions(&[0, 0], &[0, 2]),\n\n Ok(Direction::South)\n\n );\n\n assert_eq!(\n\n Direction::from_positions(&[0, 0], &[2, 0]),\n\n Ok(Direction::East)\n\n );\n\n assert_eq!(\n", "file_path": "src/direction.rs", "rank": 4, "score": 48137.0794488346 }, { "content": " pub fn rotated(&self) -> Direction {\n\n match *self {\n\n Direction::North => Direction::East,\n\n Direction::East => Direction::South,\n\n Direction::South => Direction::West,\n\n Direction::West => Direction::North,\n\n }\n\n }\n\n\n\n pub fn all() -> [Direction; 4] {\n\n [\n\n Direction::North,\n\n Direction::East,\n\n Direction::South,\n\n Direction::West,\n\n ]\n\n }\n\n\n\n pub fn random() -> Direction {\n\n match thread_rng().gen_range(0, 4) {\n", "file_path": "src/direction.rs", "rank": 5, "score": 48135.116949679585 }, { "content": "fn main() {\n\n let settings = settings::AppSettings { space_size: 20 };\n\n let mut app = 
app::App::new(&settings);\n\n\n\n app.init();\n\n}\n", "file_path": "src/main.rs", "rank": 6, "score": 28101.960460366587 }, { "content": "#[derive(PartialEq)]\n\nenum ShipState {\n\n Placement,\n\n Active,\n\n Sunk,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn new() {\n\n let hopefully_ship = Ship::new(vec![[0, 0], [0, 1]]);\n\n assert!(hopefully_ship.is_ok());\n\n }\n\n\n\n #[test]\n\n fn pos() {\n\n let pos = vec![[0, 0], [0, 1]];\n\n let ship = Ship::new(pos.clone()).unwrap();\n", "file_path": "src/ship.rs", "rank": 7, "score": 27965.051859449875 }, { "content": " ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the space's state is not `SpaceState::Unchecked`.\n\n pub fn set_checked(&mut self, hit: bool) -> Result<(), &'static str> {\n\n if self.state != SpaceState::Unchecked {\n\n Err(\"tried to check an already checked space\")\n\n } else {\n\n self.state = SpaceState::Checked(hit);\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn is_unchecked(&self) -> bool {\n\n self.state == SpaceState::Unchecked\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.state == SpaceState::Checked(false)\n\n }\n", "file_path": "src/space.rs", "rank": 8, "score": 24082.28748830116 }, { "content": "\n\n pub fn is_hit(&self) -> bool {\n\n self.state == SpaceState::Checked(true)\n\n }\n\n\n\n pub fn pos(&self) -> &[u8; 2] {\n\n &self.position\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq)]\n\npub enum SpaceState {\n\n Unchecked,\n\n Checked(bool),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/space.rs", "rank": 9, "score": 24081.11192480674 }, { "content": "pub struct Space {\n\n state: SpaceState,\n\n position: [u8; 2],\n\n}\n\n\n\nimpl Space {\n\n pub fn new(pos: [u8; 2]) -> Space {\n\n Space {\n\n state: SpaceState::Unchecked,\n\n position: pos,\n\n }\n\n }\n\n\n\n pub fn all_grid_spaces(grid_size: &[u8; 2]) -> Vec<Space> {\n\n (0..grid_size[0])\n\n .flat_map(|col| (0..grid_size[1]).map(move |row| Space::new([col, row])))\n\n .collect()\n\n }\n\n\n\n /// Sets this space as having been checked, and whether it was hit.\n", "file_path": "src/space.rs", "rank": 10, "score": 24078.54294841278 }, { "content": " #[test]\n\n fn set_checked() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(space.set_checked(false).is_err());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n\n assert!(space.set_checked(true).is_err());\n\n }\n\n\n\n #[test]\n\n fn is_unchecked() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(space.is_unchecked());\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(!space.is_unchecked());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n", "file_path": "src/space.rs", "rank": 11, "score": 24076.05998529855 }, { "content": " assert!(!space.is_unchecked());\n\n }\n\n\n\n #[test]\n\n fn is_empty() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(!space.is_empty());\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(space.is_empty());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n\n assert!(!space.is_empty());\n\n }\n\n\n\n #[test]\n\n fn is_hit() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(!space.is_hit());\n\n assert!(space.set_checked(true).is_ok());\n", "file_path": "src/space.rs", "rank": 12, "score": 24075.01643819337 }, { "content": " assert!(space.is_hit());\n\n\n\n space = Space::new([0, 0]);\n\n 
assert!(space.set_checked(false).is_ok());\n\n assert!(!space.is_hit());\n\n }\n\n\n\n #[test]\n\n fn pos() {\n\n let space = Space::new([0, 0]);\n\n assert_eq!(space.pos(), &[0, 0]);\n\n }\n\n}\n", "file_path": "src/space.rs", "rank": 13, "score": 24068.125146029382 }, { "content": "use crate::direction::Direction;\n\n\n\npub struct Ship {\n\n state: ShipState,\n\n position: Vec<[u8; 2]>,\n\n dir: Direction,\n\n}\n\n\n\nimpl Ship {\n\n /// Creates a new `Ship` with the given position.\n\n pub fn new(pos: Vec<[u8; 2]>) -> Result<Ship, &'static str> {\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n\n\n\n Ok(Ship {\n\n state: ShipState::Placement,\n\n position: pos,\n\n dir: dir,\n\n })\n\n }\n\n\n", "file_path": "src/ship.rs", "rank": 14, "score": 23676.438452511735 }, { "content": " /// Returns the ship's position.\n\n pub fn pos(&self) -> &[[u8; 2]] {\n\n &self.position\n\n }\n\n\n\n /// Sets the ship's position.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if `pos` does not form a vertical or horizontal line.\n\n pub fn set_pos(&mut self, pos: Vec<[u8; 2]>) -> Result<(), &'static str> {\n\n if pos.is_empty() {\n\n Err(\"tried to set an empty position to a ship\")\n\n } else if pos.len() == 1 {\n\n self.position = pos;\n\n\n\n Ok(())\n\n } else {\n\n let mut valid = true;\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n", "file_path": "src/ship.rs", "rank": 15, "score": 23670.652309095705 }, { "content": " self.state = ShipState::Active;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Returns whether the ship has sunk.\n\n pub fn is_sunk(&self) -> bool {\n\n self.state == ShipState::Sunk\n\n }\n\n\n\n /// Sets the ship as having been sunk.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the ship's state is not `ShipState::Active`.\n\n pub fn set_sunk(&mut self) -> Result<(), &'static str> {\n\n if self.state != ShipState::Active {\n\n Err(\"tried to sink a ship that was not active\")\n\n } else {\n\n self.state = ShipState::Sunk;\n\n\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n", "file_path": "src/ship.rs", "rank": 16, "score": 23664.27913605965 }, { "content": " } else {\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n\n\n\n self.position = pos;\n\n self.dir = dir;\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n /// Returns the ship's direction.\n\n pub fn dir(&self) -> Direction {\n\n self.dir\n\n }\n\n\n\n /// Returns the ship's length.\n\n pub fn len(&self) -> usize {\n\n self.position.len()\n\n }\n", "file_path": "src/ship.rs", "rank": 17, "score": 23663.97207327785 }, { "content": " assert_eq!(ship.pos(), pos.as_slice());\n\n }\n\n\n\n #[test]\n\n fn set_pos() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_pos(vec![[1, 0], [0, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::East);\n\n\n\n assert!(ship.set_pos(vec![[0, 1], [0, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::South);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [1, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::West);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [0, 1]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::North);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [0, 0]]).is_err());\n\n assert!(ship.set_pos(vec![[0, 0], [0, 2]]).is_err());\n", "file_path": "src/ship.rs", "rank": 18, "score": 23660.697392532442 }, { "content": "\n\n /// Returns whether the ship is in the placement state.\n\n pub fn is_placement(&self) -> bool {\n\n self.state == ShipState::Placement\n\n }\n\n\n\n /// Returns whether the ship 
is active.\n\n pub fn is_active(&self) -> bool {\n\n self.state == ShipState::Active\n\n }\n\n\n\n /// Sets the ship as active.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the ship's state is not `ShipState::Placement`.\n\n pub fn set_active(&mut self) -> Result<(), &'static str> {\n\n if self.state != ShipState::Placement {\n\n Err(\"tried to set a ship as active that wasn't in placement state\")\n\n } else {\n", "file_path": "src/ship.rs", "rank": 19, "score": 23659.52325566276 }, { "content": " assert!(ship.is_active());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(!ship.is_active());\n\n }\n\n\n\n #[test]\n\n fn set_active() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_active().is_ok());\n\n assert!(ship.set_active().is_err());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(ship.set_active().is_err());\n\n }\n\n\n\n #[test]\n\n fn is_sunk() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(!ship.is_sunk());\n\n assert!(ship.set_active().is_ok());\n\n assert!(!ship.is_sunk());\n", "file_path": "src/ship.rs", "rank": 20, "score": 23658.92036831113 }, { "content": " assert!(ship.set_pos(vec![]).is_err());\n\n }\n\n\n\n #[test]\n\n fn dir() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::North);\n\n\n\n ship = Ship::new(vec![[0, 1], [0, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::South);\n\n\n\n ship = Ship::new(vec![[0, 0], [1, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::West);\n\n\n\n ship = Ship::new(vec![[1, 0], [0, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::East);\n\n }\n\n\n\n #[test]\n\n fn len() {\n", "file_path": "src/ship.rs", "rank": 21, "score": 23658.879239341088 }, { "content": " assert!(ship.set_sunk().is_ok());\n\n assert!(ship.is_sunk());\n\n }\n\n\n\n #[test]\n\n fn set_sunk() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_sunk().is_err());\n\n assert!(ship.set_active().is_ok());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(ship.set_sunk().is_err());\n\n }\n\n}\n", "file_path": "src/ship.rs", "rank": 22, "score": 23658.67362314078 }, { "content": " let pos = vec![[0, 0], [0, 1]];\n\n let ship = Ship::new(pos.clone()).unwrap();\n\n assert_eq!(ship.len(), pos.len());\n\n }\n\n\n\n #[test]\n\n fn is_placement() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.is_placement());\n\n assert!(ship.set_active().is_ok());\n\n assert!(!ship.is_placement());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(!ship.is_placement());\n\n }\n\n\n\n #[test]\n\n fn is_active() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(!ship.is_active());\n\n assert!(ship.set_active().is_ok());\n", "file_path": "src/ship.rs", "rank": 23, "score": 23657.556660990263 }, { "content": "\n\n for i in 0..pos.len() - 1 {\n\n let x_diff = (pos[i][0] as i16 - pos[i + 1][0] as i16).abs() as u8;\n\n let y_diff = (pos[i][1] as i16 - pos[i + 1][1] as i16).abs() as u8;\n\n\n\n if x_diff + y_diff != 1 {\n\n valid = false;\n\n break;\n\n }\n\n\n\n let next_dir = Direction::from_positions(&pos[i + 1], &pos[i])?;\n\n\n\n if next_dir != dir {\n\n valid = false;\n\n break;\n\n }\n\n }\n\n\n\n if !valid {\n\n Err(\"ship position does not form a continuous line\")\n", "file_path": "src/ship.rs", "rank": 24, "score": 23655.373742144315 }, { "content": " /// has no ships, or if the active player has no placement ship.\n\n pub fn set_placement_ship(&mut self, 
pos: Vec<[u8; 2]>) -> Result<(), &'static str> {\n\n if self.state != GameState::Placement {\n\n Err(\"tried to set position of ship outside of placement game state\")\n\n } else {\n\n let ship = self.players[self.turn as usize].placement_ship_mut()?;\n\n ship.set_pos(pos)?;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Selects a space on the inactive player's grid if it's unchecked.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the inactive player's space at `pos` was already\n\n /// checked.\n\n pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {\n\n let ref mut opponent = self.players[self.not_turn()];\n", "file_path": "src/game.rs", "rank": 47, "score": 29.293257380892236 }, { "content": "use crate::direction::Direction;\n\nuse crate::player::Player;\n\nuse crate::settings::GameSettings;\n\nuse rand::{seq::SliceRandom, thread_rng, Rng};\n\n\n\npub struct Game {\n\n settings: GameSettings,\n\n players: [Player; 2],\n\n state: GameState,\n\n turn: u8,\n\n}\n\n\n\nimpl Game {\n\n pub fn new(settings: GameSettings) -> Result<Game, &'static str> {\n\n let grid_size = [settings.spaces[0], settings.spaces[1]];\n\n let mut players = [\n\n Player::new(grid_size, settings.ships.len(), false),\n\n Player::new(grid_size, settings.ships.len(), true),\n\n ];\n\n\n", "file_path": "src/game.rs", "rank": 48, "score": 25.0930043014002 }, { "content": " pub fn suggested_check(&self) -> [u8; 2] {\n\n let mut rng = thread_rng();\n\n let mut positions = self.inactive_player().suggested_checks();\n\n positions.shuffle(&mut rng);\n\n\n\n positions[0]\n\n }\n\n\n\n /// Moves the active player's grid cursor in the given `direction`.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if moving the grid cursor in `direction` would move it out of bounds.\n\n pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> {\n\n self.players[self.turn as usize].move_grid_cursor(direction)\n\n }\n\n\n\n /// Sets the active player's grid cursor position.\n\n ///\n\n /// # Errors\n", "file_path": "src/game.rs", "rank": 49, "score": 24.938039616864156 }, { "content": "\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Moves the active player's placement ship in the given direction.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the game's state is not `GameState::Placement`.\n\n pub fn move_ship(&mut self, direction: Direction) -> Result<(), &'static str> {\n\n if self.state != GameState::Placement {\n\n Err(\"tried to move ship outside of placement game state\")\n\n } else {\n\n self.players[self.turn as usize].move_placement_ship(direction)?;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n", "file_path": "src/game.rs", "rank": 50, "score": 24.81550318316917 }, { "content": " /// Rotates the active player's placement ship in the given direction.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the game's state is not `GameState::Placement`.\n\n pub fn rotate_ship(&mut self) -> Result<(), &'static str> {\n\n if self.state != GameState::Placement {\n\n Err(\"tried to rotate ship outside of placement game state\")\n\n } else {\n\n self.players[self.turn as usize].rotate_placement_ship()?;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Sets the active player's placement ship to the given position.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the game's state is not `GameState::Placement`, if the active player\n", "file_path": "src/game.rs", "rank": 51, "score": 22.239182993178353 }, { "content": "\n\n opponent.select_space(pos)?;\n\n\n\n // If it's an error, 
no ship was at the position; and if it's false, the\n\n // ship wasn't sunk\n\n if opponent.sink_ship_if_all_hit(pos) == Ok(true) {\n\n self.state = match opponent.all_ships_sunk() {\n\n true => GameState::Complete,\n\n false => GameState::Active,\n\n };\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Returns an unchecked position on the inactive player's grid as a check suggestion.\n\n ///\n\n /// This is intended for use in cases where the active player is computer-controlled, to\n\n /// determine the space they check. However, it could also be used to suggest a space that a\n\n /// human player could check.\n", "file_path": "src/game.rs", "rank": 52, "score": 21.7063284981092 }, { "content": " /// Returns an error if the game's state is not `GameState::Placement` or if the active\n\n /// player's placement ship overlaps with another ship.\n\n pub fn place_ship(&mut self) -> Result<(), &'static str> {\n\n if self.state != GameState::Placement {\n\n Err(\"tried to place ship outside of placement game state\")\n\n } else {\n\n let ref mut player = self.players[self.turn as usize];\n\n let ship_count = player.ships().len();\n\n\n\n player.place_placement_ship()?;\n\n\n\n // If the player hasn't placed all their ships, add a new one.\n\n if ship_count < self.settings.ships.len() {\n\n player.add_ship(\n\n [0, 0],\n\n Direction::West,\n\n self.settings.ships[ship_count],\n\n true,\n\n )?;\n\n }\n", "file_path": "src/game.rs", "rank": 53, "score": 18.68165469957711 }, { "content": " ///\n\n /// Returns an error if no space exists at `pos`.\n\n pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {\n\n self.players[self.turn as usize].set_grid_cursor(pos)\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq)]\n\npub enum GameState {\n\n Placement,\n\n Active,\n\n Complete,\n\n}\n", "file_path": "src/game.rs", "rank": 54, "score": 17.601051594530066 }, { "content": " for player in &mut players {\n\n if !player.is_cpu() {\n\n player.add_ship([0, 0], Direction::West, settings.ships[0], true)?;\n\n } else {\n\n let mut rng = thread_rng();\n\n let mut i = 0;\n\n\n\n while i < settings.ships.len() {\n\n let pos = [\n\n rng.gen_range(0, grid_size[0]),\n\n rng.gen_range(0, grid_size[1]),\n\n ];\n\n\n\n if player\n\n .add_ship(pos, Direction::random(), settings.ships[i], false)\n\n .is_ok()\n\n {\n\n i += 1;\n\n }\n\n }\n", "file_path": "src/game.rs", "rank": 55, "score": 16.71369642770154 }, { "content": "\n\n /// Returns as `usize` the index of the currently inactive player.\n\n pub fn not_turn(&self) -> usize {\n\n (self.turn + 1) as usize % 2\n\n }\n\n\n\n /// Sets the inactive player as active.\n\n pub fn switch_active_player(&mut self) {\n\n self.turn = self.not_turn() as u8;\n\n }\n\n\n\n /// Returns whether the active player has placed all their ships.\n\n pub fn active_player_placed_all_ships(&self) -> bool {\n\n let ships = self.active_player().ships();\n\n\n\n ships.len() == self.settings.ships.len() && !ships[ships.len() - 1].is_placement()\n\n }\n\n\n\n /// Returns whether a human player is currently placing ships.\n\n pub fn is_player_placing_ship(&self) -> bool {\n", "file_path": "src/game.rs", "rank": 56, "score": 16.553139880327013 }, { "content": " pub fn set_state_active(&mut self) -> Result<(), &'static str> {\n\n if self.state != GameState::Placement {\n\n Err(\"tried to set game as active from a state other than placement\")\n\n } else {\n\n self.state = GameState::Active;\n\n self.turn = 0;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Returns whether the game's current state 
is complete.\n\n pub fn is_state_complete(&self) -> bool {\n\n self.state == GameState::Complete\n\n }\n\n\n\n /// Returns as `usize` the index of the currently active player.\n\n pub fn turn(&self) -> usize {\n\n self.turn as usize\n\n }\n", "file_path": "src/game.rs", "rank": 57, "score": 16.37900816559636 }, { "content": "pub struct AppSettings {\n\n pub space_size: u32,\n\n}\n\n\n\npub struct GameSettings {\n\n pub spaces: [u8; 2],\n\n pub ships: Vec<u8>,\n\n}\n\n\n\nimpl GameSettings {\n\n pub fn defaults() -> GameSettings {\n\n GameSettings {\n\n spaces: [10, 10],\n\n ships: vec![2, 3, 4, 5],\n\n }\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 58, "score": 16.198015269998038 }, { "content": " /// Performs grid movement according to the current program state.\n\n fn movement(&mut self, direction: Direction) {\n\n if self.game.is_player_placing_ship() && self.game.move_ship(direction).is_err() {\n\n // TODO: more specific error checking.\n\n }\n\n\n\n if self.game.is_player_selecting_space()\n\n && self.turn_active\n\n && self.game.move_grid_cursor(direction).is_err()\n\n {\n\n // TODO: might be good to have some visual effect.\n\n }\n\n }\n\n\n\n /// Records the last known mouse cursor position.\n\n fn mouse_cursor_movement(&mut self, c: &[f64; 2]) {\n\n self.mouse_cursor = *c;\n\n\n\n if let Some(grid_pos) = self.mouse_cursor_grid_position() {\n\n if self.game.is_state_placement() {\n", "file_path": "src/app.rs", "rank": 59, "score": 16.13275711078359 }, { "content": " self.state == GameState::Placement && !self.active_player().is_cpu()\n\n }\n\n\n\n /// Returns whether a human player is currently selecting a space.\n\n pub fn is_player_selecting_space(&self) -> bool {\n\n self.state == GameState::Active && !self.active_player().is_cpu()\n\n }\n\n\n\n /// Returns as `usize` the winner, if there is one.\n\n pub fn get_winner(&self) -> Option<usize> {\n\n match self.state {\n\n GameState::Complete => Some(self.turn as usize),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Places the active player's placement ship.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/game.rs", "rank": 60, "score": 15.735220869183065 }, { "content": " .select_space(&cpu_space)\n\n .expect(\"CPU player tried to select a checked space\");\n\n self.cpu_turn_timer = 0.0;\n\n self.turn_active = false;\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn primary_action(&mut self, grid_pos: &[u8; 2]) {\n\n if self.game.is_player_placing_ship() && self.game.place_ship().is_err() {\n\n // TODO: more specific error checking.\n\n // For now, just assume it's the overlap error and ignore it.\n\n }\n\n\n\n if self.game.is_player_selecting_space() && self.game.select_space(grid_pos).is_ok() {\n\n self.turn_active = false;\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 61, "score": 14.899859528945605 }, { "content": "use crate::direction::Direction;\n\nuse crate::game::Game;\n\nuse crate::settings::{AppSettings, GameSettings};\n\nuse piston_window::*;\n\nuse std::{env::current_exe, path::PathBuf};\n\n\n\npub struct App<'a> {\n\n window: PistonWindow,\n\n settings: &'a AppSettings,\n\n game: Game,\n\n turn_active: bool,\n\n turn_end_timer: f64,\n\n cpu_turn_timer: f64,\n\n mouse_cursor: [f64; 2],\n\n grid_area: [u32; 4],\n\n}\n\n\n\nimpl<'a> App<'a> {\n\n pub fn new(settings: &AppSettings) -> App {\n\n let game_settings = GameSettings::defaults();\n", "file_path": "src/app.rs", "rank": 62, "score": 11.950414152197194 }, { "content": "mod app;\n\nmod direction;\n\nmod game;\n\nmod player;\n\nmod settings;\n\nmod ship;\n\nmod 
space;\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 11.6835411980259 }, { "content": "rust-battleship\n\n===============\n\n\n\nA simple Battleship game written in Rust, using the Piston game engine.\n\n\n\nIt is currently a one-player vs CPU opponent game.\n\n\n\nKeyboard controls:\n\n\n\n| Key | Ship Placement | Game |\n\n| ------ | -------------- | ---------------- |\n\n| Arrows | Move ship | Move grid cursor |\n\n| Enter | Place ship | Select space |\n\n| Space | Rotate ship | n/a |\n\n\n\nMouse controls:\n\n\n\n| Button | Ship Placement | Game |\n\n| ------ | -------------- | ------------ |\n\n| Left | Place ship | Select space |\n\n| Right | Rotate ship | n/a |\n\n\n", "file_path": "README.md", "rank": 64, "score": 11.049428242435916 }, { "content": " // Grid spaces\n\n for space in shown_player.spaces() {\n\n let space_pos = space.pos();\n\n let transform = c.transform.trans(\n\n (space_size_u32 * space_pos[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * space_pos[1] as u32 + grid_area[1]) as f64,\n\n );\n\n\n\n // Only show ship locations during ship placement or if the\n\n // current player is computer-controlled.\n\n if shown_player.ship_is_in_space(space_pos)\n\n && (game_state_placement\n\n || (space.is_unchecked() && current_player.is_cpu()))\n\n {\n\n image(&space_textures[3], transform, g);\n\n } else {\n\n let space_state = if space.is_unchecked() {\n\n 0\n\n } else if space.is_empty() {\n\n 1\n", "file_path": "src/app.rs", "rank": 65, "score": 10.52805666401813 }, { "content": " } else {\n\n 2\n\n };\n\n image(&space_textures[space_state], transform, g);\n\n }\n\n }\n\n\n\n // During ship placement, show the temporary position of the\n\n // next ship to be placed.\n\n if game_state_placement {\n\n if let Ok(ship) = shown_player.placement_ship() {\n\n for pos in ship.pos() {\n\n let transform = c.transform.trans(\n\n (space_size_u32 * pos[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * pos[1] as u32 + grid_area[1]) as f64,\n\n );\n\n image(&space_textures[3], transform, g);\n\n }\n\n }\n\n }\n", "file_path": "src/app.rs", "rank": 66, "score": 10.440119299604373 }, { "content": " }\n\n }\n\n\n\n Ok(Game {\n\n settings: settings,\n\n players: players,\n\n state: GameState::Placement,\n\n turn: 0,\n\n })\n\n }\n\n\n\n pub fn settings(&self) -> &GameSettings {\n\n &self.settings\n\n }\n\n\n\n /// Returns a reference to the currently active player.\n\n pub fn active_player(&self) -> &Player {\n\n &self.players[self.turn as usize]\n\n }\n\n\n", "file_path": "src/game.rs", "rank": 67, "score": 9.967155236879739 }, { "content": " window: window,\n\n settings: &settings,\n\n game: Game::new(game_settings).unwrap(),\n\n turn_active: true,\n\n turn_end_timer: 0.0,\n\n cpu_turn_timer: 0.0,\n\n mouse_cursor: [0.0; 2],\n\n grid_area: grid_area,\n\n }\n\n }\n\n\n\n pub fn init(&mut self) {\n\n self.window.set_ups(60);\n\n self.window.set_max_fps(60);\n\n\n\n // TODO set textures in a not terrible way\n\n let assets_dir = Self::get_assets_dir(current_exe().unwrap()).unwrap();\n\n let images_dir: PathBuf = assets_dir.join(\"images\");\n\n let mut space_textures = vec![];\n\n\n", "file_path": "src/app.rs", "rank": 68, "score": 9.21191853846474 }, { "content": " for state in 0..3 {\n\n let image_file = format!(\"gridspace-{}.png\", state);\n\n space_textures.push(self.get_texture(images_dir.join(&image_file)));\n\n }\n\n\n\n space_textures.push(self.get_texture(images_dir.join(\"shipspace.png\")));\n\n\n\n let grid_cursor_texture = 
self.get_texture(images_dir.join(\"grid-cursor.png\"));\n\n\n\n let mut ship_textures = vec![];\n\n for ship_size in 2..6 {\n\n let image_file = format!(\"ship-{}.png\", ship_size);\n\n ship_textures.push(self.get_texture(images_dir.join(&image_file)));\n\n }\n\n\n\n let player_text = [\n\n self.get_texture(images_dir.join(\"player-1.png\")),\n\n self.get_texture(images_dir.join(\"player-2.png\")),\n\n ];\n\n\n", "file_path": "src/app.rs", "rank": 69, "score": 9.060355746037573 }, { "content": " /// Returns a reference to the currently inactive player.\n\n pub fn inactive_player(&self) -> &Player {\n\n &self.players[self.not_turn()]\n\n }\n\n\n\n /// Returns whether the game's current state is ship placement.\n\n pub fn is_state_placement(&self) -> bool {\n\n self.state == GameState::Placement\n\n }\n\n\n\n /// Returns whether the game's current state is active.\n\n pub fn is_state_active(&self) -> bool {\n\n self.state == GameState::Active\n\n }\n\n\n\n /// Sets the game state as active. starting the game and setting player 1 as the active player.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the game state was not `GameState::Placement`.\n", "file_path": "src/game.rs", "rank": 70, "score": 8.970382455365879 }, { "content": " && self.mouse_cursor[1] >= self.grid_area[1] as f64\n\n && self.mouse_cursor[0] < (self.grid_area[0] + self.grid_area[2]) as f64\n\n && self.mouse_cursor[1] < (self.grid_area[1] + self.grid_area[3]) as f64\n\n }\n\n\n\n /// Returns the texture from the file at the given path.\n\n fn get_texture(&mut self, path: PathBuf) -> G2dTexture {\n\n Texture::from_path(\n\n &mut self.window.create_texture_context(),\n\n path,\n\n Flip::None,\n\n &TextureSettings::new(),\n\n )\n\n .unwrap()\n\n }\n\n\n\n /// Returns the assets directory, if it could be found.\n\n fn get_assets_dir(mut dir: PathBuf) -> Result<PathBuf, &'static str> {\n\n let mut result = None;\n\n\n", "file_path": "src/app.rs", "rank": 71, "score": 8.944033341754572 }, { "content": " let player = self.game.active_player();\n\n let ship_dir = player\n\n .placement_ship()\n\n .expect(\"failed to get player's placement ship\")\n\n .dir();\n\n // Subtract 1 from the ship count to not consider the placement ship itself.\n\n let ship_count = player.ships().len() - 1;\n\n let ship_len = self.game.settings().ships[ship_count];\n\n\n\n if let Some(ship) = player.get_ship_position(grid_pos, ship_dir, ship_len) {\n\n // `set_pos()` will return an error if the position was invalid.\n\n self.game\n\n .set_placement_ship(ship)\n\n .expect(\"tried to set placement ship to invalid position\");\n\n }\n\n } else if self.game.is_state_active()\n\n && !self.game.active_player().is_cpu()\n\n && self.game.set_grid_cursor(&grid_pos).is_err()\n\n {\n\n // TODO: might be good to have some visual effect.\n", "file_path": "src/app.rs", "rank": 72, "score": 8.34980046093925 }, { "content": " /// Processes left button presses according to the current program state.\n\n fn button_left(&mut self) {\n\n self.movement(Direction::West);\n\n }\n\n\n\n /// Processes right button presses according to the current program state.\n\n fn button_right(&mut self) {\n\n self.movement(Direction::East);\n\n }\n\n\n\n /// Processes up button presses according to the current program state.\n\n fn button_up(&mut self) {\n\n self.movement(Direction::North);\n\n }\n\n\n\n /// Processes down button presses according to the current program state.\n\n fn button_down(&mut self) {\n\n self.movement(Direction::South);\n\n }\n\n\n", "file_path": 
"src/app.rs", "rank": 73, "score": 8.31434366277366 }, { "content": " /// Processes primary button presses according to the current program state.\n\n fn button_primary(&mut self) {\n\n let grid_pos = self.game.active_player().grid_cursor().clone();\n\n self.primary_action(&grid_pos);\n\n }\n\n\n\n /// Processes secondary button presses according to the current program state.\n\n fn button_secondary(&mut self) {\n\n if self.game.is_player_placing_ship() {\n\n self.game.rotate_ship().expect(\"failed to rotate ship\");\n\n }\n\n }\n\n\n\n /// Processes left mouse clicks according to the current program state.\n\n fn mouse_left_click(&mut self) {\n\n if let Some(grid_pos) = self.mouse_cursor_grid_position() {\n\n self.primary_action(&grid_pos);\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 74, "score": 7.776238692157997 }, { "content": " let window_size = self.window.size();\n\n let turn_end_timer = self.turn_end_timer;\n\n let game_winner = self.game.get_winner();\n\n let game_turn = self.game.turn();\n\n let turn_active = self.turn_active;\n\n\n\n self.window.draw_2d(&e, |c, g, _| {\n\n clear([0.6, 0.6, 1.0, 1.0], g);\n\n\n\n // Ship icons above grid\n\n for (i, ship) in shown_player.ships().iter().enumerate() {\n\n if ship.is_active() {\n\n let transform = c.transform.trans(\n\n (space_size_u32 * 2 * i as u32 + grid_area[0] * 2) as f64,\n\n 30.0 as f64,\n\n );\n\n image(&ship_textures[i], transform, g);\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 75, "score": 7.408981097780939 }, { "content": " 22.0,\n\n ),\n\n g,\n\n );\n\n }\n\n });\n\n }\n\n }\n\n }\n\n\n\n fn update(&mut self, u: &UpdateArgs) {\n\n if self.game.is_state_placement() && self.game.active_player_placed_all_ships() {\n\n self.game.switch_active_player();\n\n\n\n if self.game.active_player_placed_all_ships() {\n\n // All ships have been placed; start the game.\n\n // This will also set player 1 as active so no need to switch active player.\n\n self.game\n\n .set_state_active()\n\n .expect(\"failed to start the game\");\n", "file_path": "src/app.rs", "rank": 76, "score": 7.165330613725567 }, { "content": " }\n\n }\n\n }\n\n\n\n /// Returns the grid coordinates of the mouse cursor position.\n\n fn mouse_cursor_grid_position(&self) -> Option<[u8; 2]> {\n\n if self.mouse_over_grid() {\n\n let grid_area_f64 = [self.grid_area[0] as f64, self.grid_area[1] as f64];\n\n\n\n Some([\n\n ((self.mouse_cursor[0] - grid_area_f64[0]) / grid_area_f64[0]) as u8,\n\n ((self.mouse_cursor[1] - grid_area_f64[1]) / grid_area_f64[0]) as u8,\n\n ])\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn mouse_over_grid(&self) -> bool {\n\n self.mouse_cursor[0] >= self.grid_area[0] as f64\n", "file_path": "src/app.rs", "rank": 77, "score": 6.571865395669343 }, { "content": " if let Some(c) = e.mouse_cursor_args() {\n\n self.mouse_cursor_movement(&c);\n\n }\n\n\n\n if let Some(u) = e.update_args() {\n\n self.update(&u);\n\n }\n\n\n\n if e.render_args().is_some() {\n\n let current_player = self.game.active_player();\n\n let game_state_placement = self.game.is_state_placement();\n\n let game_state_active = self.game.is_state_active();\n\n let game_state_complete = self.game.is_state_complete();\n\n let shown_player = match game_state_placement {\n\n true => current_player,\n\n false => self.game.inactive_player(),\n\n };\n\n\n\n let space_size_u32 = self.settings.space_size as u32;\n\n let grid_area = self.grid_area;\n", "file_path": "src/app.rs", "rank": 78, "score": 6.47924911482947 }, { "content": " }\n\n } else {\n\n if !self.turn_active 
{\n\n // Continue/end the end-of-turn delay.\n\n if self.turn_end_timer < 1.5 {\n\n self.turn_end_timer += u.dt;\n\n } else if self.game.is_state_active() {\n\n self.game.switch_active_player();\n\n self.turn_end_timer = 0.0;\n\n self.turn_active = true;\n\n }\n\n }\n\n\n\n // Continue/end the delay when CPU players take their turn.\n\n if self.turn_active && self.game.active_player().is_cpu() {\n\n self.cpu_turn_timer += u.dt;\n\n\n\n if self.cpu_turn_timer >= 1.0 {\n\n let cpu_space = self.game.suggested_check();\n\n self.game\n", "file_path": "src/app.rs", "rank": 79, "score": 5.736016327620661 }, { "content": " while dir.pop() {\n\n if dir.join(\"assets\").exists() {\n\n result = Some(dir.join(\"assets\"));\n\n break;\n\n }\n\n }\n\n\n\n result.ok_or(\"could not find assets directory\")\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 80, "score": 5.35185365236402 }, { "content": "\n\n // During the game, show the player's grid cursor.\n\n if game_state_active && turn_end_timer == 0.0 && !current_player.is_cpu() {\n\n let grid_cursor = current_player.grid_cursor();\n\n let transform = c.transform.trans(\n\n (space_size_u32 * grid_cursor[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * grid_cursor[1] as u32 + grid_area[1]) as f64,\n\n );\n\n image(&grid_cursor_texture, transform, g);\n\n }\n\n\n\n // Current player text image\n\n if game_winner.is_none() {\n\n let turn = game_turn;\n\n let player_text_size = player_text[turn].get_size();\n\n let transform = c\n\n .transform\n\n .trans((window_size.width - player_text_size.0 as f64) / 2.0, 2.0);\n\n image(&player_text[turn], transform, g);\n\n }\n", "file_path": "src/app.rs", "rank": 81, "score": 4.825922094710468 }, { "content": " let grid_area = [\n\n settings.space_size,\n\n settings.space_size * 3,\n\n game_settings.spaces[0] as u32 * settings.space_size,\n\n game_settings.spaces[1] as u32 * settings.space_size,\n\n ];\n\n\n\n let window_size = [\n\n grid_area[2] + settings.space_size * 2,\n\n grid_area[3] + settings.space_size * 4,\n\n ];\n\n\n\n let window_title = \"Battleship\";\n\n let window: PistonWindow = WindowSettings::new(window_title, window_size)\n\n .exit_on_esc(true)\n\n .resizable(false)\n\n .build()\n\n .unwrap();\n\n\n\n App {\n", "file_path": "src/app.rs", "rank": 82, "score": 4.8036668173390655 }, { "content": " let wins_text_size = game_over_text[1].get_size();\n\n let player_text_size = player_text[winner].get_size();\n\n image(\n\n &game_over_text[0],\n\n c.transform\n\n .trans((window_size.width - game_over_text_size.0 as f64) / 2.0, 2.0),\n\n g,\n\n );\n\n image(\n\n &player_text[winner],\n\n c.transform.trans(\n\n (window_size.width - (player_text_size.0 + wins_text_size.0 + 2) as f64) / 2.0,\n\n 22.0,\n\n ),\n\n g,\n\n );\n\n image(\n\n &game_over_text[1],\n\n c.transform.trans(\n\n (window_size.width + (player_text_size.0 - wins_text_size.0 + 2) as f64) / 2.0,\n", "file_path": "src/app.rs", "rank": 83, "score": 2.863751775694136 }, { "content": "\n\n // During turn transitions / game over, cover the window with\n\n // a black rectangle of increasing opacity.\n\n if !turn_active && turn_end_timer >= 0.75 {\n\n let alpha = match game_state_complete {\n\n true => (turn_end_timer as f32 - 0.75) / 1.125,\n\n false => (turn_end_timer as f32 - 0.75) / 0.75,\n\n };\n\n rectangle(\n\n [0.0, 0.0, 0.0, alpha],\n\n [0.0, 0.0, window_size.width, window_size.height],\n\n c.transform,\n\n g,\n\n );\n\n }\n\n\n\n // Game over content, to appear over the black rectangle.\n\n if turn_end_timer >= 1.5 && 
game_winner.is_some() {\n\n let winner = game_winner.unwrap();\n\n let game_over_text_size = game_over_text[0].get_size();\n", "file_path": "src/app.rs", "rank": 84, "score": 1.5309636290742734 }, { "content": " let game_over_text = [\n\n self.get_texture(images_dir.join(\"game-over.png\")),\n\n self.get_texture(images_dir.join(\"wins.png\")),\n\n ];\n\n\n\n while let Some(e) = self.window.next() {\n\n if let Some(p) = e.press_args() {\n\n match p {\n\n Button::Mouse(mouse::MouseButton::Left) => self.mouse_left_click(),\n\n Button::Mouse(mouse::MouseButton::Right) => self.button_secondary(),\n\n Button::Keyboard(keyboard::Key::Left) => self.button_left(),\n\n Button::Keyboard(keyboard::Key::Right) => self.button_right(),\n\n Button::Keyboard(keyboard::Key::Up) => self.button_up(),\n\n Button::Keyboard(keyboard::Key::Down) => self.button_down(),\n\n Button::Keyboard(keyboard::Key::Return) => self.button_primary(),\n\n Button::Keyboard(keyboard::Key::Space) => self.button_secondary(),\n\n _ => {}\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 85, "score": 1.2115992384866332 } ]
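Taken together, the Player methods in the record above (new, add_ship, place_placement_ship, select_space, space, all_ships_sunk) form a small board-management API for the battleship game. A minimal usage sketch follows; it assumes the code lives in the same crate as the src/player.rs, src/direction.rs, src/ship.rs and src/space.rs files shown in the context items, and the function name, grid size, ship length and shot coordinate are illustrative values only.

    use crate::direction::Direction;
    use crate::player::Player;

    // Hypothetical driver showing the placement -> shooting flow.
    fn demo_player_flow() -> Result<(), &'static str> {
        // 10x10 grid with room for a single ship, human-controlled (is_cpu = false).
        let mut player = Player::new([10, 10], 1, false);

        // Add a length-3 placement ship with its head at [0, 0], laid out towards the
        // west, then commit it to the board so it becomes active.
        player.add_ship([0, 0], Direction::West, 3, true)?;
        player.place_placement_ship()?;

        // A shot at [1, 0] lands on the ship: that space is now checked and marked as
        // a hit, but the ship still has unhit spaces, so nothing is sunk yet.
        player.select_space(&[1, 0])?;
        assert!(player.space(&[1, 0]).is_hit());
        assert!(!player.all_ships_sunk());

        Ok(())
    }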
Rust
plugins/afl/afl_mutate/src/afl_mutate.rs
elast0ny/CROWDFUZZ
340fd0e9e03e147ebe977d456e8f6052bcf183eb
use std::mem::MaybeUninit; pub use ::afl_lib::*; pub use ::cflib::*; mod mutators; pub use mutators::*; mod bit_flip; pub use bit_flip::*; mod arithmetic; pub use arithmetic::*; mod interesting; pub use interesting::*; mod havoc; pub use havoc::*; cflib::register!(name, env!("CARGO_PKG_NAME")); cflib::register!(load, init); cflib::register!(pre_fuzz, validate); cflib::register!(fuzz, mutate_input); cflib::register!(unload, destroy); struct State { force_update: bool, prev_input_idx: usize, stage_name: String, cur_stage: MutatorStage, stat_cur_stage: StatStr, stat_total_iterations: StatNum, stat_stage_progress: StatNum, restore_input: &'static mut bool, no_select: &'static mut bool, no_mutate: &'static bool, inputs: &'static Vec<CfInputInfo>, cur_input_idx: &'static usize, cur_input: &'static mut CfInput, afl: &'static mut AflGlobals, afl_queue: &'static mut AflQueue, } fn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> { #[allow(invalid_value)] let state = Box::new(unsafe { State { force_update: true, prev_input_idx: 0, stage_name: String::new(), cur_stage: MutatorStage::default(), stat_cur_stage: core.new_stat_str("stage", 128, "[init]")?, stat_stage_progress: core.new_stat_num("progress", 0)?, stat_total_iterations: core.new_stat_num("iterations", 0)?, restore_input: store.as_mutref(STORE_RESTORE_INPUT, Some(core))?, no_select: store.as_mutref(STORE_NO_SELECT, Some(core))?, no_mutate: store.as_mutref(STORE_NO_MUTATE, Some(core))?, inputs: MaybeUninit::zeroed().assume_init(), cur_input_idx: MaybeUninit::zeroed().assume_init(), cur_input: MaybeUninit::zeroed().assume_init(), afl: MaybeUninit::zeroed().assume_init(), afl_queue: MaybeUninit::zeroed().assume_init(), } }); Ok(Box::into_raw(state) as _) } fn validate( core: &mut dyn PluginInterface, store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let state = box_ref!(plugin_ctx, State); unsafe { state.inputs = store.as_mutref(STORE_INPUT_LIST, Some(core))?; state.cur_input_idx = store.as_ref(STORE_INPUT_IDX, Some(core))?; state.cur_input = store.as_mutref(STORE_INPUT_BYTES, Some(core))?; match store.as_mutref(STORE_AFL_GLOBALS, None) { Ok(v) => state.afl = v, Err(e) => { core.warn("Missing AFL globals ! 
Is the `afl_state` plugin running ?"); return Err(e); } }; state.afl_queue = store.as_mutref(STORE_AFL_QUEUE, Some(core))?; } Ok(()) } fn mutate_input( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let s = box_ref!(plugin_ctx, State); if *s.no_mutate { if !s.force_update { s.stat_cur_stage.set("None"); *s.stat_stage_progress.val = 0; *s.stat_total_iterations.val = 0; s.force_update = true; } return Ok(()); } let stage = &mut s.cur_stage; let input = &mut s.cur_input; let afl = &mut s.afl; let q = unsafe { s.afl_queue.get_unchecked_mut(*s.cur_input_idx) }; if s.force_update || s.prev_input_idx != *s.cur_input_idx { stage.sync_to_input(q, afl, input); s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; s.prev_input_idx = *s.cur_input_idx; s.force_update = false; } loop { match stage.mutate(input) { StageResult::WillRestoreInput => { *s.no_select = true; *s.restore_input = false; } StageResult::CantRestoreInput => { *s.restore_input = true; *s.no_select = false; } StageResult::Update => { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), None); s.stat_cur_stage.set(&s.stage_name); continue; } StageResult::Done => { if stage.next(q, afl, input) { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; continue; } q.passed_det = true; *s.restore_input = false; *s.no_select = false; } }; break; } *s.stat_stage_progress.val += 1; Ok(()) } fn destroy( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let _state = box_take!(plugin_ctx, State); Ok(()) }
use std::mem::MaybeUninit; pub use ::afl_lib::*; pub use ::cflib::*; mod mutators; pub use mutators::*; mod bit_flip; pub use bit_flip::*; mod arithmetic; pub use arithmetic::*; mod interesting; pub use interesting::*; mod havoc; pub use havoc::*; cflib::register!(name, env!("CARGO_PKG_NAME")); cflib::register!(load, init); cflib::register!(pre_fuzz, validate); cflib::register!(fuzz, mutate_input); cflib::register!(unload, destroy); struct State { force_update: bool, prev_input_idx: usize, stage_name: String, cur_stage: MutatorStage, stat_cur_stage: StatStr, stat_total_iterations: StatNum, stat_stage_progress: StatNum, restore_input: &'static mut bool, no_select: &'static mut bool, no_mutate: &'static bool, inputs: &'static Vec<CfInputInfo>, cur_input_idx: &'static usize, cur_input: &'static mut CfInput, afl: &'static mut AflGlobals, afl_queue: &'static mut AflQueue, } fn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> { #[allow(invalid_value)] let state = Box::new(unsafe { State { force_update: true, prev_input_idx: 0, stage_name: String::new(), cur_stage: MutatorStage::default(), stat_cur_stage: core.new_stat_str("stage", 128, "[init]")?, stat_stage_progress: core.new_stat_num("progress", 0)?, stat_total_iterations: core.new_stat_num("iterations", 0)?, restore_input: store.as_mutref(STORE_RESTORE_INPUT, Some(core))?, no_select: store.as_mutref(STORE_NO_SELECT, Some(core))?, no_mutate: store.as_mutref(STORE_NO_MUTATE, Some(core))?, inputs: MaybeUninit::zeroed().assume_init(), cur_input_idx: MaybeUninit::zeroed().assume_init(), cur_input: MaybeUninit::zeroed().assume_init(),
fn validate( core: &mut dyn PluginInterface, store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let state = box_ref!(plugin_ctx, State); unsafe { state.inputs = store.as_mutref(STORE_INPUT_LIST, Some(core))?; state.cur_input_idx = store.as_ref(STORE_INPUT_IDX, Some(core))?; state.cur_input = store.as_mutref(STORE_INPUT_BYTES, Some(core))?; match store.as_mutref(STORE_AFL_GLOBALS, None) { Ok(v) => state.afl = v, Err(e) => { core.warn("Missing AFL globals ! Is the `afl_state` plugin running ?"); return Err(e); } }; state.afl_queue = store.as_mutref(STORE_AFL_QUEUE, Some(core))?; } Ok(()) } fn mutate_input( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let s = box_ref!(plugin_ctx, State); if *s.no_mutate { if !s.force_update { s.stat_cur_stage.set("None"); *s.stat_stage_progress.val = 0; *s.stat_total_iterations.val = 0; s.force_update = true; } return Ok(()); } let stage = &mut s.cur_stage; let input = &mut s.cur_input; let afl = &mut s.afl; let q = unsafe { s.afl_queue.get_unchecked_mut(*s.cur_input_idx) }; if s.force_update || s.prev_input_idx != *s.cur_input_idx { stage.sync_to_input(q, afl, input); s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; s.prev_input_idx = *s.cur_input_idx; s.force_update = false; } loop { match stage.mutate(input) { StageResult::WillRestoreInput => { *s.no_select = true; *s.restore_input = false; } StageResult::CantRestoreInput => { *s.restore_input = true; *s.no_select = false; } StageResult::Update => { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), None); s.stat_cur_stage.set(&s.stage_name); continue; } StageResult::Done => { if stage.next(q, afl, input) { s.stage_name.clear(); stage.update_state(input, Some(&mut s.stage_name), Some(s.stat_total_iterations.val)); s.stat_cur_stage.set(&s.stage_name); *s.stat_stage_progress.val = 0; continue; } q.passed_det = true; *s.restore_input = false; *s.no_select = false; } }; break; } *s.stat_stage_progress.val += 1; Ok(()) } fn destroy( _core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8, ) -> Result<()> { let _state = box_take!(plugin_ctx, State); Ok(()) }
            afl: MaybeUninit::zeroed().assume_init(),
            afl_queue: MaybeUninit::zeroed().assume_init(),
        }
    });

    Ok(Box::into_raw(state) as _)
}
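The lifecycle above hands the plugin's State to the host as a raw *mut u8 (Box::into_raw in init) and recovers it on every later callback through cflib's box_ref!/box_take! macros. A minimal stand-alone sketch of that round-trip, using plain std and invented names rather than the real macros:

struct Ctx {
    counter: u64,
}

// Equivalent of `Box::into_raw(state) as _` in init(): hand ownership to the host.
fn init() -> *mut u8 {
    Box::into_raw(Box::new(Ctx { counter: 0 })) as *mut u8
}

// Roughly what box_ref!(ptr, Ctx) does: borrow the context without taking ownership.
fn fuzz(plugin_ctx: *mut u8) {
    let ctx = unsafe { &mut *(plugin_ctx as *mut Ctx) };
    ctx.counter += 1;
}

// Roughly what box_take!(ptr, Ctx) does: re-box the pointer so Drop runs exactly once.
fn destroy(plugin_ctx: *mut u8) {
    let _ctx = unsafe { Box::from_raw(plugin_ctx as *mut Ctx) };
}

fn main() {
    let ctx = init();
    fuzz(ctx);
    destroy(ctx);
}

The key property is that ownership crosses the raw-pointer boundary exactly once in each direction, so the context is freed exactly once in destroy.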
function_block-function_prefix_line
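The loop in mutate_input above drives a MutatorStage state machine whose stages live in bit_flip.rs, arithmetic.rs, interesting.rs and havoc.rs. A stripped-down sketch of the same control flow, with invented stage and result names, only to make the mutate / next-stage / done transitions concrete:

// Invented, simplified stage machine; the real MutatorStage tracks many more
// deterministic passes (bit flips, arithmetic, interesting values, havoc).
#[derive(Clone, Copy)]
enum Stage {
    BitFlip(u8),
    Arith(u8),
    Done,
}

enum Step {
    Mutated,
    NextStage,
    Finished,
}

fn step(stage: &mut Stage, input: &mut [u8]) -> Step {
    match *stage {
        Stage::BitFlip(idx) if (idx as usize) < input.len() * 8 => {
            input[(idx / 8) as usize] ^= 1 << (idx % 8);
            *stage = Stage::BitFlip(idx + 1);
            Step::Mutated
        }
        Stage::BitFlip(_) => {
            *stage = Stage::Arith(0);
            Step::NextStage
        }
        Stage::Arith(idx) if (idx as usize) < input.len() => {
            input[idx as usize] = input[idx as usize].wrapping_add(1);
            *stage = Stage::Arith(idx + 1);
            Step::Mutated
        }
        Stage::Arith(_) | Stage::Done => {
            *stage = Stage::Done;
            Step::Finished
        }
    }
}

fn main() {
    let mut input = vec![0u8; 2];
    let mut stage = Stage::BitFlip(0);
    loop {
        match step(&mut stage, &mut input) {
            Step::Mutated => { /* hand `input` to the target here, then loop */ }
            Step::NextStage => continue,
            Step::Finished => break,
        }
    }
    println!("deterministic passes exhausted: {:?}", input);
}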
[ { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n afl: AflGlobals::default(),\n\n queue: Vec::new(),\n\n is_calibrating: false,\n\n prev_idx: 0,\n\n max_cal: 0,\n\n first_trace: Vec::with_capacity(MAP_SIZE),\n\n tmp: String::new(),\n\n init_testcase_num: 0,\n\n\n\n // Stats\n\n queued_with_cov: core\n\n .new_stat_num(&format!(\"{}queued_with_cov\", TAG_PREFIX_TOTAL), 0)?,\n\n queued_variable: core\n\n .new_stat_num(&format!(\"{}queued_variable\", TAG_PREFIX_TOTAL), 0)?,\n\n // Core store vals\n\n num_execs: store.as_ref(STORE_NUM_EXECS, Some(core))?,\n", "file_path": "plugins/afl/afl_state/src/afl_state.rs", "rank": 0, "score": 394786.70892715466 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let s = Box::new(unsafe {\n\n State {\n\n cur_input_idx: 0,\n\n seq_input_idx: 0,\n\n orig_buf: Vec::new(),\n\n cur_input: CfInput::default(),\n\n priority_list: BinaryHeap::new(),\n\n restore_input: false,\n\n num_old_inputs: 0,\n\n // Stats\n\n num_priority_inputs: core\n\n .new_stat_num(&format!(\"{}priority_inputs\", TAG_PREFIX_TOTAL), 0)?,\n\n num_new_inputs: core\n\n .new_stat_num(&format!(\"{}new_inputs\", TAG_PREFIX_TOTAL), 0)?,\n\n // Core store values\n\n no_select: store.as_ref(STORE_NO_SELECT, Some(core))?,\n\n // Plugin store values\n\n input_list: MaybeUninit::zeroed().assume_init(),\n", "file_path": "plugins/select_input/src/select_input.rs", "rank": 2, "score": 359626.60471454397 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let s = Box::new(unsafe {\n\n State {\n\n rng: SmallRng::from_rng(&mut ::rand::thread_rng()).unwrap(),\n\n // core store values\n\n no_mutate: store.as_mutref(STORE_NO_MUTATE, Some(core))?,\n\n // Plugin store values\n\n cur_input: MaybeUninit::zeroed().assume_init(),\n\n }\n\n });\n\n\n\n Ok(Box::into_raw(s) as _)\n\n}\n\n\n", "file_path": "plugins/basic_mutate/src/basic_mutate.rs", "rank": 3, "score": 358803.13626551116 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n // Plugin store vals\n\n afl: MaybeUninit::zeroed().assume_init(),\n\n ctx: MaybeUninit::zeroed().assume_init(),\n\n\n\n exit_status: TargetExitStatus::Normal(0),\n\n input_file: None,\n\n exec_time: 0,\n\n target_input_path: None,\n\n target_working_dir: None,\n\n target_timeout_ms: None,\n\n target_args: Vec::new(),\n\n \n\n // Stats\n\n avg_exec_time: core.new_stat_num(STAT_TARGET_EXEC_TIME, 0)?,\n\n // Core store values\n", "file_path": "plugins/afl/afl_fork_server/src/afl_fork_server.rs", "rank": 4, "score": 352295.8103377508 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n hasher: Sha1::new(),\n\n unique_files: HashSet::new(),\n\n tmp_uid: [0; 20],\n\n tmp_str: String::new(),\n\n tmp_buf: Vec::new(),\n\n queue_dir: PathBuf::new(),\n\n is_input_list_owner: false,\n\n is_new_inputs_owner: false,\n\n num_inputs: MaybeUninit::zeroed().assume_init(),\n\n owned_input_list: Vec::new(),\n\n owned_new_inputs: 
Vec::new(),\n\n input_list: MaybeUninit::zeroed().assume_init(),\n\n new_inputs: MaybeUninit::zeroed().assume_init(),\n\n stat_queue_dir: MaybeUninit::zeroed().assume_init(),\n\n }\n\n });\n", "file_path": "plugins/fs_store/src/fs_store.rs", "rank": 5, "score": 331833.01188516186 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n // Make sure target_bin points to a file\n\n let target_bin_path: &String = unsafe { store.as_ref(STORE_TARGET_BIN, Some(core))? };\n\n if !Path::new(target_bin_path).is_file() {\n\n core.error(&format!(\n\n \"Failed to find target binary '{}'\",\n\n target_bin_path\n\n ));\n\n return Err(From::from(\"Invalid target binary path\".to_string()));\n\n }\n\n\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n exit_status: TargetExitStatus::Normal(0),\n\n input_file: None,\n\n exec_time: 0,\n\n target_input_path: None,\n\n target_working_dir: None,\n\n target_timeout_ms: None,\n", "file_path": "plugins/run_target/src/run_target.rs", "rank": 6, "score": 322997.9397365481 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n #[allow(invalid_value)]\n\n let mut s = Box::new(unsafe {\n\n State {\n\n hasher: Sha1::new(),\n\n tmp_uid: [0; 20],\n\n tmp_str: String::with_capacity(40),\n\n crash_dir: PathBuf::new(),\n\n timeout_dir: PathBuf::new(),\n\n\n\n // Stats\n\n num_crashes: MaybeUninit::zeroed().assume_init(),\n\n num_timeouts: MaybeUninit::zeroed().assume_init(),\n\n stat_crash_dir: MaybeUninit::zeroed().assume_init(),\n\n stat_timeout_dir: MaybeUninit::zeroed().assume_init(),\n\n\n\n // Plugin store values\n\n exit_status: MaybeUninit::zeroed().assume_init(),\n\n cur_input: MaybeUninit::zeroed().assume_init(),\n\n input_list: MaybeUninit::zeroed().assume_init(),\n", "file_path": "plugins/save_result/src/save_result.rs", "rank": 7, "score": 322997.93973654805 }, { "content": "// Initialize our plugin\n\nfn init(core: &mut dyn PluginInterface, store: &mut CfStore) -> Result<*mut u8> {\n\n let state = Box::new(State {\n\n // Create number that lives in the stats memory\n\n num_execs: core.new_stat_num(&format!(\"{}num_execs\", TAG_PREFIX_TOTAL), 0)?,\n\n // Get refence to store value owned by the core\n\n fuzzer_name: unsafe { store.as_ref(STORE_FUZZER_NAME, Some(core)) }?,\n\n });\n\n\n\n core.info(&format!(\"Hello {} !\", state.fuzzer_name));\n\n\n\n Ok(Box::into_raw(state) as _)\n\n}\n\n\n", "file_path": "plugins/test_plugin/src/test_plugin.rs", "rank": 8, "score": 322997.93973654805 }, { "content": "pub fn could_be_arith(mut old_val: u32, mut new_val: u32, blen: u8) -> bool {\n\n let mut ov: u8 = 0;\n\n let mut nv: u8 = 0;\n\n let mut diffs: usize = 0;\n\n\n\n if old_val == new_val {\n\n return true;\n\n }\n\n /* See if one-byte adjustments to any byte could produce this result. */\n\n for i in 0..blen {\n\n let a = old_val >> (8 * i);\n\n let b = new_val >> (8 * i);\n\n\n\n if a != b {\n\n diffs += 1;\n\n ov = a as _;\n\n nv = b as _;\n\n }\n\n }\n\n /* If only one byte differs and the values are within range, return 1. 
*/\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 9, "score": 321522.54747760145 }, { "content": "fn get_valid_ptr(store: &CfStore, key: &str) -> Result<*mut u8> {\n\n if let Some(v) = store.get(key) {\n\n if v.is_null() {\n\n Err(From::from(\"Store pointer is null\".to_string()))\n\n } else {\n\n Ok(*v)\n\n }\n\n } else {\n\n Err(From::from(\"Store key is missing\".to_string()))\n\n }\n\n}\n\n\n\nimpl CfStoreUtil for CfStore {\n\n fn insert_exclusive<T>(\n\n &mut self,\n\n key: &str,\n\n val: &T,\n\n core: Option<&mut dyn PluginInterface>,\n\n ) -> Result<()> {\n\n if self.get(key).is_some() {\n", "file_path": "cflib/src/store.rs", "rank": 10, "score": 289087.8708408508 }, { "content": "pub fn could_be_interest(old_val: u32, new_val: u32, blen: u8, check_le: bool) -> bool {\n\n if old_val == new_val {\n\n return true;\n\n }\n\n /* See if one-byte insertions from interesting_8 over old_val could\n\n produce new_val. */\n\n for i in 0..blen {\n\n for j in 0..INTERESTING_8.len() {\n\n let tval = unsafe {\n\n (old_val & !(0xff << (i * 8)))\n\n | ((*INTERESTING_8.get_unchecked(j) as u32) << (i * 8))\n\n };\n\n\n\n if new_val == tval {\n\n return true;\n\n }\n\n }\n\n }\n\n /* Bail out unless we're also asked to examine two-byte LE insertions\n\n as a preparation for BE attempts. */\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 11, "score": 283549.4216640079 }, { "content": "pub fn could_be_bitflip(mut xor_val: u32) -> bool {\n\n let mut sh: u8 = 0;\n\n\n\n if xor_val == 0 {\n\n return true;\n\n }\n\n /* Shift left until first bit set. */\n\n while (xor_val & 1) == 0 {\n\n sh += 1;\n\n xor_val >>= 1;\n\n }\n\n /* 1-, 2-, and 4-bit patterns are OK anywhere. */\n\n if xor_val == 1 || xor_val == 3 || xor_val == 15 {\n\n return true;\n\n }\n\n /* 8-, 16-, and 32-bit patterns are OK only if shift factor is\n\n divisible by 8, since that's the stepover for these ops. */\n\n if sh & 7 != 0 {\n\n return false;\n\n }\n\n if xor_val == 0xff || xor_val == 0xffff || xor_val == 0xffffffff {\n\n return true;\n\n }\n\n\n\n false\n\n}\n", "file_path": "plugins/afl/afl_mutate/src/bit_flip.rs", "rank": 12, "score": 264061.1443155691 }, { "content": "pub fn choose_block_len(limit: usize, rng: &mut SmallRng) -> usize {\n\n let mut min_value;\n\n let max_value;\n\n\n\n match rng.gen_range(0, 3) {\n\n 0 => {\n\n min_value = 1;\n\n max_value = HAVOC_BLK_SMALL;\n\n }\n\n 1 => {\n\n min_value = HAVOC_BLK_SMALL;\n\n max_value = HAVOC_BLK_MEDIUM;\n\n }\n\n _ => {\n\n if rng.gen_range(0, 10) != 0 {\n\n min_value = HAVOC_BLK_MEDIUM;\n\n max_value = HAVOC_BLK_LARGE;\n\n } else {\n\n min_value = HAVOC_BLK_LARGE;\n\n max_value = HAVOC_BLK_XL;\n\n }\n\n }\n\n }\n\n if min_value as usize >= limit {\n\n min_value = 1;\n\n }\n\n (min_value as usize) + rng.gen_range(0, std::cmp::min(max_value as usize, limit) - (min_value as usize) + 1)\n\n}\n", "file_path": "plugins/afl/afl_mutate/src/havoc.rs", "rank": 13, "score": 262155.76577751804 }, { "content": "pub fn calculate_score(q: &mut AflQueueEntry, afl: &AflGlobals) -> u32 {\n\n let avg_exec_us = (afl.total_cal_us / afl.total_cal_cycles) as usize;\n\n let mut perf_score = 100;\n\n /* Adjust score based on execution speed of this path, compared to the\n\n global average. Multiplier ranges from 0.1x to 3x. Fast inputs are\n\n less expensive to fuzz, so we're giving them more air time. 
*/\n\n\n\n if q.exec_us as f64 * 0.1 > avg_exec_us as _ {\n\n perf_score = 10;\n\n } else if q.exec_us as f64 * 0.25 > avg_exec_us as _ {\n\n perf_score = 25;\n\n } else if q.exec_us as f64 * 0.5 > avg_exec_us as _ {\n\n perf_score = 50;\n\n } else if q.exec_us as f64 * 0.75 > avg_exec_us as _ {\n\n perf_score = 75;\n\n } else if q.exec_us * 4 < avg_exec_us as _ {\n\n perf_score = 300;\n\n } else if q.exec_us * 3 < avg_exec_us as _ {\n\n perf_score = 200;\n\n } else if q.exec_us * 2 < avg_exec_us as _ {\n", "file_path": "plugins/afl/afl_mutate/src/havoc.rs", "rank": 14, "score": 257215.72269800398 }, { "content": "// Perform our task in the fuzzing loop\n\nfn fuzz(_core: &mut dyn PluginInterface, _store: &mut CfStore, plugin_ctx: *mut u8) -> Result<()> {\n\n let ctx = box_ref!(plugin_ctx, State);\n\n\n\n *ctx.num_execs.val += 1;\n\n\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/test_plugin/src/test_plugin.rs", "rank": 15, "score": 248745.6494030144 }, { "content": "pub fn has_new_bits(virgin_map: &mut [u8], trace_bits: &[u8]) -> u8 {\n\n let mut current: *const usize = trace_bits.as_ptr() as _;\n\n let mut virgin: *mut usize = virgin_map.as_mut_ptr() as _;\n\n\n\n #[cfg(target_pointer_width = \"64\")]\n\n let len = virgin_map.len() >> 3;\n\n #[cfg(target_pointer_width = \"32\")]\n\n let len = virgin_map.len() >> 2;\n\n\n\n let mut ret: u8 = 0;\n\n\n\n for _ in 0..len {\n\n unsafe {\n\n if *current != 0 && *current & *virgin != 0 {\n\n if ret < 2 {\n\n let cur = current as *const u8;\n\n let vir = virgin as *mut u8;\n\n\n\n #[cfg(target_pointer_width = \"64\")]\n\n if (*cur != 0 && *vir == 0xff)\n", "file_path": "plugins/afl/afl_lib/src/lib.rs", "rank": 16, "score": 245369.53478690604 }, { "content": "pub fn select_next_plugin(state: &mut State) {\n\n if !state.fuzzers.is_empty() {\n\n let prev = state.ui.selected_plugin;\n\n increment_selected(\n\n &mut state.ui.selected_plugin,\n\n state.fuzzers[0].stats.plugins.len() - 1,\n\n true,\n\n );\n\n if prev != state.ui.selected_plugin {\n\n state.ui.plugins_view.clear();\n\n state.ui.plugin_list.select(Some(state.ui.selected_plugin))\n\n }\n\n }\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 17, "score": 226853.81433280266 }, { "content": "pub fn select_prev_plugin(state: &mut State) {\n\n if !state.fuzzers.is_empty() {\n\n let prev = state.ui.selected_plugin;\n\n decrement_selected(\n\n &mut state.ui.selected_plugin,\n\n state.fuzzers[0].stats.plugins.len() - 1,\n\n true,\n\n );\n\n if prev != state.ui.selected_plugin {\n\n state.ui.plugins_view.clear();\n\n state.ui.plugin_list.select(Some(state.ui.selected_plugin))\n\n }\n\n }\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 18, "score": 226853.81433280266 }, { "content": "pub fn decrement_selected(val: &mut usize, max: usize, loop_to_max: bool) {\n\n if *val == 0 {\n\n if loop_to_max {\n\n *val = max - 1;\n\n }\n\n return;\n\n }\n\n *val -= 1;\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 19, "score": 225480.98698008363 }, { "content": "pub fn increment_selected(val: &mut usize, max: usize, loop_to_zero: bool) {\n\n if *val == max - 1 {\n\n if loop_to_zero {\n\n *val = 0;\n\n }\n\n return;\n\n }\n\n *val += 1;\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 20, "score": 225480.98698008363 }, { "content": "fn max_idx(width: u8, input_len: usize) -> usize {\n\n if input_len == 0 {\n\n return 0;\n\n }\n\n\n\n if width < 8 {\n\n (input_len * 8) - ((width) - 1) as usize\n\n } else {\n\n let delta = ((width / 
8) - 1) as usize;\n\n if delta > input_len {\n\n 0\n\n } else {\n\n input_len - delta\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct BitFlipState {\n\n idx: usize,\n", "file_path": "plugins/afl/afl_mutate/src/bit_flip.rs", "rank": 21, "score": 225366.07371608532 }, { "content": "fn read_file(src: &Path, dst: &mut Vec<u8>) -> bool {\n\n // Open file\n\n let mut fin = match File::open(src) {\n\n Ok(f) => f,\n\n _ => return false,\n\n };\n\n // Read contents\n\n dst.clear();\n\n if fin.read_to_end(dst).is_err() {\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "plugins/fs_store/src/helpers.rs", "rank": 22, "score": 222831.4518622184 }, { "content": "pub fn draw<B: Backend>(state: &mut State, f: &mut Frame<B>) {\n\n let size = f.size();\n\n // Split terminal into 3 main parts\n\n let rects = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints(\n\n [\n\n Constraint::Length(3),\n\n Constraint::Length(size.height - 4),\n\n Constraint::Length(1),\n\n ]\n\n .as_ref(),\n\n )\n\n .split(size);\n\n\n\n // Draw main sections\n\n let (header_rect, content_rect, footer_rect) = (rects[0], rects[1], rects[2]);\n\n draw_header(state, f, header_rect);\n\n draw_fuzzer(state, f, content_rect);\n\n draw_footer(state, f, footer_rect);\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 23, "score": 218358.9499019595 }, { "content": "pub fn run_target(s: &mut crate::State) -> Result<TargetExitStatus> {\n\n Ok(TargetExitStatus::Normal(0))\n\n}", "file_path": "plugins/afl/afl_fork_server/src/windows/server.rs", "rank": 25, "score": 216419.31298513082 }, { "content": "/// Draws the view when a fuzzer is selected\n\npub fn draw_fuzzer<B: Backend>(state: &mut State, f: &mut Frame<B>, area: Rect) {\n\n use std::fmt::Write;\n\n let mut fuzzer_list = Vec::with_capacity(1);\n\n\n\n state.ui.main_title.clear();\n\n\n\n let fuzzer_details = Block::default()\n\n .borders(Borders::ALL)\n\n .title(state.ui.main_title.as_str());\n\n if state.fuzzers.is_empty() {\n\n let content = Paragraph::new(Span::raw(\"<No fuzzers>\"))\n\n .block(fuzzer_details)\n\n .style(Style::default().fg(Color::Red))\n\n .alignment(tui::layout::Alignment::Center);\n\n f.render_widget(content, area);\n\n return;\n\n }\n\n\n\n // Add up the fuzzers for the current view\n\n if state.ui.selected_tab == 0 {\n", "file_path": "cf_tui/src/ui.rs", "rank": 26, "score": 206786.54449289216 }, { "content": "/// Renders selectable tabs\n\npub fn draw_header<B: Backend>(state: &mut State, f: &mut Frame<B>, area: Rect) {\n\n let fuzzer_tab_titles = std::iter::once(\"All\")\n\n .chain(\n\n state\n\n .fuzzers\n\n .iter()\n\n .map(|f| f.stats.plugins[0].name),\n\n )\n\n .collect::<Vec<&str>>()\n\n .drain(..)\n\n .map(Spans::from)\n\n .collect();\n\n\n\n let fuzzer_tabs = Tabs::new(fuzzer_tab_titles)\n\n .block(\n\n Block::default()\n\n .borders(Borders::ALL)\n\n .title(state.ui.tab_title.as_str()),\n\n )\n\n .highlight_style(Style::default().fg(Color::Yellow))\n\n .select(state.ui.selected_tab);\n\n f.render_widget(fuzzer_tabs, area);\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 27, "score": 206786.54449289216 }, { "content": "pub fn draw_footer<B: Backend>(state: &mut State, f: &mut Frame<B>, area: Rect) {\n\n state.sys_info.refresh_cpu();\n\n state.sys_info.refresh_memory();\n\n let cpu_speed = state\n\n .sys_info\n\n .get_processors()\n\n .iter()\n\n .fold(0f32, |t, c| t + c.get_cpu_usage()) as usize\n\n / state.sys_info.get_processors().len();\n\n let mem_usage =\n\n ((state.sys_info.get_used_memory() * 
100) / state.sys_info.get_total_memory()) as usize;\n\n\n\n use std::fmt::Write;\n\n let cpu_str = &mut state.ui.footer_content[1];\n\n cpu_str.clear();\n\n let _ = write!(cpu_str, \"{:02}%\", cpu_speed);\n\n let mem_str = &mut state.ui.footer_content[3];\n\n mem_str.clear();\n\n let _ = write!(mem_str, \"{:02}%\", mem_usage);\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 28, "score": 206786.54449289216 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _state = box_take!(plugin_ctx, State);\n\n\n\n store.remove(STORE_AFL_GLOBALS).unwrap();\n\n store.remove(STORE_AFL_QUEUE).unwrap();\n\n\n\n Ok(())\n\n}\n\n\n\nimpl State {\n\n /// Parse config values and sets their equivalent in AflState\n\n pub fn load_conf(&mut self, plugin_conf: &HashMap<String, String>) -> Result<()> {\n\n\n\n if plugin_conf.get(\"afl_skip_deterministic\").is_some() {\n\n self.afl.skip_deterministic = true;\n\n }\n", "file_path": "plugins/afl/afl_state/src/afl_state.rs", "rank": 30, "score": 203074.4213723967 }, { "content": "pub fn is_eq_lowercased(str1: &str, str2: &str) -> bool {\n\n\n\n let mut str1_chars = str1.chars().map(|c| c.to_lowercase().collect());\n\n let mut str2_chars = str2.chars().map(|c| c.to_lowercase().collect());\n\n\n\n loop {\n\n let s1: Option<String> = str1_chars.next();\n\n let s2: Option<String> = str2_chars.next();\n\n\n\n match (s1, s2) {\n\n (None, None) => break,\n\n (Some(s1), Some(s2)) if s1 != s2 => return false,\n\n _ => return false,\n\n }\n\n }\n\n\n\n return true;\n\n}", "file_path": "plugins/afl/afl_fork_server/src/windows/handlers/mod.rs", "rank": 32, "score": 201058.87560065312 }, { "content": "// We currently use Sha1. It might be worth exploring speed differences\n\n// with doing a pre crc32 check first ?\n\nfn compute_uid(hasher: &mut Sha1, buf: &[u8], dst: &mut [u8; 20]) {\n\n hasher.reset();\n\n hasher.input(buf);\n\n hasher.result(dst);\n\n}\n\n\n", "file_path": "plugins/fs_store/src/helpers.rs", "rank": 33, "score": 199433.05799533005 }, { "content": "pub fn ff(b: u8) -> u32 {\n\n 0xff << (b << 3)\n\n}\n\n/* Count the number of bytes set in the bitmap. Called fairly sporadically,\n\nmostly to update the status screen or calibrate and examine confirmed\n\nnew paths. 
*/\n", "file_path": "plugins/afl/afl_lib/src/lib.rs", "rank": 34, "score": 195948.49428429862 }, { "content": "pub fn count_bytes(bitmap: &[u8]) -> u32 {\n\n let mut ptr: *const u32 = bitmap.as_ptr() as *const u32;\n\n let len = bitmap.len() >> 2;\n\n let mut ret: u32 = 0;\n\n\n\n for _ in 0..len {\n\n let v;\n\n unsafe {\n\n v = *ptr;\n\n ptr = ptr.add(1);\n\n };\n\n\n\n if v == 0 {\n\n continue;\n\n }\n\n\n\n for i in 0..4 {\n\n if v & ff(i) > 0 {\n\n ret += 1;\n\n }\n", "file_path": "plugins/afl/afl_lib/src/lib.rs", "rank": 35, "score": 190110.4812013535 }, { "content": "struct State {\n\n afl: AflGlobals,\n\n queue: AflQueue,\n\n max_cal: u8,\n\n is_calibrating: bool,\n\n prev_idx: usize,\n\n first_trace: Vec<u8>,\n\n tmp: String,\n\n init_testcase_num: usize,\n\n\n\n queued_with_cov: StatNum,\n\n queued_variable: StatNum,\n\n\n\n num_execs: &'static u64,\n\n no_select: &'static mut bool,\n\n no_mutate: &'static mut bool,\n\n\n\n inputs: &'static Vec<CfInputInfo>,\n\n input_idx: &'static usize,\n\n input_priority: &'static mut BinaryHeap<InputPriority>,\n\n prev_exec_time_ns: &'static u64,\n\n trace_bits: Option<&'static Vec<u8>>,\n\n}\n\n\n", "file_path": "plugins/afl/afl_state/src/afl_state.rs", "rank": 36, "score": 188150.0665008541 }, { "content": "pub fn rol64(x: u64, r: u8) -> u64 {\n\n (x << r) | (x >> (64 - r))\n\n}\n\n\n", "file_path": "plugins/afl/afl_lib/src/lib.rs", "rank": 37, "score": 187932.1130515176 }, { "content": "fn write_file(dst: &Path, buf: &[u8]) -> bool {\n\n let mut file = match File::create(dst) {\n\n Ok(f) => f,\n\n _ => return false,\n\n };\n\n // Write file contents\n\n if file.write_all(buf).is_err() {\n\n let _ = std::fs::remove_file(&dst);\n\n return false;\n\n }\n\n true\n\n}\n\n\n\nimpl State {\n\n pub fn init(&mut self, core: &dyn PluginInterface, extra_input_folder: &str) {\n\n // first scan the input directory\n\n if let Ok(list) = fs::read_dir(extra_input_folder) {\n\n for r in list {\n\n let item = match r {\n\n Ok(i) => i,\n", "file_path": "plugins/fs_store/src/helpers.rs", "rank": 38, "score": 182817.60487816227 }, { "content": "pub fn hash32(buf: &[u8], seed: u32) -> u32 {\n\n let mut len = buf.len() as u32;\n\n let mut h1 = (seed ^ len) as u64;\n\n let mut k1;\n\n let mut data = buf.as_ptr() as *const u64;\n\n len >>= 3;\n\n while len > 0 {\n\n len -= 1;\n\n unsafe {\n\n k1 = *data;\n\n data = data.add(1);\n\n }\n\n k1 *= 0x87c37b91114253d5;\n\n k1 = rol64(k1, 31);\n\n k1 *= 0x4cf5ad432745937f;\n\n h1 ^= k1;\n\n h1 = rol64(h1, 27);\n\n h1 = h1 * 5 + 0x52dce729;\n\n }\n\n h1 ^= h1 >> 33;\n\n h1 *= 0xff51afd7ed558ccd;\n\n h1 ^= h1 >> 33;\n\n h1 *= 0xc4ceb9fe1a85ec53;\n\n h1 ^= h1 >> 33;\n\n h1 as u32\n\n}\n\n\n", "file_path": "plugins/afl/afl_lib/src/lib.rs", "rank": 39, "score": 182351.12756053996 }, { "content": "pub fn swap_16(v: u16) -> u16 {\n\n (v << 8) | (v >> 8)\n\n}\n\n\n", "file_path": "plugins/afl/afl_mutate/src/mutators.rs", "rank": 40, "score": 180368.88523787458 }, { "content": "pub fn swap_32(v: u32) -> u32 {\n\n (v << 24) | (v >> 24) | ((v << 8) & 0x00FF0000) | ((v >> 8) & 0x0000FF00)\n\n}\n", "file_path": "plugins/afl/afl_mutate/src/mutators.rs", "rank": 41, "score": 180368.88523787458 }, { "content": "/// Removes known prefixes and postfixes\n\npub fn strip_tag_hints(tag: &str) -> (&str, (Option<&'static str>, Option<&'static str>)) {\n\n let (res_tag, prefix) = strip_tag_prefix(tag);\n\n let (res_tag, postfix) = strip_tag_postfix(res_tag);\n\n\n\n (res_tag, (prefix, postfix))\n\n}\n\n\n", "file_path": "cflib/src/helpers.rs", 
"rank": 42, "score": 178098.99802027908 }, { "content": "struct State {\n\n orig_buf: Vec<u8>,\n\n cur_input: CfInput,\n\n restore_input: bool,\n\n cur_input_idx: usize,\n\n seq_input_idx: usize,\n\n input_list: &'static Vec<CfInputInfo>,\n\n no_select: &'static bool,\n\n priority_list: BinaryHeap<InputPriority>,\n\n num_priority_inputs: StatNum,\n\n num_old_inputs: usize,\n\n num_new_inputs: StatNum,\n\n}\n\n\n", "file_path": "plugins/select_input/src/select_input.rs", "rank": 43, "score": 175563.02504914635 }, { "content": "struct State {\n\n /// fast/non-crypto grade random\n\n rng: SmallRng,\n\n no_mutate: &'static bool,\n\n /// Reference to the currently selected input\n\n cur_input: &'static mut CfInput,\n\n \n\n}\n\n\n", "file_path": "plugins/basic_mutate/src/basic_mutate.rs", "rank": 44, "score": 174569.09744467307 }, { "content": "/// Attemps to get a PID from the shared memory. If the fuzzer\n\n/// hasn't initialized the shared memory after 1s, Ok(None) is returned.\n\npub fn get_fuzzer_pid(shmem_buf: &[u8]) -> Result<Option<u32>> {\n\n let mut cur = Cursor::new(shmem_buf);\n\n let header = CfStatsHeader::from_slice(&mut cur)?;\n\n\n\n unsafe {\n\n if read_volatile(header.magic) != STAT_MAGIC {\n\n return Err(From::from(format!(\"Fuzzer stats invalid magic : {:X} != {:X}\", read_volatile(header.magic), STAT_MAGIC)));\n\n }\n\n\n\n let mut num_checks = 0;\n\n // Wait until shmem is initialized\n\n while std::ptr::read_volatile(header.initialized) == 0 {\n\n std::thread::sleep(std::time::Duration::from_millis(200));\n\n num_checks += 1;\n\n if num_checks == 5 {\n\n // Fuzzer didnt init in time\n\n return Ok(None);\n\n }\n\n }\n\n\n", "file_path": "cflib/src/stats.rs", "rank": 45, "score": 174439.46451896965 }, { "content": "/// This function sets the log level for env_logger.\n\n/// It will set it to at least minimum_level and can be bumped up\n\n/// with verbose_level or RUST_LOG env var. (Whichever one is higher)\n\npub fn set_log_level(verbose_level: &usize, minimum_level: &str) -> String {\n\n let level_order: [&'static str; 5] = [\"error\", \"warn\", \"info\", \"debug\", \"trace\"];\n\n let default_log_level: String = match std::env::var(\"RUST_LOG\") {\n\n Ok(v) => v,\n\n Err(_e) => String::from(level_order[0]),\n\n };\n\n\n\n let mut cur_level: usize = 0;\n\n let mut wanted_level: usize = 0;\n\n\n\n for (i, &level_str) in level_order.iter().enumerate() {\n\n if level_str == default_log_level {\n\n cur_level = i;\n\n }\n\n\n\n if level_str == minimum_level {\n\n wanted_level = i + verbose_level;\n\n if wanted_level >= level_order.len() {\n\n wanted_level = level_order.len() - 1;\n\n }\n", "file_path": "core/src/log.rs", "rank": 46, "score": 173771.99256295568 }, { "content": "/// Removes known prefixes from a stat tag if present\n\npub fn strip_tag_prefix(tag: &str) -> (&str, Option<&'static str>) {\n\n let tag_len = tag.len();\n\n\n\n for val in TAG_PREFIXES {\n\n let val_len = val.len();\n\n\n\n if tag_len < val_len {\n\n continue;\n\n }\n\n\n\n if tag.starts_with(*val) {\n\n return (&tag[val_len..], Some(*val));\n\n }\n\n }\n\n\n\n (tag, None)\n\n}\n", "file_path": "cflib/src/helpers.rs", "rank": 47, "score": 173611.32223649463 }, { "content": "/// Removes known postfixes from a stat tag if present\n\npub fn strip_tag_postfix(tag: &str) -> (&str, Option<&'static str>) {\n\n let tag_len = tag.len();\n\n\n\n for postfix in TAG_POSTFIXES {\n\n let postfix_len = postfix.len();\n\n\n\n if tag_len < postfix_len {\n\n continue;\n\n }\n\n\n\n if &tag[tag_len - postfix_len..] 
== *postfix {\n\n return (&tag[..tag_len - postfix_len], Some(*postfix));\n\n }\n\n }\n\n\n\n (tag, None)\n\n}\n\n\n", "file_path": "cflib/src/helpers.rs", "rank": 48, "score": 173611.32223649463 }, { "content": "// Make sure we have everything to fuzz properly\n\nfn validate(\n\n core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n unsafe {\n\n s.inputs = store.as_ref(STORE_INPUT_LIST, Some(core))?;\n\n s.input_idx = store.as_ref(STORE_INPUT_IDX, Some(core))?;\n\n s.input_priority = store.as_mutref(STORE_INPUT_PRIORITY, Some(core))?;\n\n s.prev_exec_time_ns = store.as_mutref(STORE_TARGET_EXEC_TIME, Some(core))?;\n\n if let Ok(v) = store.as_ref(STORE_AFL_TRACE_BITS, None) {\n\n s.trace_bits = Some(v);\n\n } else {\n\n core.warn(\"No plugin gathering instrumentation...\");\n\n }\n\n }\n\n\n\n s.init_testcase_num = s.inputs.len();\n\n s.queue.reserve(s.inputs.len());\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/afl/afl_state/src/afl_state.rs", "rank": 49, "score": 172176.44482100813 }, { "content": "#[allow(clippy::missing_safety_doc)]\n\npub trait MutateUtil {\n\n /// Sets the byte at idx to val\n\n unsafe fn set_byte(&mut self, idx: usize, val: u8) -> u8;\n\n /// Sets the word at byte offset idx to val\n\n unsafe fn set_word(&mut self, idx: usize, val: u16) -> u16;\n\n /// Sets the dword at byte offset idx to val\n\n unsafe fn set_dword(&mut self, idx: usize, val: u32) -> u32;\n\n /// Flips bit at bit idx\n\n unsafe fn flip_bit(&mut self, bit_idx: usize) -> u8;\n\n /// Flips byte at byte idx\n\n unsafe fn flip_byte(&mut self, idx: usize) -> u8;\n\n /// Flips word at byte idx\n\n unsafe fn flip_word(&mut self, idx: usize) -> u16;\n\n /// Flips dword at byte idx\n\n unsafe fn flip_dword(&mut self, idx: usize) -> u32;\n\n /// Adds val to byte at byte idx\n\n unsafe fn add_byte(&mut self, idx: usize, val: u8) -> u8;\n\n /// Subs val to byte at byte idx\n\n unsafe fn sub_byte(&mut self, idx: usize, val: u8) -> u8;\n\n /// Adds val to word at byte idx\n", "file_path": "cflib/src/input.rs", "rank": 51, "score": 168167.88626273154 }, { "content": "// Perform our task in the fuzzing loop\n\nfn mutate_input(\n\n _core: &mut dyn PluginInterface,\n\n _store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n if *s.no_mutate || s.cur_input.is_empty() {\n\n // Input is empty ??\n\n return Ok(());\n\n }\n\n\n\n let input_len = s.cur_input.len();\n\n // Randomly mutate some bytes in the first chunk\n\n let num_of_bytes_mutated = s.rng.gen_range(0, input_len);\n\n for _ in 0..num_of_bytes_mutated {\n\n s.cur_input[s.rng.gen_range(0, input_len)] = s.rng.gen::<u8>();\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/basic_mutate/src/basic_mutate.rs", "rank": 52, "score": 162272.80575944838 }, { "content": "pub fn update_average(cur_avg: &mut u64, new_val: u64, val_num: u64) {\n\n let cur_val = *cur_avg;\n\n *cur_avg = if cur_val > new_val {\n\n cur_val - ((cur_val - new_val) / val_num)\n\n } else {\n\n cur_val + ((new_val - cur_val) / val_num)\n\n };\n\n}\n\n\n", "file_path": "cflib/src/helpers.rs", "rank": 53, "score": 162015.8937860543 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _ctx = box_take!(plugin_ctx, State);\n\n\n\n // Remove our store entries\n\n store.remove(STORE_INPUT_IDX).unwrap();\n\n 
store.remove(STORE_INPUT_BYTES).unwrap();\n\n store.remove(STORE_RESTORE_INPUT).unwrap();\n\n store.remove(STORE_INPUT_PRIORITY).unwrap();\n\n\n\n Ok(())\n\n}\n", "file_path": "plugins/select_input/src/select_input.rs", "rank": 54, "score": 161125.64708555196 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_take!(plugin_ctx, State);\n\n\n\n // If we created the input_list\n\n if s.is_input_list_owner {\n\n let _ = store.remove(STORE_INPUT_LIST);\n\n }\n\n\n\n // If we created the new_inputs\n\n if s.is_new_inputs_owner {\n\n let _ = store.remove(STORE_NEW_INPUTS);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "plugins/fs_store/src/fs_store.rs", "rank": 55, "score": 160740.19996688477 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n _store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _state = box_take!(plugin_ctx, State);\n\n Ok(())\n\n}\n", "file_path": "plugins/basic_mutate/src/basic_mutate.rs", "rank": 56, "score": 160131.71948107868 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _state = box_take!(plugin_ctx, State);\n\n\n\n store.remove(STORE_EXIT_STATUS).unwrap();\n\n store.remove(STORE_TARGET_EXEC_TIME).unwrap();\n\n store.remove(STORE_AVG_TARGET_EXEC_TIME).unwrap();\n\n\n\n Ok(())\n\n}\n\n\n\nimpl State {\n\n /// Parse the plugin_conf for our values\n\n pub fn load_config(\n\n &mut self,\n\n core: &mut dyn PluginInterface,\n\n conf: &HashMap<String, String>,\n", "file_path": "plugins/afl/afl_fork_server/src/afl_fork_server.rs", "rank": 57, "score": 155478.94907052728 }, { "content": "pub fn pretty_str(\n\n dst: &mut String,\n\n mut val: &str,\n\n type_hints: (Option<&'static str>, Option<&'static str>),\n\n) {\n\n if let Some(postfix) = type_hints.1 {\n\n // Strip windows path grossness\n\n if postfix == TAG_POSTFIX_PATH && val.starts_with(\"\\\\\\\\?\\\\\") {\n\n val = &val[4..];\n\n }\n\n }\n\n dst.push_str(val);\n\n}\n\n\n", "file_path": "cflib/src/helpers.rs", "rank": 58, "score": 148133.8268013259 }, { "content": "pub fn pretty_num(\n\n dst: &mut String,\n\n mut val: u64,\n\n type_hints: (Option<&'static str>, Option<&'static str>),\n\n) {\n\n use std::fmt::Write;\n\n\n\n let mut generated_str = false;\n\n\n\n if let Some(postfix) = type_hints.1 {\n\n val = match postfix {\n\n TAG_POSTFIX_HEX => {\n\n let _ = write!(dst, \"0x{:X}\", val);\n\n return;\n\n }\n\n TAG_POSTFIX_RESULT => {\n\n let _ = write!(dst, \"{}\", val);\n\n return;\n\n }\n\n // Convert time number to ns\n", "file_path": "cflib/src/helpers.rs", "rank": 59, "score": 148133.8268013259 }, { "content": "pub fn pretty_bytes(\n\n dst: &mut String,\n\n val: &[u8],\n\n type_hints: (Option<&'static str>, Option<&'static str>),\n\n) {\n\n use std::fmt::Write;\n\n let mut wrote = false;\n\n if let Some(postfix) = type_hints.1 {\n\n // Strip windows path grossness\n\n if postfix == TAG_POSTFIX_HEX {\n\n for b in val {\n\n let _ = write!(dst, \"{:02X}\", *b);\n\n }\n\n wrote = true;\n\n }\n\n }\n\n\n\n if !wrote {\n\n let _ = write!(dst, \"{:02X?}\", val);\n\n }\n", "file_path": "cflib/src/helpers.rs", "rank": 60, "score": 148133.8268013259 }, { "content": "pub fn destroy_ui() -> Result<()> {\n\n #[allow(deprecated)]\n\n execute!(stdout(), EnableBlinking, Show, 
LeaveAlternateScreen)?;\n\n disable_raw_mode()?;\n\n Ok(())\n\n}\n\n\n\npub struct CachedStat {\n\n pub val: CachedStatVal,\n\n pub str_repr_val: Option<u64>,\n\n pub str_repr: String,\n\n}\n\nimpl CachedStat {\n\n pub fn from(val: &mut StatVal) -> Self {\n\n // Create copy of real stat\n\n let val = match val {\n\n StatVal::Num(v) => CachedStatVal::Num(*v.val),\n\n StatVal::Str(v) => CachedStatVal::Str(String::from(*v.get())),\n\n StatVal::Bytes(v) => CachedStatVal::Bytes(Vec::from(*v.get())),\n\n };\n", "file_path": "cf_tui/src/ui.rs", "rank": 61, "score": 141033.8463335933 }, { "content": "// Perform our task in the fuzzing loop\n\nfn update_state(\n\n core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n // If there are new/never calibrated inputs\n\n if s.inputs.len() > s.queue.len() {\n\n let mut val = AflQueueEntry::default();\n\n val.cal_left = s.max_cal;\n\n val.handicap = *s.num_execs - 1;\n\n s.queue.resize(s.inputs.len(), val);\n\n }\n\n\n\n // Process calibration info from last run\n\n if s.is_calibrating {\n\n let mut first_cal = false;\n\n let q = unsafe { s.queue.get_unchecked_mut(s.prev_idx) };\n\n s.prev_idx = *s.input_idx;\n", "file_path": "plugins/afl/afl_state/src/afl_state.rs", "rank": 62, "score": 140428.5138739325 }, { "content": "/// Spawns another instance of the fuzzer\n\npub fn spawn_self(cwd: &Path, allow_stdout: bool) -> Result<Option<Child>> {\n\n let mut args = std::env::args();\n\n let process_path = args.next().unwrap();\n\n let mut new_args: Vec<String> = Vec::new();\n\n\n\n let mut skip_next = false;\n\n\n\n for arg in args {\n\n if skip_next {\n\n skip_next = false;\n\n continue;\n\n }\n\n\n\n // strip verbose from child process\n\n if !allow_stdout\n\n && (arg == ARG_VERBOSE_LONG\n\n || (arg.starts_with(ARG_VERBOSE_SHORT) && arg.chars().skip(1).all(|c| c == 'v')))\n\n {\n\n continue;\n\n } else if arg == ARG_INSTANCES_LONG || arg == ARG_INSTANCES_SHORT {\n", "file_path": "core/src/util.rs", "rank": 63, "score": 139416.3368546879 }, { "content": "pub trait CfStoreUtil {\n\n /// Inserts this reference casted to a raw pointer into the store.\n\n fn insert_exclusive<T>(\n\n &mut self,\n\n key: &str,\n\n val: &T,\n\n core: Option<&mut dyn PluginInterface>,\n\n ) -> Result<()>;\n\n\n\n /// Casts the value of this store's key entry to &T\n\n /// # Safety\n\n /// This function cannot validate any information about the store's values.\n\n /// Casting to the wrong T and bad assumptions about the lifetime of this reference will result in issues.\n\n unsafe fn as_ref<T>(\n\n &self,\n\n key: &str,\n\n core: Option<&mut dyn PluginInterface>,\n\n ) -> Result<&'static T>;\n\n\n\n /// Casts the value of this store's key entry to &mut T\n", "file_path": "cflib/src/store.rs", "rank": 64, "score": 139031.964911374 }, { "content": "/// Binds the current process to the specified core.\n\npub fn bind_to_core(target_core: usize) -> Result<usize> {\n\n set_affinity(&[target_core])?;\n\n Ok(target_core)\n\n}\n\n\n", "file_path": "core/src/util.rs", "rank": 65, "score": 138340.15020512984 }, { "content": "pub fn get_num_instances() -> Result<usize> {\n\n let mut num_found = 0;\n\n let prog_name: &'static str;\n\n if cfg!(target_os = \"windows\") {\n\n prog_name = concat!(env!(\"CARGO_PKG_NAME\"), \".exe\");\n\n } else {\n\n prog_name = env!(\"CARGO_PKG_NAME\");\n\n }\n\n\n\n let system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n\n\n for (_pid, info) in 
system.get_processes().iter() {\n\n if info.name() == prog_name {\n\n num_found += 1;\n\n }\n\n }\n\n\n\n // We should at least find ourselves\n\n if num_found == 0 {\n\n return Err(From::from(format!(\n", "file_path": "core/src/util.rs", "rank": 66, "score": 136560.64500513018 }, { "content": "struct State {\n\n hasher: Sha1,\n\n tmp_uid: [u8; 20],\n\n\n\n /// Reference to the currently selected input\n\n exit_status: &'static TargetExitStatus,\n\n cur_input: &'static CfInput,\n\n cur_input_idx: &'static usize,\n\n input_list: &'static Vec<CfInputInfo>,\n\n\n\n tmp_str: String,\n\n crash_dir: PathBuf,\n\n timeout_dir: PathBuf,\n\n num_crashes: StatNum,\n\n num_timeouts: StatNum,\n\n stat_crash_dir: StatStr,\n\n stat_timeout_dir: StatStr,\n\n}\n\n\n", "file_path": "plugins/save_result/src/save_result.rs", "rank": 67, "score": 131352.1736444657 }, { "content": "struct State {\n\n /// Reference to the currently selected input\n\n input_file: Option<File>,\n\n exec_time: u64,\n\n avg_exec_time: StatNum,\n\n cur_input: &'static CfInput,\n\n avg_denominator: &'static u64,\n\n exit_status: TargetExitStatus,\n\n cmd: Command,\n\n target_input_path: Option<String>,\n\n target_working_dir: Option<String>,\n\n target_timeout_ms: Option<Duration>,\n\n}\n\n\n", "file_path": "plugins/run_target/src/run_target.rs", "rank": 68, "score": 131352.1736444657 }, { "content": "struct State {\n\n num_execs: StatNum,\n\n fuzzer_name: &'static String,\n\n}\n\n\n", "file_path": "plugins/test_plugin/src/test_plugin.rs", "rank": 69, "score": 131352.1736444657 }, { "content": "// Make sure we have everything to fuzz properly\n\nfn validate(\n\n core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n // Make sure someone created INPUT_LIST\n\n s.input_list = unsafe { store.as_ref(STORE_INPUT_LIST, Some(core))? 
};\n\n\n\n if !s.input_list.is_empty() {\n\n s.seq_input_idx = s.input_list.len() - 1;\n\n }\n\n\n\n s.num_old_inputs = s.input_list.len();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/select_input/src/select_input.rs", "rank": 70, "score": 130227.67053416342 }, { "content": "// Make sure we have everything to fuzz properly\n\nfn validate(\n\n _core: &mut dyn PluginInterface,\n\n _store: &mut CfStore,\n\n _plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n // We dont rely on any other plugin\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/fs_store/src/fs_store.rs", "rank": 71, "score": 129842.22341549622 }, { "content": "// Make sure we have everything to fuzz properly\n\nfn validate(\n\n core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n // We need a plugin that creates in input_bytes\n\n unsafe {\n\n s.cur_input = store.as_mutref(STORE_INPUT_BYTES, Some(core))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/basic_mutate/src/basic_mutate.rs", "rank": 72, "score": 129233.74292969014 }, { "content": "pub fn init_ui() -> Result<Terminal<CrosstermBackend<Stdout>>> {\n\n // Setup the UI\n\n enable_raw_mode()?;\n\n let mut stdout = stdout();\n\n #[allow(deprecated)]\n\n execute!(stdout, EnterAlternateScreen, Hide, DisableBlinking)?;\n\n let backend = CrosstermBackend::new(stdout);\n\n let mut terminal = Terminal::new(backend)?;\n\n terminal.hide_cursor()?;\n\n Ok(terminal)\n\n}\n\n\n", "file_path": "cf_tui/src/ui.rs", "rank": 73, "score": 126167.60956825857 }, { "content": "// Make sure we have everything to fuzz properly\n\nfn validate(\n\n core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let s = box_ref!(plugin_ctx, State);\n\n\n\n unsafe {\n\n s.afl = store.as_mutref(STORE_AFL_GLOBALS, Some(core))?;\n\n // Make sure someone is providing us input bytes\n\n s.cur_input = store.as_ref(STORE_INPUT_BYTES, Some(core))?;\n\n }\n\n\n\n s.ctx = os::State::new(s, core, store)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/afl/afl_fork_server/src/afl_fork_server.rs", "rank": 74, "score": 124580.97251913874 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n core: &mut dyn PluginInterface,\n\n _store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _ctx = box_take!(plugin_ctx, State);\n\n\n\n core.debug(\"Unloading !\");\n\n\n\n Ok(())\n\n}\n", "file_path": "plugins/test_plugin/src/test_plugin.rs", "rank": 75, "score": 116914.79568087132 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _state = box_take!(plugin_ctx, State);\n\n\n\n store.remove(STORE_EXIT_STATUS)?;\n\n store.remove(STORE_TARGET_EXEC_TIME)?;\n\n store.remove(STORE_AVG_TARGET_EXEC_TIME)?;\n\n\n\n Ok(())\n\n}\n\n\n\nimpl State {\n\n /// Parse the plugin_conf for our values\n\n pub fn load_config(\n\n &mut self,\n\n core: &mut dyn PluginInterface,\n\n conf: &HashMap<String, String>,\n", "file_path": "plugins/run_target/src/run_target.rs", "rank": 76, "score": 116914.79568087132 }, { "content": "// Unload and free our resources\n\nfn destroy(\n\n _core: &mut dyn PluginInterface,\n\n _store: &mut CfStore,\n\n plugin_ctx: *mut u8,\n\n) -> Result<()> {\n\n let _state = box_take!(plugin_ctx, State);\n\n Ok(())\n\n}\n\n\n\nimpl State {\n\n /// Saves the current input if exit_status was interesting\n\n pub fn 
save_input(&mut self) -> Result<bool> {\n\n // Likely path first\n\n if let TargetExitStatus::Normal(_) = self.exit_status {\n\n return Ok(false);\n\n }\n\n\n\n let dst: &mut PathBuf = match self.exit_status {\n\n TargetExitStatus::Crash(_) => {\n\n *self.num_crashes.val += 1;\n", "file_path": "plugins/save_result/src/save_result.rs", "rank": 77, "score": 116914.79568087132 }, { "content": "fn default_stats_path() -> String {\n\n String::from(\"fuzzer_stats\")\n\n}\n\n\n", "file_path": "core/src/config.rs", "rank": 78, "score": 115025.74835125005 }, { "content": "use crate::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct InterestState {\n\n idx: usize,\n\n prev_val: Option<(usize, u32)>,\n\n width: u8,\n\n val_idx: usize,\n\n}\n\nimpl InterestState {\n\n pub fn new(input: &[u8]) -> Self {\n\n Self {\n\n idx: input.len(),\n\n prev_val: None,\n\n width: 1,\n\n val_idx: INTERESTING_8.len() - 1,\n\n }\n\n }\n\n\n\n pub fn desc(&self, dst: &mut String) {\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 79, "score": 113346.2602580777 }, { "content": "use crate::*;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct ArithState {\n\n idx: usize,\n\n // Index of last changed value with its original contents\n\n prev_val: Option<(usize, u32)>,\n\n /// Current width of the operation (1,2,4,-1,-2,-4)\n\n width: i8,\n\n /// Value use for the arithmetic operation (1 -> ARITH_MAX)\n\n cur_val: u8,\n\n}\n\n\n\nimpl ArithState {\n\n pub fn new(input: &[u8]) -> Self {\n\n Self {\n\n idx: input.len(),\n\n prev_val: None,\n\n width: 1,\n\n cur_val: 1,\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 80, "score": 113338.23939138738 }, { "content": " dst.push_str(\"interest \");\n\n dst.push_str(match self.width {\n\n 1 => \"8/8\",\n\n 2 => \"16/8\",\n\n _ => \"32/8\",\n\n });\n\n }\n\n\n\n pub fn total_cycles(&self, input: &[u8]) -> usize {\n\n let mut total = 0;\n\n let mut i = 1;\n\n loop {\n\n let max_idx = input.len() - (i - 1) as usize;\n\n\n\n total += max_idx * if i == 1 {\n\n INTERESTING_8.len()\n\n } else if i == 2 {\n\n INTERESTING_16.len()\n\n } else {\n\n INTERESTING_32.len()\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 81, "score": 113336.72502976256 }, { "content": " }\n\n }\n\n\n\n pub fn desc(&self, dst: &mut String) {\n\n dst.push_str(\"arith \");\n\n dst.push_str(match self.width.abs() {\n\n 1 => \"8/8\",\n\n 2 => \"16/8\",\n\n _ => \"32/8\",\n\n });\n\n }\n\n\n\n pub fn total_cycles(&self, input: &[u8]) -> usize {\n\n let mut total = 0;\n\n let mut i = 1;\n\n loop {\n\n total += (input.len() - (i - 1) as usize) * 4 * 35;\n\n i *= 2;\n\n if i > 4 {\n\n break;\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 82, "score": 113336.70062331071 }, { "content": " };\n\n\n\n i *= 2;\n\n if i > 4 {\n\n break;\n\n }\n\n }\n\n total\n\n }\n\n\n\n pub fn mutate(&mut self, mut input: &mut [u8]) -> StageResult {\n\n // Restore the orig input\n\n if let Some((idx, orig_val)) = self.prev_val.take() {\n\n unsafe {\n\n match self.width {\n\n 1 => {\n\n input.set_byte(idx, orig_val as _);\n\n }\n\n 2 => {\n\n input.set_word(idx, orig_val as _);\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 83, "score": 113331.79645441927 }, { "content": " }\n\n }\n\n total\n\n }\n\n\n\n /// Increment/decrement values\n\n pub fn mutate(&mut self, mut input: &mut [u8]) -> StageResult {\n\n // Restore the orig input\n\n if let Some((idx, orig_val)) = self.prev_val.take() {\n\n unsafe {\n\n match self.width.abs() {\n\n 
1 => {\n\n input.set_byte(idx, orig_val as _);\n\n }\n\n 2 => {\n\n input.set_word(idx, orig_val as _);\n\n }\n\n _ => {\n\n input.set_dword(idx, orig_val as _);\n\n }\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 84, "score": 113331.04537325022 }, { "content": " if new_val == tval {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n if blen == 4 && check_le {\n\n /* See if four-byte insertions could produce the same result\n\n (LE only). */\n\n\n\n for j in 0..INTERESTING_32.len() {\n\n if new_val == unsafe { *INTERESTING_32.get_unchecked(j) } as u32 {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n\npub const INTERESTING_8: &[u8] = &[\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 85, "score": 113329.16191488385 }, { "content": " // Reset intersting value index\n\n self.val_idx = if self.width == 2 {\n\n INTERESTING_16.len() - 1\n\n } else {\n\n INTERESTING_32.len() - 1\n\n };\n\n self.idx = input.len() - (self.width - 1) as usize;\n\n return StageResult::Update;\n\n }\n\n \n\n self.idx = input.len() - (self.width - 1) as usize;\n\n self.val_idx -= 1;\n\n continue;\n\n }\n\n\n\n self.idx -= 1;\n\n\n\n let orig = unsafe {\n\n match self.width {\n\n 1 => input.set_byte(self.idx, *INTERESTING_8.get_unchecked(self.val_idx)) as _,\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 86, "score": 113326.38034128147 }, { "content": " ));\n\n }\n\n _ => unreachable!(),\n\n };\n\n }\n\n break;\n\n }\n\n StageResult::WillRestoreInput\n\n }\n\n}\n\n\n\n/* Helper function to see if a particular value is reachable through\n\narithmetic operations. Used for similar purposes. */\n\n\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 87, "score": 113324.57728141936 }, { "content": " if blen == 2 && !check_le {\n\n return false;\n\n }\n\n /* See if two-byte insertions over old_val could give us new_val. */\n\n\n\n for i in 0..blen - 1 {\n\n for j in 0..INTERESTING_16.len() {\n\n let mut tval = unsafe {\n\n (old_val & !(0xFFFF << (i * 8)))\n\n | ((*INTERESTING_16.get_unchecked(j) as u32) << (i * 8))\n\n };\n\n if new_val == tval {\n\n return true;\n\n }\n\n /* Continue here only if blen > 2. */\n\n if blen > 2 {\n\n tval = unsafe {\n\n (old_val & !(0xffff << (i * 8)))\n\n | ((swap_16(*INTERESTING_16.get_unchecked(j) as u16) as u32) << (i * 8))\n\n };\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 88, "score": 113323.62468694914 }, { "content": " self.idx = input.len() - (self.width.abs() - 1) as usize;\n\n self.cur_val = 1;\n\n return StageResult::Update;\n\n }\n\n \n\n self.idx = input.len() - (self.width.abs() - 1) as usize;\n\n self.cur_val += 1;\n\n continue; \n\n }\n\n\n\n self.idx -= 1;\n\n\n\n unsafe {\n\n match self.width {\n\n 1 => {\n\n self.prev_val =\n\n Some((self.idx, input.add_byte(self.idx, self.cur_val as _) as u32));\n\n }\n\n 2 => {\n\n self.prev_val =\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 89, "score": 113322.08408049514 }, { "content": " 2 => input.set_word(self.idx, *INTERESTING_16.get_unchecked(self.val_idx)) as _,\n\n _ => input.set_dword(self.idx, *INTERESTING_32.get_unchecked(self.val_idx)),\n\n }\n\n };\n\n\n\n self.prev_val = Some((self.idx, orig));\n\n\n\n break;\n\n }\n\n\n\n \n\n \n\n\n\n StageResult::WillRestoreInput\n\n }\n\n}\n\n\n\n/* Last but not least, a similar helper to see if insertion of an\n\ninteresting integer is redundant given the insertions done for\n\nshorter blen. 
The last param (check_le) is set if the caller\n\nalready executed LE insertion for current blen and wants to see\n\nif BE variant passed in new_val is unique. */\n\n\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 90, "score": 113321.78107232653 }, { "content": " 0, /* */\n\n 1, /* */\n\n 16, /* One-off with common buffer size */\n\n 32, /* One-off with common buffer size */\n\n 64, /* One-off with common buffer size */\n\n 100, /* One-off with common buffer size */\n\n 127, /* */\n\n 128, /* Overflow signed 8-bit when decremented */\n\n 255, /* u8::MAX */\n\n];\n\npub const INTERESTING_16: &[u16] = &[\n\n 0, 1, 16, 32, 64, 100, 127, 128, 128, 255, /* Overflow signed 8-bit */\n\n 256, /* Overflow unsig 8-bit */\n\n 512, /* One-off with common buffer size */\n\n 1000, /* One-off with common buffer size */\n\n 1024, /* One-off with common buffer size */\n\n 4096, /* One-off with common buffer size */\n\n 32767, /* Overflow signed 16-bit when incremented */\n\n 32768, /* Overflow signed 16-bit when decremented */\n\n 65407, /* Overflow signed 8-bit */\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 91, "score": 113321.45860160561 }, { "content": " }\n\n _ => {\n\n input.set_dword(idx, orig_val);\n\n }\n\n };\n\n }\n\n }\n\n\n\n loop {\n\n // If we have reached the end of the buffer\n\n if self.idx == 0 {\n\n // If done all interesting values\n\n if self.val_idx == 0 {\n\n if self.width == 4 {\n\n // Last stage is Interest(u32)\n\n return StageResult::Done;\n\n } else {\n\n // Move to next width\n\n self.width *= 2;\n\n }\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 92, "score": 113321.24962219901 }, { "content": "\n\n if diffs == 1 && (ov - nv <= ARITH_MAX as _ || nv - ov <= ARITH_MAX as _) {\n\n return true;\n\n }\n\n if blen == 1 {\n\n return false;\n\n }\n\n /* See if two-byte adjustments to any byte would produce this result. 
*/\n\n diffs = 0;\n\n let mut ov: u16 = 0;\n\n let mut nv: u16 = 0;\n\n\n\n for i in 0..blen / 2 {\n\n let a = old_val >> (16 * i);\n\n let b = new_val >> (16 * i);\n\n\n\n if a != b {\n\n diffs += 1;\n\n ov = a as _;\n\n nv = b as _;\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 93, "score": 113320.58082862517 }, { "content": " 65535, /* u16::MAX */\n\n];\n\npub const INTERESTING_32: &[u32] = &[\n\n 0, 1, 16, 32, 64, 100, 127, 128, 128, 255, 256, 512, 1000, 1024, 4096, 32767, 32768, 65407,\n\n 65535, 65536, /* Overflow unsig 16 bit */\n\n 100663045, /* Large positive number (endian-agnostic) */\n\n 2147483647, /* Overflow signed 32-bit when incremented */\n\n 2147483648, /* Overflow signed 32-bit when decremented */\n\n 4194304250, /* Large negative number (endian-agnostic) */\n\n 4294934527, /* Overflow signed 16-bit */\n\n 4294967295, /* u32::MAX */\n\n];\n", "file_path": "plugins/afl/afl_mutate/src/interesting.rs", "rank": 94, "score": 113320.07239988065 }, { "content": " Some((self.idx, input.add_word(self.idx, self.cur_val as _) as u32));\n\n }\n\n 4 => {\n\n self.prev_val = Some((\n\n self.idx,\n\n input.add_dword(self.idx, self.cur_val as _) as u32,\n\n ));\n\n }\n\n -1 => {\n\n self.prev_val =\n\n Some((self.idx, input.sub_byte(self.idx, self.cur_val as _) as u32));\n\n }\n\n -2 => {\n\n self.prev_val =\n\n Some((self.idx, input.sub_word(self.idx, self.cur_val as _) as u32));\n\n }\n\n -4 => {\n\n self.prev_val = Some((\n\n self.idx,\n\n input.sub_dword(self.idx, self.cur_val as _) as u32,\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 95, "score": 113318.30345338723 }, { "content": " }\n\n }\n\n /* If only one word differs and the values are within range, return 1. */\n\n if diffs == 1 {\n\n if ov - nv <= ARITH_MAX as _ || nv - ov <= ARITH_MAX as _ {\n\n return true;\n\n }\n\n\n\n ov = swap_16(ov);\n\n nv = swap_16(nv);\n\n\n\n if ov - nv <= ARITH_MAX as _ || nv - ov <= ARITH_MAX as _ {\n\n return true;\n\n }\n\n }\n\n /* Finally, let's do the same thing for dwords. 
*/\n\n if blen == 4 {\n\n if old_val - new_val <= ARITH_MAX as _ || new_val - old_val <= ARITH_MAX as _ {\n\n return true;\n\n }\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 96, "score": 113317.9457656938 }, { "content": "\n\n new_val = swap_32(new_val);\n\n old_val = swap_32(old_val);\n\n\n\n if old_val - new_val <= ARITH_MAX as _ || new_val - old_val <= ARITH_MAX as _ {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 97, "score": 113317.10880045946 }, { "content": " };\n\n }\n\n }\n\n\n\n loop {\n\n // If we have reached the end of the buffer\n\n if self.idx == 0 {\n\n // If done all arith values\n\n if self.cur_val == ARITH_MAX {\n\n if self.width == -4 {\n\n // Last stage is Sub(u32)\n\n return StageResult::Done;\n\n } else if self.width == 4 {\n\n // Loop from Add(u32) to Sub(u8)\n\n self.width = -1;\n\n } else {\n\n // Move to next width\n\n self.width *= 2;\n\n }\n\n\n", "file_path": "plugins/afl/afl_mutate/src/arithmetic.rs", "rank": 98, "score": 113316.89375307302 }, { "content": " HAVOC_CYCLES_INIT\n\n } as usize\n\n * (perf_score as usize)\n\n / afl.havoc_div as usize\n\n / 100;\n\n\n\n if self.num_iterations < HAVOC_MIN {\n\n self.num_iterations = HAVOC_MIN;\n\n }\n\n }\n\n\n\n pub fn mutate(&mut self, input: &mut CfInput) -> StageResult {\n\n self.num_iterations -= 1;\n\n if self.num_iterations == 0 {\n\n return StageResult::Done;\n\n }\n\n\n\n let mut num_stacks = 1 << self.rng.gen_range(1, HAVOC_STACK_POW2);\n\n loop {\n\n unsafe {\n", "file_path": "plugins/afl/afl_mutate/src/havoc.rs", "rank": 99, "score": 113256.19203203813 } ]
Rust
kapp_platforms/src/windows/application_windows.rs
kettle11/kettlewin
36109e9ab506b9bce55da6e1cdee0d10e64e6dc4
use super::external_windows::*; use super::utils_windows::*; use std::convert::TryInto; use std::ptr::{null, null_mut}; use kapp_platform_common::*; pub static mut CURRENT_CURSOR: HCURSOR = null_mut(); pub static mut WINDOWS_TO_REDRAW: Vec<WindowId> = Vec::new(); pub struct PlatformApplication { window_class_name: Vec<u16>, h_instance: HINSTANCE, } pub(crate) struct WindowData { pub minimum_width: u32, pub minimum_height: u32, pub maximum_width: u32, pub maximum_height: u32, } impl PlatformApplicationTrait for PlatformApplication { type EventLoop = PlatformEventLoop; fn new() -> Self { unsafe { SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE); let window_class_name = win32_string("windowing_rust"); let h_instance = GetModuleHandleW(null_mut()); let window_class = WNDCLASSW { style: CS_DBLCLKS, lpfnWndProc: Some(super::event_loop_windows::window_callback), cbClsExtra: 0, cbWndExtra: 0, hInstance: h_instance, hIcon: null_mut(), hCursor: null_mut(), hbrBackground: null_mut(), lpszMenuName: null_mut(), lpszClassName: window_class_name.as_ptr(), }; CURRENT_CURSOR = LoadCursorW(null_mut(), IDC_ARROW); RegisterClassW(&window_class); Self { window_class_name, h_instance, } } } fn event_loop(&mut self) -> Self::EventLoop { PlatformEventLoop {} } fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); let width = rect.right - rect.left; let height = rect.bottom - rect.top; MoveWindow( window_id.raw() as HWND, x as i32, y as i32, width, height, FALSE, ); } } fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, rect.left, rect.top, width as i32, height as i32, FALSE, ); } } fn set_window_title(&mut self, window_id: WindowId, title: &str) { let title = win32_string(title); unsafe { SetWindowTextW(window_id.raw() as HWND, title.as_ptr()); } } fn minimize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MINIMIZE); } } fn maximize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MAXIMIZE); } } fn fullscreen_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let screen_width = GetSystemMetrics(SM_CXSCREEN); let screen_height = GetSystemMetrics(SM_CYSCREEN); SetWindowLongPtrW(hwnd, GWL_STYLE, (WS_VISIBLE | WS_POPUP).try_into().unwrap()); let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, 0, 0, screen_width as i32, screen_height as i32, FALSE, ); } } fn restore_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; SetWindowLongPtrW(hwnd, GWL_STYLE, window_style.try_into().unwrap()); ShowWindow(window_id.raw() as HWND, SW_RESTORE); } } fn close_window(&mut self, window_id: WindowId) { unsafe { CloseWindow(window_id.raw() as HWND); } } fn redraw_window(&mut self, window_id: WindowId) { redraw_manager::add_draw_request(window_id); } fn get_window_size(&mut self, window_id: WindowId) -> (u32, u32) { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; unsafe { GetClientRect(window_id.raw() as HWND, &mut rect); } ( (rect.right - rect.left) as u32, (rect.bottom - rect.top) as 
u32, ) } fn get_window_scale(&mut self, window_id: WindowId) -> f64 { let dpi = unsafe { GetDpiForWindow(window_id.raw() as HWND) }; dpi as f64 / USER_DEFAULT_SCREEN_DPI as f64 } fn lock_mouse_position(&mut self) { unsafe { let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); let rect = RECT { left: position.x, top: position.y, right: position.x, bottom: position.y, }; ClipCursor(&rect); } } fn unlock_mouse_position(&mut self) { unsafe { ClipCursor(null()); } } fn new_window(&mut self, window_parameters: &WindowParameters) -> WindowId { unsafe { let extended_style = WS_EX_APPWINDOW; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; let title = win32_string(&window_parameters.title); let (x, y) = if let Some(position) = window_parameters.position { (position.0 as i32, position.1 as i32) } else { (CW_USEDEFAULT, CW_USEDEFAULT) }; let (width, height) = window_parameters .size .map_or((CW_USEDEFAULT, CW_USEDEFAULT), |d| { let mut rect = RECT { left: 0, top: 0, right: d.0 as i32, bottom: d.1 as i32, }; AdjustWindowRectEx(&mut rect, window_style, FALSE, extended_style); (rect.right - rect.left, rect.bottom - rect.top) }); let (minimum_width, minimum_height) = window_parameters.minimum_size.unwrap_or(( GetSystemMetrics(SM_CXMINTRACK) as u32, GetSystemMetrics(SM_CYMINTRACK) as u32, )); let (maximum_width, maximum_height) = window_parameters.maximum_size.unwrap_or(( GetSystemMetrics(SM_CXMAXTRACK) as u32, GetSystemMetrics(SM_CYMAXTRACK) as u32, )); let window_data = Box::new(WindowData { minimum_width, minimum_height, maximum_width, maximum_height, }); let data = Box::leak(window_data) as *mut WindowData as *mut std::ffi::c_void; let window_handle = CreateWindowExW( extended_style, self.window_class_name.as_ptr(), title.as_ptr(), window_style, x as i32, y as i32, width, height, null_mut(), null_mut(), self.h_instance, data, ); let window_id = WindowId::new(window_handle as *mut std::ffi::c_void); redraw_manager::add_draw_request(window_id); WINDOWS_TO_REDRAW.push(window_id); window_id } } fn quit(&self) { unsafe { PostQuitMessage(0); } } fn set_cursor(&mut self, cursor: Cursor) { unsafe { let cursor = match cursor { Cursor::Arrow => LoadCursorW(null_mut(), IDC_ARROW), Cursor::IBeam => LoadCursorW(null_mut(), IDC_IBEAM), Cursor::PointingHand => LoadCursorW(null_mut(), IDC_ARROW), Cursor::OpenHand => LoadCursorW(null_mut(), IDC_HAND), Cursor::ClosedHand => LoadCursorW(null_mut(), IDC_HAND), }; SetCursor(super::application_windows::CURRENT_CURSOR); let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); SetCursorPos(position.x, position.y); CURRENT_CURSOR = cursor; } } fn hide_cursor(&mut self) { unsafe { ShowCursor(FALSE); } } fn show_cursor(&mut self) { unsafe { ShowCursor(TRUE); } } fn raw_window_handle(&self, window_id: WindowId) -> RawWindowHandle { raw_window_handle::RawWindowHandle::Windows(raw_window_handle::windows::WindowsHandle { hwnd: unsafe { window_id.raw() }, hinstance: self.h_instance as *mut std::ffi::c_void, ..raw_window_handle::windows::WindowsHandle::empty() }) } fn start_text_input(&mut self) { todo!() } fn end_text_input(&mut self) { todo!() } fn set_text_input_rectangle( &mut self, _window_id: WindowId, _x: f64, _y: f64, _width: f64, _height: f64, ) { todo!() } } impl Drop for PlatformApplication { fn drop(&mut self) { self.quit(); } } pub struct PlatformEventLoop {} impl PlatformEventLoopTrait for PlatformEventLoop { fn run(&self, callback: Box<dyn FnMut(kapp_platform_common::Event)>) { super::event_loop_windows::run(callback); } }
use super::external_windows::*; use super::utils_windows::*; use std::convert::TryInto; use std::ptr::{null, null_mut}; use kapp_platform_common::*; pub static mut CURRENT_CURSOR: HCURSOR = null_mut(); pub static mut WINDOWS_TO_REDRAW: Vec<WindowId> = Vec::new(); pub struct PlatformApplication { window_class_name: Vec<u16>, h_instance: HINSTANCE, } pub(crate) struct WindowData { pub minimum_width: u32, pub minimum_height: u32, pub maximum_width: u32, pub maximum_height: u32, } impl PlatformApplicationTrait for PlatformApplication { type EventLoop = PlatformEventLoop; fn new() -> Self { unsafe { SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE); let window_class_name = win32_string("windowing_rust"); let h_instance = GetModuleHandleW(null_mut()); let window_class = WNDCLASSW { style: CS_DBLCLKS, lpfnWndProc: Some(super::event_loop_windows::window_callback), cbClsExtra: 0, cbWndExtra: 0, hInstance: h_instance, hIcon: null_mut(), hCursor: null_mut(), hbrBackground: null_mut(), lpszMenuName: null_mut(), lpszClassName: window_class_name.as_ptr(), }; CURRENT_CURSOR = LoadCursorW(null_mut(), IDC_ARROW); RegisterClassW(&window_class); Self { window_class_name, h_instance, } } } fn event_loop(&mut self) -> Self::EventLoop { PlatformEventLoop {} } fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); let width = rect.right - rect.left; let height = rect.bottom - rect.top; MoveWindow( window_id.raw() as HWND, x as i32, y as i32, width, height, FALSE, ); } } fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) { unsafe { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, rect.left, rect.top, width as i32, height as i32, FALSE, ); } } fn set_window_title(&mut self, window_id: WindowId, title: &str) { let title = win32_string(title); unsafe { SetWindowTextW(window_id.raw() as HWND, title.as_ptr()); } } fn minimize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MINIMIZE); } } fn maximize_window(&mut self, window_id: WindowId) { unsafe { ShowWindow(window_id.raw() as HWND, SW_MAXIMIZE); } } fn fullscreen_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let screen_width = GetSystemMetrics(SM_CXSCREEN); let screen_height = GetSystemMetrics(SM_CYSCREEN); SetWindowLongPtrW(hwnd, GWL_STYLE, (WS_VISIBLE | WS_POPUP).try_into().unwrap()); let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; GetWindowRect(window_id.raw() as HWND, &mut rect); MoveWindow( window_id.raw() as HWND, 0, 0, screen_width as i32, screen_height as i32, FALSE, ); } } fn restore_window(&mut self, window_id: WindowId) { unsafe { let hwnd = window_id.raw() as HWND; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; SetWindowLongPtrW(hwnd, GWL_STYLE, window_style.try_into().unwrap()); ShowWindow(window_id.raw() as HWND, SW_RESTORE); } } fn close_window(&mut self, window_id: WindowId) { unsafe { CloseWindow(window_id.raw() as HWND); } } fn redraw_window(&mut self, window_id: WindowId) { redraw_manager::add_draw_request(window_id); } fn get_window_size(&mut self, window_id: WindowId) -> (u32, u32) { let mut rect = RECT { left: 0, top: 0, right: 0, bottom: 0, }; unsafe { GetClientRect(window_id.raw() as HWND, &mut rect); } ( (rect.right - rect.left) as u32, (rect.bottom - rect.top) as 
u32, ) } fn get_window_scale(&mut self, window_id: WindowId) -> f64 { let dpi = unsafe { GetDpiForWindow(window_id.raw() as HWND) }; dpi as f64 / USER_DEFAULT_SCREEN_DPI as f64 } fn lock_mouse_position(&mut self) { unsafe { let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); let rect = RECT { left: position.x, top: position.y, right: position.x, bottom: position.y, }; ClipCursor(&rect); } } fn unlock_mouse_position(&mut self) { unsafe { ClipCursor(null()); } } fn new_window(&mut self, window_parameters: &WindowParameters) -> WindowId { unsafe { let extended_style = WS_EX_APPWINDOW; let window_style = WS_OVERLAPPEDWINDOW | WS_VISIBLE | CS_OWNDC; let title = win32_string(&window_parameters.title); let (x, y) = if let Some(position) = window_parameters.position { (position.0 as i32, position.1 as i32) } else { (CW_USEDEFAULT, CW_USEDEFAULT) }; let (width, height) = window_parameters .size .map_or((CW_USEDEFAULT, CW_USEDEFAULT), |d| { let mut rect = RECT { left: 0, top: 0, right: d.0 as i32, bottom: d.1 as i32, }; AdjustWindowRectEx(&mut rect, window_style, FALSE, extended_style); (rect.right - rect.left, rect.bottom - rect.top) }); let (minimum_width, minimum_height) = window_parameters.minimum_size.unwrap_or(( GetSystemMetrics(SM_CXMINTRACK) as u32, GetSystemMetrics(SM_CYMINTRACK) as u32, )); let (maximum_width, maximum_height) = window_parameters.maximum_size.unwrap_or(( GetSystemMetrics(SM_CXMAXTRACK) as u32, GetSystemMetrics(SM_CYMAXTRACK) as u32, )); let window_data = Box::new(WindowData { minimum_width, minimum_height, maximum_width, maximum_height, }); let data = Box::leak(window_data) as *mut WindowData as *mut std::ffi::c_void; let window_handle = CreateWindowExW( extended_style, self.window_class_name.as_ptr(), title.as_ptr(), window_style, x as i32, y as i32, width, height, null_mut(), null_mut(), self.h_instance, data, ); let window_id = WindowId::new(window_handle as *mut std::ffi::c_void); redraw_manager::add_draw_request(window_id); WINDOWS_TO_REDRAW.push(window_id); window_id } } fn quit(&self) { unsafe { PostQuitMessage(0); } } fn set_cursor(&mut self, cursor: Cursor) { unsafe { let cursor =
; SetCursor(super::application_windows::CURRENT_CURSOR); let mut position = POINT { x: 0, y: 0 }; GetCursorPos(&mut position); SetCursorPos(position.x, position.y); CURRENT_CURSOR = cursor; } } fn hide_cursor(&mut self) { unsafe { ShowCursor(FALSE); } } fn show_cursor(&mut self) { unsafe { ShowCursor(TRUE); } } fn raw_window_handle(&self, window_id: WindowId) -> RawWindowHandle { raw_window_handle::RawWindowHandle::Windows(raw_window_handle::windows::WindowsHandle { hwnd: unsafe { window_id.raw() }, hinstance: self.h_instance as *mut std::ffi::c_void, ..raw_window_handle::windows::WindowsHandle::empty() }) } fn start_text_input(&mut self) { todo!() } fn end_text_input(&mut self) { todo!() } fn set_text_input_rectangle( &mut self, _window_id: WindowId, _x: f64, _y: f64, _width: f64, _height: f64, ) { todo!() } } impl Drop for PlatformApplication { fn drop(&mut self) { self.quit(); } } pub struct PlatformEventLoop {} impl PlatformEventLoopTrait for PlatformEventLoop { fn run(&self, callback: Box<dyn FnMut(kapp_platform_common::Event)>) { super::event_loop_windows::run(callback); } }
match cursor { Cursor::Arrow => LoadCursorW(null_mut(), IDC_ARROW), Cursor::IBeam => LoadCursorW(null_mut(), IDC_IBEAM), Cursor::PointingHand => LoadCursorW(null_mut(), IDC_ARROW), Cursor::OpenHand => LoadCursorW(null_mut(), IDC_HAND), Cursor::ClosedHand => LoadCursorW(null_mut(), IDC_HAND), }
if_condition
[ { "content": "fn get_window_data(hwnd: HWND) -> Option<*mut WindowData> {\n\n let data = unsafe { GetWindowLongPtrW(hwnd, GWLP_USERDATA) as *mut WindowData };\n\n if data == std::ptr::null_mut() {\n\n None\n\n } else {\n\n Some(data)\n\n }\n\n}\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 0, "score": 210433.84440521352 }, { "content": "fn get_mouse_position(_this: &Object, event: *mut Object) -> (f64, f64) {\n\n unsafe {\n\n let window: *const Object = msg(event, Sels::window, ());\n\n\n\n // Are these coordinates correct or do they not correctly account for the titlebar?\n\n let backing_scale = get_backing_scale(window);\n\n let window_point: NSPoint = msg(event, Sels::locationInWindow, ());\n\n\n\n let view: *mut Object = msg(window, Sels::contentView, ());\n\n let frame: CGRect = msg(view, Sels::frame, ());\n\n\n\n let x = window_point.x * backing_scale;\n\n let y = (frame.size.height - window_point.y) * backing_scale; // Flip y coordinate because y is 0,0 on Mac.\n\n (x, y)\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 1, "score": 176488.32749910332 }, { "content": "type LPRECT = *mut RECT;\n\npub type LPPOINT = *mut POINT;\n\n\n\nSTRUCT! {struct POINT {\n\n x: LONG,\n\n y: LONG,\n\n}}\n\n\n\nSTRUCT! {#[debug] struct RECT {\n\n left: LONG,\n\n top: LONG,\n\n right: LONG,\n\n bottom: LONG,\n\n}}\n\n\n\nSTRUCT! {struct CREATESTRUCTA {\n\n lpCreateParams: LPVOID,\n\n hInstance: HINSTANCE,\n\n hMenu: HMENU,\n\n hwndParent: HWND,\n\n cy: c_int,\n\n cx: c_int,\n\n y: c_int,\n\n x: c_int,\n\n style: LONG,\n\n lpszName: LPCSTR,\n\n lpszClass: LPCSTR,\n\n dwExStyle: DWORD,\n\n}}\n\n\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 2, "score": 170124.7736572946 }, { "content": "// https://docs.microsoft.com/en-us/windows/win32/winmsg/wm-size\n\nfn get_width_height(l_param: LPARAM) -> (u32, u32) {\n\n let width = LOWORD(l_param as u32) as u32;\n\n let height = HIWORD(l_param as u32) as u32;\n\n (width, height)\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 3, "score": 169096.30784688733 }, { "content": "// Draw a rect using glScissor.\n\n// This is not a good way to draw rectangles!\n\n// Use a proper library with shaders!\n\nfn draw_rect(gl: &Context, rect: &Rect, color: &Color, scale: f64) {\n\n unsafe {\n\n let scale = scale as f32;\n\n gl.scissor(\n\n (rect.x * scale) as i32,\n\n (rect.y * scale) as i32,\n\n (rect.width * scale) as i32,\n\n (rect.height * scale) as i32,\n\n );\n\n gl.clear_color(color.r, color.g, color.b, color.a);\n\n gl.clear(COLOR_BUFFER_BIT | DEPTH_BUFFER_BIT);\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 4, "score": 153701.03148726618 }, { "content": "type PROCESS_DPI_AWARENESS = u32;\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 5, "score": 148056.93471098167 }, { "content": "/// Sends events that could not be sent because the user callback was borrowed.\n\nfn flush_overflow_events(callback: &mut Box<dyn 'static + FnMut(Event)>) {\n\n // Temporarily borrow the overflow event queue and pop from it to avoid\n\n // holding a reference to it during the callback.\n\n let mut next_event = {\n\n OVERFLOW_EVENTS\n\n .try_with(|events| events.borrow_mut().pop())\n\n .unwrap_or(None)\n\n };\n\n while let Some(event) = next_event {\n\n callback(event);\n\n\n\n next_event = {\n\n OVERFLOW_EVENTS\n\n .try_with(|events| events.borrow_mut().pop())\n\n .unwrap_or(None)\n\n };\n\n }\n\n}\n", 
"file_path": "kapp_platform_common/src/event_receiver.rs", "rank": 6, "score": 147199.25883444428 }, { "content": "pub fn run_async<F>(run: impl Fn(Application, Events) -> F)\n\nwhere\n\n F: 'static + Future<Output = ()>,\n\n{\n\n let (application, event_loop) = crate::initialize();\n\n event_loop.run_async(application, run);\n\n}\n\n\n\npub struct EventFuture<'a> {\n\n events: &'a Events,\n\n}\n\n\n\nimpl<'a> Future for EventFuture<'a> {\n\n type Output = Event;\n\n\n\n fn poll(self: Pin<&mut Self>, _ctx: &mut Context) -> Poll<Self::Output> {\n\n if let Some(event) = self.events.queue.borrow_mut().pop() {\n\n Poll::Ready(event)\n\n } else {\n\n Poll::Pending\n", "file_path": "src/async_application.rs", "rank": 7, "score": 147090.64065131164 }, { "content": "// Hand transcribed from here:\n\n// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values\n\npub fn virtual_keycode_to_key(key_in: &str) -> Key {\n\n match key_in {\n\n \"Cancel\" => Cancel,\n\n \"Backspace\" => Backspace,\n\n \"Tab\" => Tab,\n\n \"Clear\" => Clear,\n\n \"Return\" => Return,\n\n \"Enter\" => Return,\n\n \"Shift\" => Shift,\n\n \"ShiftLeft\" => LeftShift,\n\n \"ShiftRight\" => RightShift,\n\n \"Control\" => Control,\n\n \"ControlLeft\" => LeftControl,\n\n \"ControlRight\" => RightControl,\n\n \"AltLeft\" => LeftAlt,\n\n \"AltRight\" => RightAlt,\n\n \"Menu\" => Menu,\n\n \"Pause\" => Pause,\n\n \"CapsLock\" => CapsLock,\n\n \"KanaMode\" /*VK_HANGUL*/ => KanaHangul,\n", "file_path": "kapp_platforms/src/web/keys_web.rs", "rank": 8, "score": 143063.31425713655 }, { "content": "/// Called when the system requests a window redraw.\n\n/// If a redraw is requested this call fulfills that redraw request.\n\npub fn draw(window_id: WindowId) {\n\n DRAW_REQUESTS.with(|requests| {\n\n // First remove the draw request to avoid it being fulfilled twice.\n\n let position = requests.borrow().iter().position(|w| w == &window_id);\n\n if let Some(position) = position {\n\n requests.borrow_mut().swap_remove(position);\n\n }\n\n\n\n crate::event_receiver::send_event(Event::Draw { window_id });\n\n });\n\n}\n\n\n", "file_path": "kapp_platform_common/src/redraw_manager.rs", "rank": 9, "score": 142363.47016681364 }, { "content": "/// Creates an OpenGL context.\n\n/// h_instance is the parent module's h_instance\n\n/// class_name is the parent class's name\n\n/// panic_if_fail will crash the program with a useful callstack if something goes wrong\n\n/// color bits and alpha bits should add up to 32\n\npub fn new_opengl_context(\n\n color_bits: u8,\n\n alpha_bits: u8,\n\n depth_bits: u8,\n\n stencil_bits: u8,\n\n msaa_samples: u8,\n\n major_version: u8,\n\n minor_version: u8,\n\n srgb: bool,\n\n) -> Result<GLContext, Error> {\n\n // This function performs the following steps:\n\n // * First register the window class.\n\n // * Then create a dummy_window with that class ...\n\n // * Which is used to setup a dummy OpenGL context ...\n\n // * Which is used to load OpenGL extensions ...\n\n // * Which are used to set more specific pixel formats and specify an OpenGL version ...\n\n // * Which is used to create another dummy window ...\n\n // * Which is used to create the final OpenGL context!\n\n unsafe {\n\n // Register the window class.\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 10, "score": 142001.3006388121 }, { "content": "pub fn error_if_false(i: i32) -> Result<(), Error> {\n\n if i == 0 {\n\n Err(Error::last_os_error())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": 
"gl_context/src/windows/utils_windows.rs", "rank": 11, "score": 141798.67414254 }, { "content": "pub fn get_class(name: &str) -> *const Class {\n\n unsafe {\n\n let class = objc::runtime::objc_getClass(name.as_ptr() as *const _) as *const Class;\n\n if class.is_null() {\n\n panic!(\"Could not find: {:?}\", name);\n\n } else {\n\n class\n\n }\n\n }\n\n}\n\n\n\n/// Only call this once!\n\npub(crate) unsafe fn initialize_classes() {\n\n NSResponderClass = get_class(\"NSResponder\\u{0}\");\n\n NSViewClass = get_class(\"NSView\\u{0}\");\n\n NSApplicationClass = get_class(\"NSApplication\\u{0}\");\n\n NSCursorClass = get_class(\"NSCursor\\u{0}\");\n\n Sels::load_all();\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/apple.rs", "rank": 12, "score": 141558.78787252784 }, { "content": "pub fn add_draw_request(window_id: WindowId) {\n\n DRAW_REQUESTS.with(|d| {\n\n let mut requests = d.borrow_mut();\n\n\n\n // Only allow one queued redraw per window.\n\n if !requests.contains(&window_id) {\n\n requests.push(window_id);\n\n }\n\n })\n\n}\n\n\n", "file_path": "kapp_platform_common/src/redraw_manager.rs", "rank": 13, "score": 139424.16397494255 }, { "content": "pub fn win32_string(value: &str) -> Vec<u16> {\n\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n", "file_path": "kapp_platforms/src/windows/utils_windows.rs", "rank": 14, "score": 139283.68421025103 }, { "content": "pub fn win32_string(value: &str) -> Vec<u16> {\n\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n", "file_path": "gl_context/src/windows/utils_windows.rs", "rank": 15, "score": 139283.68421025103 }, { "content": "pub fn add_application_events_to_decl(decl: &mut ClassDecl) {\n\n unsafe {\n\n decl.add_method(\n\n Sel::from_ptr(Sels::applicationShouldTerminateAfterLastWindowClosed),\n\n application_should_terminate_after_last_window_closed\n\n as extern \"C\" fn(&Object, Sel, *mut Object) -> BOOL,\n\n );\n\n decl.add_method(\n\n Sel::from_ptr(Sels::applicationShouldTerminate),\n\n application_should_terminate as extern \"C\" fn(&Object, Sel, *mut Object) -> NSUInteger,\n\n );\n\n decl.add_method(\n\n Sel::from_ptr(Sels::applicationWillTerminate),\n\n application_will_terminate as extern \"C\" fn(&Object, Sel, *mut Object),\n\n );\n\n }\n\n}\n\n// ------------------------ End Application Events --------------------------\n\n\n\n// ------------------------ View Events --------------------------\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 16, "score": 138619.49557745247 }, { "content": "pub fn add_view_events_to_decl(decl: &mut ClassDecl) {\n\n unsafe {\n\n decl.add_method(sel!(dealloc), dealloc as extern \"C\" fn(&Object, Sel));\n\n\n\n decl.add_method(\n\n sel!(displayLayer:),\n\n display_layer as extern \"C\" fn(&Object, Sel, *mut Object),\n\n );\n\n\n\n decl.add_method(\n\n Sel::from_ptr(Sels::magnifyWithEvent),\n\n magnify_with_event as extern \"C\" fn(&Object, Sel, *mut Object),\n\n );\n\n\n\n decl.add_method(\n\n Sel::from_ptr(Sels::drawRect),\n\n draw_rect as extern \"C\" fn(&Object, Sel, CGRect),\n\n );\n\n\n\n decl.add_method(\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 17, "score": 138619.49557745247 }, { "content": "pub fn add_window_events_to_decl(decl: &mut ClassDecl) {\n\n unsafe {\n\n decl.add_method(\n\n Sel::from_ptr(Sels::windowShouldClose),\n\n window_should_close as extern \"C\" fn(&Object, Sel, *mut Object) -> BOOL,\n\n );\n\n decl.add_method(\n\n Sel::from_ptr(Sels::windowDidMiniaturize),\n\n window_did_miniaturize as extern \"C\" 
fn(&Object, Sel, *mut Object),\n\n );\n\n decl.add_method(\n\n Sel::from_ptr(Sels::windowDidDeminiaturize),\n\n window_did_deminiaturize as extern \"C\" fn(&Object, Sel, *mut Object),\n\n );\n\n\n\n decl.add_method(\n\n Sel::from_ptr(Sels::windowDidEnterFullScreen),\n\n window_did_enter_fullscreen as extern \"C\" fn(&Object, Sel, *mut Object),\n\n );\n\n decl.add_method(\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 18, "score": 138619.49557745247 }, { "content": "pub fn run(callback: Box<dyn FnMut(kapp_platform_common::Event)>) {\n\n unsafe {\n\n event_receiver::set_callback(callback);\n\n\n\n let mut message: MSG = std::mem::zeroed();\n\n\n\n while message.message != WM_QUIT {\n\n // Block and wait for messages unless there is a redraw request.\n\n // GetMessageW will return 0 if WM_QUIT is encountered\n\n while redraw_manager::draw_requests_count() == 0\n\n && GetMessageW(&mut message, null_mut(), 0, 0) > 0\n\n {\n\n TranslateMessage(&message as *const MSG);\n\n DispatchMessageW(&message as *const MSG);\n\n }\n\n\n\n if message.message == WM_QUIT {\n\n break;\n\n }\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 19, "score": 133628.6663545873 }, { "content": "fn get_pointer_position(event: &web_sys::PointerEvent) -> (f64, f64) {\n\n // 0,0 is the upper left of the canvas on web, so no transformations need to be performed.\n\n (event.client_x().into(), event.client_y().into())\n\n}\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 20, "score": 132246.8314549502 }, { "content": "struct Rect {\n\n x: f32,\n\n y: f32,\n\n width: f32,\n\n height: f32,\n\n}\n\n\n\nimpl Rect {\n\n pub fn bottom(&self) -> f32 {\n\n self.y\n\n }\n\n\n\n pub fn top(&self) -> f32 {\n\n self.y + self.height\n\n }\n\n\n\n pub fn right(&self) -> f32 {\n\n self.x + self.width\n\n }\n\n\n\n pub fn left(&self) -> f32 {\n\n self.x\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 21, "score": 131807.39785829303 }, { "content": "pub fn get_backing_scale(window_id: WindowId) -> CGFloat {\n\n unsafe { msg(window_id.raw() as *mut Object, Sels::backingScaleFactor, ()) }\n\n}\n\n\n\n// When the application is dropped, quit the program.\n\nimpl Drop for PlatformApplication {\n\n fn drop(&mut self) {\n\n self.quit();\n\n }\n\n}\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 22, "score": 130789.5683772555 }, { "content": "/// Create an Application and EventLoop.\n\npub fn initialize() -> (Application, EventLoop) {\n\n let platform_application = Rc::new(RefCell::new(PlatformApplication::new()));\n\n let platform_event_loop = platform_application.borrow_mut().event_loop();\n\n let state_tracker = Rc::new(RefCell::new(StateTracker::new()));\n\n (\n\n Application {\n\n platform_application: platform_application.clone(),\n\n state_tracker: state_tracker.clone(),\n\n },\n\n EventLoop {\n\n platform_event_loop,\n\n state_tracker: state_tracker.clone(),\n\n },\n\n )\n\n}\n\n\n\nimpl Application {\n\n /// Returns a new window builder.\n\n /// Call .build() on the window builder to complete the creation of the window.\n\n /// See [`crate::window_builder::WindowBuilder`] for more ways to setup a window.\n", "file_path": "src/application.rs", "rank": 23, "score": 128901.46237480626 }, { "content": "pub fn set_callback(callback: Box<dyn FnMut(Event)>) {\n\n PROGRAM_CALLBACK.with(|p| {\n\n let _ = p.replace(callback);\n\n });\n\n}\n\n\n", "file_path": "kapp_platform_common/src/event_receiver.rs", "rank": 24, "score": 
123754.30277608073 }, { "content": "#[inline]\n\npub fn GET_X_LPARAM(lp: LPARAM) -> c_int {\n\n LOWORD(lp as DWORD) as c_short as c_int\n\n}\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 25, "score": 122340.11339735123 }, { "content": "pub fn get_draw_request() -> Option<WindowId> {\n\n DRAW_REQUESTS_SWAP.with(|swap| swap.borrow_mut().pop())\n\n}\n", "file_path": "kapp_platform_common/src/redraw_manager.rs", "rank": 26, "score": 118355.21608867629 }, { "content": "fn create_dummy_window(h_instance: HINSTANCE, class_name: &Vec<u16>) -> HWND {\n\n let title = win32_string(\"kapp Placeholder\");\n\n\n\n unsafe {\n\n // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw\n\n CreateWindowExW(\n\n 0, // extended style Is this ok?\n\n class_name.as_ptr(), // A class created by RegisterClass\n\n title.as_ptr(), // window title\n\n WS_CLIPSIBLINGS | WS_CLIPCHILDREN, // style\n\n 0, // x position\n\n 0, // y position\n\n 1, // width\n\n 1, // height\n\n null_mut(), // parent window\n\n null_mut(), // menu\n\n h_instance, // Module handle\n\n null_mut(), // Data sent to window\n\n )\n\n }\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 27, "score": 115146.60797370889 }, { "content": "// Check if two rectangles overlap and if so return penetration depths\n\nfn rect_overlap(rect0: &Rect, rect1: &Rect) -> Option<(f32, f32)> {\n\n if rect0.left() < rect1.right()\n\n && rect0.right() > rect1.left()\n\n && rect0.bottom() < rect1.top()\n\n && rect0.top() > rect1.bottom()\n\n {\n\n let penetration_y0 = rect0.bottom() - rect1.top();\n\n let penetration_y1 = rect0.top() - rect1.bottom();\n\n let penetration_x0 = rect0.left() - rect1.right();\n\n let penetration_x1 = rect0.right() - rect1.left();\n\n // Find the direction along the y axis that penetrates the least.\n\n let penetration_y = if penetration_y0.abs() < penetration_y1.abs() {\n\n penetration_y0\n\n } else {\n\n penetration_y1\n\n };\n\n\n\n let penetration_x = if penetration_x0.abs() < penetration_x1.abs() {\n\n penetration_x0\n\n } else {\n\n penetration_x1\n\n };\n\n Some((penetration_x, penetration_y))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 28, "score": 112081.27518724446 }, { "content": "pub fn request_frame() {\n\n unsafe {\n\n request_animation_frame(REQUEST_ANIMATION_FRAME_CLOSURE.as_ref().unwrap());\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 29, "score": 107702.74447675627 }, { "content": "/// Called when starting to iterate through all draw requests.\n\npub fn begin_draw_flush() {\n\n DRAW_REQUESTS_SWAP.with(|swap| {\n\n DRAW_REQUESTS.with(|requests| requests.swap(swap));\n\n });\n\n}\n\n\n", "file_path": "kapp_platform_common/src/redraw_manager.rs", "rank": 30, "score": 107702.74447675627 }, { "content": "pub fn request_fullscreen() {\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document\n\n .get_element_by_id(\"canvas\")\n\n .unwrap()\n\n .dyn_into::<web_sys::HtmlCanvasElement>()\n\n .unwrap();\n\n\n\n canvas.request_fullscreen().unwrap();\n\n}\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 31, "score": 107702.74447675627 }, { "content": "// https://docs.microsoft.com/en-us/windows/win32/inputdev/wm-mousemove\n\nfn resize_event(hwnd: HWND, l_param: LPARAM, w_param: WPARAM) {\n\n let (width, height) = get_width_height(l_param);\n\n // First send the resize event\n\n produce_event(Event::WindowResized {\n\n 
width,\n\n height,\n\n window_id: WindowId::new(hwnd as *mut std::ffi::c_void),\n\n });\n\n\n\n // Then send more specific events.\n\n match w_param {\n\n SIZE_MAXIMIZED => produce_event(Event::WindowMaximized {\n\n window_id: WindowId::new(hwnd as *mut std::ffi::c_void),\n\n }),\n\n SIZE_MINIMIZED => produce_event(Event::WindowMinimized {\n\n window_id: WindowId::new(hwnd as *mut std::ffi::c_void),\n\n }),\n\n SIZE_RESTORED => {\n\n /* Quote from the docs: \"The window has been resized, but\n\n neither the SIZE_MINIMIZED nor SIZE_MAXIMIZED value applies\" */\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 32, "score": 106002.38122018398 }, { "content": "pub fn draw_requests_count() -> usize {\n\n DRAW_REQUESTS.with(|d| d.borrow().len())\n\n}\n\n\n", "file_path": "kapp_platform_common/src/redraw_manager.rs", "rank": 33, "score": 102215.969831631 }, { "content": "fn process_event(callback: &mut Box<dyn FnMut(Event)>, event: &SDL_Event) {\n\n unsafe {\n\n match event.type_ {\n\n SDL_QUIT => callback(Event::QuitRequested),\n\n SDL_WINDOWEVENT => {\n\n let window_event = event.window;\n\n let window_id =\n\n WindowId::new(SDL_GetWindowFromID(window_event.windowID) as *mut c_void);\n\n match window_event.event {\n\n SDL_WINDOWEVENT_MINIMIZED => callback(Event::WindowMinimized { window_id }),\n\n SDL_WINDOWEVENT_MAXIMIZED => callback(Event::WindowMaximized { window_id }),\n\n // There is no SDL_WINDOWEVENT_FULLSCREENED\n\n // There is no equivalent to WindowStartResize\n\n // There is no equivalent to WindowEndResize\n\n // There is no equivalent to WindowScaleChanged\n\n SDL_WINDOWEVENT_RESTORED => callback(Event::WindowRestored { window_id }),\n\n SDL_WINDOWEVENT_MOVED => callback(Event::WindowMoved {\n\n window_id,\n\n x: window_event.data1 as u32,\n\n y: window_event.data2 as u32,\n", "file_path": "kapp_platforms/src/sdl/mod.rs", "rank": 34, "score": 101744.4754915665 }, { "content": "/// Sends an event to the user callback\n\npub fn send_event(event: Event) {\n\n // try_with because events may be sent during destruction, which should be ignored.\n\n let _ = PROGRAM_CALLBACK.try_with(|p| {\n\n if let Ok(mut callback) = p.try_borrow_mut() {\n\n (callback.as_mut())(event);\n\n\n\n // Flush events here to somewhat preserve the ordering of events.\n\n flush_overflow_events(&mut callback);\n\n } else {\n\n // If the callback is in use then push the event to overflow events to be\n\n // processed later.\n\n OVERFLOW_EVENTS.with(|events| {\n\n events.borrow_mut().push(event);\n\n });\n\n }\n\n });\n\n}\n\n\n", "file_path": "kapp_platform_common/src/event_receiver.rs", "rank": 35, "score": 98992.81175393029 }, { "content": "// https://docs.microsoft.com/en-us/windows/win32/inputdev/wm-mousemove\n\nfn process_mouse_move_event(_hwnd: HWND, l_param: LPARAM) -> Event {\n\n let x = GET_X_LPARAM(l_param);\n\n let y = GET_Y_LPARAM(l_param);\n\n\n\n Event::PointerMoved {\n\n x: x as f64,\n\n y: y as f64,\n\n source: PointerSource::Mouse,\n\n timestamp: get_message_time(),\n\n }\n\n}\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 36, "score": 98788.65527831024 }, { "content": "#[inline]\n\npub fn LOWORD(l: DWORD) -> WORD {\n\n (l & 0xffff) as WORD\n\n}\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 37, "score": 97907.55817696729 }, { "content": "#[inline]\n\npub fn HIWORD(l: DWORD) -> WORD {\n\n ((l >> 16) & 0xffff) as WORD\n\n}\n\n\n\nDECLARE_HANDLE! 
{HINSTANCE, HINSTANCE__}\n\npub type HMODULE = HINSTANCE;\n\n\n\n// copied from https://github.com/retep998/winapi-rs/blob/0.3/src/shared/windef.rs\n\nDECLARE_HANDLE! {HWND, HWND__}\n\nDECLARE_HANDLE! {HICON, HICON__}\n\nDECLARE_HANDLE! {HMENU, HMENU__}\n\nDECLARE_HANDLE! {HBRUSH, HBRUSH__}\n\n\n\nDECLARE_HANDLE! {HIMC, HIMC__}\n\n\n\npub type HCURSOR = HICON;\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 38, "score": 97907.55817696729 }, { "content": "pub fn run<T>(callback: T)\n\nwhere\n\n T: 'static + FnMut(Event),\n\n{\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document\n\n .get_element_by_id(\"canvas\")\n\n .unwrap()\n\n .dyn_into::<web_sys::HtmlCanvasElement>()\n\n .unwrap();\n\n\n\n // While the following is 'unsafe' and uses global data in a funky way, it's actually safe because web's main loop is single threaded.\n\n // An alternative approach is documented here: https://rustwasm.github.io/docs/wasm-bindgen/examples/request-animation-frame.html\n\n // It may be better, but for now I found the following simpler to understand and implement.\n\n unsafe {\n\n CALLBACK = Some(Box::new(Box::new(callback)));\n\n {\n\n let canvas = canvas.clone();\n\n REQUEST_ANIMATION_FRAME_CLOSURE = Some(Closure::wrap(Box::new(move || {\n\n let canvas_client_width = canvas.client_width() as u32;\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 39, "score": 94641.42233386252 }, { "content": "fn new_shader(\n\n gl: &glow::Context,\n\n shader_type: u32,\n\n source: &str,\n\n) -> <Context as HasContext>::Shader {\n\n #[cfg(all(target_arch = \"wasm32\"))]\n\n let version = \"#version 300 es\";\n\n #[cfg(all(not(target_arch = \"wasm32\")))]\n\n let version = \"#version 410\";\n\n\n\n let source = &format!(\"{}\\n{}\", version, source);\n\n unsafe {\n\n let shader = gl.create_shader(shader_type).unwrap();\n\n gl.shader_source(shader, source);\n\n gl.compile_shader(shader);\n\n\n\n if !gl.get_shader_compile_status(shader) {\n\n log!(\"Type: {:?}\", shader_type);\n\n log!(\"{}\", source);\n\n log!(\"{}\", gl.get_shader_info_log(shader));\n\n }\n\n\n\n shader\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/triangle.rs", "rank": 40, "score": 93171.74021861567 }, { "content": "pub fn scancode_to_key(key_in: SDL_Scancode) -> Key {\n\n match key_in {\n\n SDL_SCANCODE_0 => Digit0,\n\n SDL_SCANCODE_1 => Digit1,\n\n SDL_SCANCODE_2 => Digit2,\n\n SDL_SCANCODE_3 => Digit3,\n\n SDL_SCANCODE_4 => Digit4,\n\n SDL_SCANCODE_5 => Digit5,\n\n SDL_SCANCODE_6 => Digit6,\n\n SDL_SCANCODE_7 => Digit7,\n\n SDL_SCANCODE_8 => Digit8,\n\n SDL_SCANCODE_9 => Digit9,\n\n SDL_SCANCODE_A => A,\n\n SDL_SCANCODE_B => B,\n\n SDL_SCANCODE_C => C,\n\n SDL_SCANCODE_D => D,\n\n SDL_SCANCODE_E => E,\n\n SDL_SCANCODE_F => F,\n\n SDL_SCANCODE_G => G,\n\n SDL_SCANCODE_H => H,\n", "file_path": "kapp_platforms/src/sdl/keys_sdl.rs", "rank": 41, "score": 93134.0602566064 }, { "content": "pub fn virtual_keycode_to_key(key_in: u16) -> Key {\n\n match key_in {\n\n 0x1D => Digit0,\n\n 0x12 => Digit1,\n\n 0x13 => Digit2,\n\n 0x14 => Digit3,\n\n 0x15 => Digit4,\n\n 0x17 => Digit5,\n\n 0x16 => Digit6,\n\n 0x1A => Digit7,\n\n 0x1C => Digit8,\n\n 0x19 => Digit9,\n\n 0x00 => A,\n\n 0x0B => B,\n\n 0x08 => C,\n\n 0x02 => D,\n\n 0x0E => E,\n\n 0x03 => F,\n\n 0x05 => G,\n\n 0x04 => H,\n", "file_path": "kapp_platforms/src/macos/keys_mac.rs", "rank": 42, "score": 93134.0602566064 }, { "content": "#[inline]\n\npub fn GET_Y_LPARAM(lp: LPARAM) -> c_int {\n\n HIWORD(lp as DWORD) as 
c_short as c_int\n\n}\n\n\n\n// Copied from https://github.com/retep998/winapi-rs/blob/0.3/src/um/libloaderapi.rs\n\nextern \"system\" {\n\n pub fn GetModuleHandleW(lpModuleName: LPCWSTR) -> HMODULE;\n\n}\n\n\n\n// Copied from https://github.com/retep998/winapi-rs/blob/0.3/src/um/winuser.rs\n\nFN! {stdcall WNDPROC(\n\n HWND,\n\n UINT,\n\n WPARAM,\n\n LPARAM,\n\n) -> LRESULT}\n\n\n\npub type LPMSG = *mut MSG;\n\n\n\n#[link(name = \"user32\")]\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 43, "score": 93134.0602566064 }, { "content": "fn send_mouse_move(this: &Object, event: *mut Object) {\n\n let mouse_lock = APPLICATION_DATA.with(|d| d.borrow().mouse_lock);\n\n\n\n // These deltas are probably smoothed, right?\n\n // So they're less good for something like first-person controls?\n\n // Investigation is required to see if there's a more \"raw\" input\" that\n\n // should be exposed.\n\n let delta_x: CGFloat = unsafe { msg_send![event, deltaX] };\n\n let delta_y: CGFloat = unsafe { msg_send![event, deltaY] };\n\n\n\n let timestamp = get_timestamp(event);\n\n submit_event(Event::MouseMotion {\n\n delta_x,\n\n delta_y,\n\n timestamp,\n\n });\n\n\n\n if !mouse_lock {\n\n let (x, y) = get_mouse_position(this, event);\n\n self::submit_event(Event::PointerMoved {\n\n x,\n\n y,\n\n source: PointerSource::Mouse,\n\n timestamp,\n\n });\n\n }\n\n}\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 44, "score": 92038.15666473634 }, { "content": "fn new_shader_program(\n\n gl: &glow::Context,\n\n vertex_source: &str,\n\n fragment_source: &str,\n\n) -> <Context as HasContext>::Program {\n\n unsafe {\n\n let vertex_shader = new_shader(gl, VERTEX_SHADER, vertex_source);\n\n let fragment_shader = new_shader(gl, FRAGMENT_SHADER, fragment_source);\n\n\n\n let shader_program = gl.create_program().unwrap();\n\n gl.attach_shader(shader_program, vertex_shader);\n\n gl.attach_shader(shader_program, fragment_shader);\n\n gl.link_program(shader_program);\n\n\n\n if !gl.get_program_link_status(shader_program) {\n\n log!(\"{}\", gl.get_program_info_log(shader_program));\n\n }\n\n shader_program\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/triangle.rs", "rank": 45, "score": 91043.56380960968 }, { "content": "// Make sure the rect doesn't leave the screen\n\n// And bounce it off the walls if it does\n\n// Also detect if the rect is touching the floor.\n\nfn check_rect_bounds(\n\n rect: &mut Rect,\n\n velocity: &mut (f32, f32),\n\n screen_width: i32,\n\n screen_height: i32,\n\n grounded: &mut bool,\n\n) {\n\n if rect.bottom() < 0. {\n\n rect.y = 0.;\n\n velocity.1 = 0.;\n\n *grounded = true;\n\n }\n\n\n\n if rect.left() < 0. 
{\n\n rect.x = 0.;\n\n }\n\n\n\n if rect.right() > screen_width as f32 {\n\n rect.x = screen_width as f32 - rect.width;\n\n }\n\n\n\n if rect.top() > screen_height as f32 {\n\n rect.y = screen_height as f32 - rect.height;\n\n }\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 46, "score": 91024.10580102363 }, { "content": "fn get_timestamp(event: *mut Object) -> std::time::Duration {\n\n let number: f64 = unsafe { msg(event, Sels::timestamp, ()) };\n\n std::time::Duration::from_secs_f64(number)\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 47, "score": 88438.99796943954 }, { "content": "fn wgl_get_proc_address(name: &str) -> Result<*const c_void, Error> {\n\n let name = std::ffi::CString::new(name).unwrap();\n\n let result = unsafe { wglGetProcAddress(name.as_ptr() as *const i8) as *const c_void };\n\n error_if_null(result)?;\n\n Ok(result)\n\n}\n\n\n\n// These definitions are based on the wglext.h header available here:\n\n// https://www.khronos.org/registry/OpenGL/api/GL/wglext.h\n\n#[allow(non_snake_case, non_upper_case_globals)]\n\nstatic mut wglChoosePixelFormatARB_ptr: *const c_void = std::ptr::null();\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 48, "score": 87278.3422975143 }, { "content": "pub fn error_if_null<T>(pointer: *const T) -> Result<(), Error> {\n\n if pointer.is_null() {\n\n Err(Error::last_os_error())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "gl_context/src/windows/utils_windows.rs", "rank": 49, "score": 84003.34725359702 }, { "content": "pub trait PlatformApplicationTrait {\n\n type EventLoop: PlatformEventLoopTrait;\n\n\n\n fn new() -> Self;\n\n fn event_loop(&mut self) -> Self::EventLoop;\n\n\n\n /// Sets window position in physical coordinates on its current screen.\n\n fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32);\n\n /// Sets window size with physical coordinates.\n\n fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32);\n\n fn set_window_title(&mut self, window_id: WindowId, title: &str);\n\n fn minimize_window(&mut self, window_id: WindowId);\n\n fn maximize_window(&mut self, window_id: WindowId);\n\n fn fullscreen_window(&mut self, window_id: WindowId);\n\n /// Returns the window to the state where it's not minimized, maximized, or fullscreen\n\n fn restore_window(&mut self, window_id: WindowId);\n\n fn close_window(&mut self, window_id: WindowId);\n\n\n\n fn get_window_size(&mut self, _window_id: WindowId) -> (u32, u32);\n\n fn get_window_scale(&mut self, _window_id: WindowId) -> f64;\n", "file_path": "kapp_platform_common/src/platform_traits.rs", "rank": 50, "score": 82742.9163595894 }, { "content": "// Hand transcribed from here:\n\n// It took a while\n\n// https://docs.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes\n\n// Also useful:\n\n// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/key/Key_Values\n\npub fn virtual_keycode_to_key(key_in: std::os::raw::c_int) -> Key {\n\n match key_in {\n\n VK_CANCEL => Cancel,\n\n VK_BACK => Backspace,\n\n VK_TAB => Tab,\n\n VK_CLEAR => Clear,\n\n VK_RETURN => Return,\n\n VK_SHIFT => Shift,\n\n VK_CONTROL => Control,\n\n VK_MENU => Menu,\n\n VK_PAUSE => Pause,\n\n VK_CAPITAL => CapsLock,\n\n VK_KANA /*VK_HANGUL*/ => KanaHangul,\n\n VK_JUNJA => Junja,\n\n VK_FINAL => FinalMode,\n\n VK_HANJA /*VK_KANJI*/ => HanjaKanji,\n\n VK_ESCAPE => Escape,\n\n VK_CONVERT => Convert,\n\n VK_NONCONVERT => NonConvert,\n\n VK_ACCEPT => Accept,\n", "file_path": 
"kapp_platforms/src/windows/keys_windows.rs", "rank": 51, "score": 82709.24808666052 }, { "content": "fn request_animation_frame(f: &Closure<dyn FnMut()>) {\n\n window()\n\n .request_animation_frame(f.as_ref().unchecked_ref())\n\n .expect(\"should register `requestAnimationFrame` OK\");\n\n}\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 52, "score": 82656.40342305611 }, { "content": "fn get_proc_address_inner(opengl_module: HMODULE, address: &str) -> *const core::ffi::c_void {\n\n unsafe {\n\n let name = std::ffi::CString::new(address).unwrap();\n\n let mut result = wglGetProcAddress(name.as_ptr() as *const i8) as *const std::ffi::c_void;\n\n if result.is_null() {\n\n // Functions that were part of OpenGL1 need to be loaded differently.\n\n result = GetProcAddress(opengl_module, name.as_ptr() as *const i8)\n\n as *const std::ffi::c_void;\n\n }\n\n\n\n /*\n\n if result.is_null() {\n\n println!(\"FAILED TO LOAD: {}\", address);\n\n } else {\n\n println!(\"Loaded: {} {:?}\", address, result);\n\n }\n\n */\n\n result\n\n }\n\n}\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 53, "score": 79926.72757944334 }, { "content": "fn get_pointer_type(event: &web_sys::PointerEvent) -> PointerSource {\n\n match event.pointer_type().as_str() {\n\n \"mouse\" => PointerSource::Mouse,\n\n \"pen\" => PointerSource::Pen,\n\n \"touch\" => PointerSource::Touch,\n\n _ => PointerSource::Unknown,\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 54, "score": 68616.81928141654 }, { "content": "struct Block {\n\n rect: Rect,\n\n color: Color,\n\n}\n", "file_path": "gl_context/examples/platformer.rs", "rank": 55, "score": 65910.08467483356 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Color {\n\n r: f32,\n\n g: f32,\n\n b: f32,\n\n a: f32,\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 56, "score": 65910.08467483356 }, { "content": "struct Player {\n\n velocity: (f32, f32),\n\n rect: Rect,\n\n grounded: bool,\n\n}\n\n\n", "file_path": "gl_context/examples/platformer.rs", "rank": 57, "score": 65910.08467483356 }, { "content": "fn main() {\n\n let (app, event_loop) = initialize();\n\n let _window = app.new_window().title(\"Log Events\").build().unwrap();\n\n\n\n event_loop.run(move |event| match event {\n\n // EventsCleared and MouseMoved log too much, so ignore them.\n\n Event::EventsCleared | Event::PointerMoved { .. } => {}\n\n Event::WindowCloseRequested { .. } => app.quit(),\n\n _ => {\n\n println!(\"{:?}\", event);\n\n }\n\n });\n\n}\n", "file_path": "examples/events.rs", "rank": 58, "score": 61180.55311836843 }, { "content": "fn main() {\n\n let (app, event_loop) = initialize();\n\n let _window = app.new_window().minimum_size(1000, 1000).build().unwrap();\n\n\n\n event_loop.run(move |event| match event {\n\n Event::WindowCloseRequested { .. } => app.quit(),\n\n Event::Draw { .. } => {\n\n // Render something here.\n\n }\n\n _ => {}\n\n });\n\n}\n", "file_path": "examples/hello.rs", "rank": 59, "score": 61180.55311836843 }, { "content": "type HRESULT = c_long;\n\n\n\nSTRUCT! {struct MSG {\n\n hwnd: HWND,\n\n message: UINT,\n\n wParam: WPARAM,\n\n lParam: LPARAM,\n\n time: DWORD,\n\n pt: POINT,\n\n}}\n\n\n\nSTRUCT! 
{struct WNDCLASSW {\n\n style: UINT,\n\n lpfnWndProc: WNDPROC,\n\n cbClsExtra: c_int,\n\n cbWndExtra: c_int,\n\n hInstance: HINSTANCE,\n\n hIcon: HICON,\n\n hCursor: HCURSOR,\n\n hbrBackground: HBRUSH,\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 60, "score": 60000.655670448265 }, { "content": "fn main() {\n\n run_async(run);\n\n}\n\n\n\nasync fn run(app: Application, events: Events) {\n\n let mut _window = app.new_window().build().unwrap();\n\n\n\n // Loop forever!\n\n loop {\n\n match events.next().await {\n\n Event::WindowCloseRequested { .. } => app.quit(),\n\n Event::Draw { .. } => {}\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "examples/hello_async.rs", "rank": 61, "score": 59906.68501554347 }, { "content": "fn main() {\n\n let (app, event_loop) = initialize();\n\n let _window = app.new_window().title(\"Log Events\").build().unwrap();\n\n\n\n app.start_text_input();\n\n event_loop.run(move |event| match event {\n\n // Just log text input related events.\n\n Event::IMEComposition { .. }\n\n | Event::IMEEndComposition\n\n | Event::CharacterReceived { .. } => println!(\"{:?}\", event),\n\n Event::WindowCloseRequested { .. } => app.quit(),\n\n _ => {}\n\n });\n\n}\n", "file_path": "examples/text_input.rs", "rank": 62, "score": 59906.68501554347 }, { "content": "fn main() {\n\n let (app, event_loop) = initialize();\n\n let _window = app.new_window().build().unwrap();\n\n\n\n event_loop.run(move |event| {\n\n if app.pointer_button(PointerButton::Primary) {\n\n println!(\"Mouse pressed\");\n\n }\n\n\n\n match event {\n\n Event::WindowCloseRequested { .. } => app.quit(),\n\n _ => {}\n\n }\n\n });\n\n}\n", "file_path": "examples/state_tracking.rs", "rank": 63, "score": 59906.68501554347 }, { "content": "fn main() {\n\n // Create a new application with default settings.\n\n let (app, event_loop) = initialize();\n\n\n\n let mut screen_width = 500;\n\n let mut screen_height = 500;\n\n\n\n let mut gl_context = GLContext::new().build().unwrap(); // Create a gl_context for the app\n\n #[cfg(target_arch = \"wasm32\")]\n\n let gl = glow::Context::from_webgl1_context(gl_context.webgl1_context().unwrap());\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let gl = glow::Context::from_loader_function(|s| gl_context.get_proc_address(s));\n\n\n\n unsafe {\n\n gl.enable(SCISSOR_TEST);\n\n }\n\n\n\n let window = app\n\n .new_window()\n\n .title(\"Platformer\")\n", "file_path": "gl_context/examples/platformer.rs", "rank": 64, "score": 58715.07211790217 }, { "content": "fn main() {\n\n // Create a new application with default settings.\n\n let (app, event_loop) = initialize();\n\n let window = app\n\n .new_window()\n\n .size(400, 400)\n\n .title(\"Hello\")\n\n .build()\n\n .unwrap();\n\n let mut gl_context = GLContext::new().build().unwrap(); // Create a gl_context for the app\n\n\n\n gl_context.set_window(Some(&window)).unwrap();\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n let gl = glow::Context::from_webgl2_context(gl_context.webgl2_context().unwrap());\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let gl = glow::Context::from_loader_function(|s| gl_context.get_proc_address(s));\n\n\n\n setup(&gl);\n\n\n", "file_path": "gl_context/examples/triangle.rs", "rank": 65, "score": 58715.07211790217 }, { "content": "pub trait GLContextTrait {\n\n /// Gets the pixel format and attributes of the context.\n\n fn get_attributes(&self) -> GLContextAttributes;\n\n\n\n /// Makes the GLContext current to the current thread\n\n fn make_current(&mut self) -> Result<(), std::io::Error>;\n\n\n\n 
/// Sets the Vsync for the window attached to this context.\n\n /// Returns a system error if not successful\n\n fn set_vsync(&mut self, vsync: VSync) -> Result<(), std::io::Error>;\n\n fn get_vsync(&self) -> VSync;\n\n\n\n /// Assigns a window to draw to\n\n fn set_window(\n\n &mut self,\n\n window: Option<&impl raw_window_handle::HasRawWindowHandle>,\n\n ) -> Result<(), SetWindowError>;\n\n\n\n /// Resizes the context to match the attached window\n\n fn resize(&mut self);\n", "file_path": "gl_context/src/common.rs", "rank": 66, "score": 57884.29930240255 }, { "content": "fn main() {\n\n let (app, event_loop) = initialize();\n\n let window = app.new_window().build().unwrap();\n\n\n\n // Create a GLContext\n\n let mut gl_context = GLContext::new().build().unwrap();\n\n\n\n // Assign the GLContext's window.\n\n gl_context.set_window(Some(&window)).unwrap();\n\n\n\n // Glow is a library for accessing GL function calls from a variety of platforms\n\n // Glow requires a cross platform way to load function pointers,\n\n // which GLContext provides with get_proc_address.\n\n // Glow requires different setup on web, hence the cfgs below.\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n let gl = glow::Context::from_webgl1_context(gl_context.webgl1_context().unwrap());\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n let gl = glow::Context::from_loader_function(|s| gl_context.get_proc_address(s));\n\n\n", "file_path": "gl_context/examples/simple_gl.rs", "rank": 67, "score": 57597.99662216355 }, { "content": "type LPCSTR = *const i8;\n\n\n\n// Copied from https://github.com/retep998/winapi-rs/blob/0.3/src/shared/windowsx.rs\n", "file_path": "kapp_platforms/src/windows/external_windows.rs", "rank": 68, "score": 56830.784955632815 }, { "content": "pub trait RawWindowHandleTrait {\n\n fn raw_window_handle(&self) -> raw_window_handle::RawWindowHandle;\n\n}\n", "file_path": "kapp_platform_common/src/window_id.rs", "rank": 69, "score": 55198.5028486498 }, { "content": "pub trait PlatformEventLoopTrait {\n\n /// Runs until the application quits.\n\n fn run(&self, callback: Box<dyn FnMut(crate::Event)>);\n\n}\n", "file_path": "kapp_platform_common/src/platform_traits.rs", "rank": 70, "score": 55198.5028486498 }, { "content": "#[allow(non_snake_case, non_upper_case_globals)]\n\nfn wglChoosePixelFormatARB(\n\n hdc: HDC,\n\n piAttribIList: *const c_int,\n\n pfAttribFList: *const c_float,\n\n nMaxFormats: c_uint,\n\n piFormats: *mut c_int,\n\n nNumFormats: *mut c_uint,\n\n) -> c_int {\n\n unsafe {\n\n std::mem::transmute::<\n\n _,\n\n extern \"system\" fn(\n\n HDC,\n\n *const c_int,\n\n *const c_float,\n\n c_uint,\n\n *mut c_int,\n\n *mut c_uint,\n\n ) -> c_int,\n\n >(wglChoosePixelFormatARB_ptr)(\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 71, "score": 53750.748334948 }, { "content": "fn setup(gl: &glow::Context) {\n\n let vertex_source = r#\"\n\n const vec2 verts[3] = vec2[3](\n\n vec2(0.0f, 1.0f),\n\n vec2(-1.0f, -1.0f),\n\n vec2(1.0f, -1.0f)\n\n );\n\n void main() {\n\n gl_Position = vec4(verts[gl_VertexID], 0.0, 1.0);\n\n }\n\n \"#;\n\n\n\n let fragment_source = r#\"\n\n precision mediump float;\n\n \n\n out vec4 color;\n\n void main()\n\n {\n\n color = vec4(1.0, 0.0, 0.0, 1.0);\n\n }\n", "file_path": "gl_context/examples/triangle.rs", "rank": 72, "score": 50251.16170879974 }, { "content": "fn submit_event(event: Event) {\n\n kapp_platform_common::event_receiver::send_event(event);\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 73, "score": 49915.6090808257 }, { 
"content": "fn send_event(event: Event) {\n\n unsafe {\n\n (CALLBACK.as_mut().unwrap())(event);\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 74, "score": 49083.86457289057 }, { "content": "fn window() -> web_sys::Window {\n\n web_sys::window().expect(\"no global `window` exists\")\n\n}\n\n\n\nstatic mut CALLBACK: Option<Box<dyn FnMut(Event)>> = None;\n\nstatic mut REQUEST_ANIMATION_FRAME_CLOSURE: Option<Closure<dyn FnMut()>> = None;\n\nstatic mut REQUEST_FULLSCREEN_CLOSURE: Option<Closure<dyn FnMut()>> = None;\n\nstatic mut CANVAS_HEIGHT: u32 = 0;\n\n\n", "file_path": "kapp_platforms/src/web/event_loop_web.rs", "rank": 75, "score": 49083.86457289057 }, { "content": "fn produce_event(event: Event) {\n\n event_receiver::send_event(event);\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 76, "score": 49083.86457289057 }, { "content": "fn create_run_loop_source() -> CFRunLoopSourceRef {\n\n extern \"C\" fn event_loop_proxy_handler(_: *mut std::ffi::c_void) {}\n\n\n\n unsafe {\n\n let rl = CFRunLoopGetMain();\n\n let mut context: CFRunLoopSourceContext = std::mem::zeroed();\n\n context.perform = Some(event_loop_proxy_handler);\n\n let source =\n\n CFRunLoopSourceCreate(std::ptr::null_mut(), CFIndex::max_value() - 1, &mut context);\n\n CFRunLoopAddSource(rl, source, kCFRunLoopCommonModes);\n\n CFRunLoopWakeUp(rl);\n\n source\n\n }\n\n}\n\n\n\nextern \"C\" fn control_flow_end_handler(\n\n _: CFRunLoopObserverRef,\n\n _: CFRunLoopActivity,\n\n _: *mut std::ffi::c_void,\n\n) {\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 77, "score": 47906.01801889539 }, { "content": "/// Gets the message time with millisecond precision\n\n/// https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-getmessagetime\n\nfn get_message_time() -> std::time::Duration {\n\n std::time::Duration::from_millis(unsafe { GetMessageTime() } as u64)\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 78, "score": 46073.59399492837 }, { "content": "fn view_delegate_declaration() -> *const objc::runtime::Class {\n\n let superclass = unsafe { &*NSViewClass };\n\n let mut decl = ClassDecl::new(\"kappViewClass\", superclass).unwrap();\n\n super::events_mac::add_view_events_to_decl(&mut decl);\n\n decl.register()\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 79, "score": 44809.55402650067 }, { "content": "fn window_delegate_declaration() -> *const objc::runtime::Class {\n\n let superclass = unsafe { &*NSResponderClass };\n\n let mut decl = ClassDecl::new(\"kappWindowClass\", superclass).unwrap();\n\n super::events_mac::add_window_events_to_decl(&mut decl);\n\n decl.register()\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 80, "score": 44809.55402650067 }, { "content": "fn application_delegate_declaration() -> *const objc::runtime::Class {\n\n let superclass = unsafe { &*NSResponderClass };\n\n let mut decl = ClassDecl::new(\"kappApplicationClass\", superclass).unwrap();\n\n super::events_mac::add_application_events_to_decl(&mut decl);\n\n decl.register()\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 81, "score": 44809.55402650067 }, { "content": "fn get_backing_scale(window: *const Object) -> CGFloat {\n\n unsafe { msg(window, Sels::backingScaleFactor, ()) }\n\n}\n\n\n", "file_path": "kapp_platforms/src/macos/events_mac.rs", "rank": 82, "score": 44099.450423592694 }, { "content": 
"#[allow(non_upper_case_globals)]\n\n#[allow(non_snake_case)]\n\nfn wglGetSwapIntervalEXT() -> std::os::raw::c_int {\n\n unsafe {\n\n std::mem::transmute::<_, extern \"system\" fn() -> std::os::raw::c_int>(\n\n wglGetSwapIntervalEXT_ptr,\n\n )()\n\n }\n\n}\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 83, "score": 44099.450423592694 }, { "content": "fn process_key_down(w_param: WPARAM, l_param: LPARAM) -> Event {\n\n let (_scancode, key, repeat) = process_key_event(w_param, l_param);\n\n\n\n if repeat {\n\n Event::KeyRepeat {\n\n key,\n\n timestamp: get_message_time(),\n\n }\n\n } else {\n\n Event::KeyDown {\n\n key,\n\n timestamp: get_message_time(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 84, "score": 43004.02320985573 }, { "content": "fn process_key_up(w_param: WPARAM, l_param: LPARAM) -> Event {\n\n let (_scancode, key, _repeat) = process_key_event(w_param, l_param);\n\n Event::KeyUp {\n\n key,\n\n timestamp: get_message_time(),\n\n }\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 85, "score": 43004.02320985573 }, { "content": "#[allow(non_upper_case_globals)]\n\n#[allow(non_snake_case)]\n\nfn wglSwapIntervalEXT(i: std::os::raw::c_int) -> bool {\n\n unsafe {\n\n std::mem::transmute::<_, extern \"system\" fn(std::os::raw::c_int) -> bool>(\n\n wglSwapIntervalEXT_ptr,\n\n )(i)\n\n }\n\n}\n\n\n\n// This is a C extension function requested on load.\n\n#[allow(non_upper_case_globals)]\n\nstatic mut wglGetSwapIntervalEXT_ptr: *const std::ffi::c_void = std::ptr::null();\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 86, "score": 42045.580829911945 }, { "content": "fn process_key_event(w_param: WPARAM, l_param: LPARAM) -> (UINT, Key, bool) {\n\n let scancode = ((l_param >> 16) & 16) as UINT; // bits 16-23 represent the scancode\n\n let _extended = (l_param >> 24) & 1 != 0; // bit 24 represents if its an extended key\n\n let repeat = (l_param >> 30) & 1 == 1;\n\n let key = virtual_keycode_to_key(w_param as _);\n\n (scancode, key, repeat)\n\n}\n\n\n", "file_path": "kapp_platforms/src/windows/event_loop_windows.rs", "rank": 87, "score": 39267.91969561408 }, { "content": "#[allow(non_snake_case, non_upper_case_globals)]\n\nfn wglCreateContextAttribsARB(hdc: HDC, hShareContext: HGLRC, attribList: *const c_int) -> HGLRC {\n\n unsafe {\n\n std::mem::transmute::<_, extern \"system\" fn(HDC, HGLRC, *const c_int) -> HGLRC>(\n\n wglCreateContextAttribsARB_ptr,\n\n )(hdc, hShareContext, attribList)\n\n }\n\n}\n\n\n\n// Once again these are all from here:\n\n// https://www.khronos.org/registry/OpenGL/api/GL/wglext.h\n\n// A few are commented out that may be useful later.\n\nconst WGL_DRAW_TO_WINDOW_ARB: c_int = 0x2001;\n\n// const WGL_DRAW_TO_BITMAP_ARB: c_int = 0x2002;\n\nconst WGL_ACCELERATION_ARB: c_int = 0x2003;\n\nconst WGL_SUPPORT_OPENGL_ARB: c_int = 0x2010;\n\nconst WGL_DOUBLE_BUFFER_ARB: c_int = 0x2011;\n\nconst WGL_PIXEL_TYPE_ARB: c_int = 0x2013;\n\nconst WGL_COLOR_BITS_ARB: c_int = 0x2014;\n\n// const WGL_RED_BITS_ARB: c_int = 0x2015;\n\n// const WGL_GREEN_BITS_ARB: c_int = 0x2017;\n", "file_path": "gl_context/src/windows/mod.rs", "rank": 88, "score": 36836.40067540854 }, { "content": "pub enum Cursor {\n\n Arrow,\n\n IBeam,\n\n PointingHand,\n\n ClosedHand,\n\n OpenHand,\n\n}\n", "file_path": "kapp_platform_common/src/cursors.rs", "rank": 89, "score": 36761.37207079931 }, { "content": "impl CGSize {\n\n pub fn new(width: CGFloat, height: CGFloat) -> Self {\n\n Self { width, 
height }\n\n }\n\n}\n\n\n\npub type NSSize = CGSize;\n\n\n\nimpl CGRect {\n\n pub fn new(origin: CGPoint, size: CGSize) -> Self {\n\n Self { origin, size }\n\n }\n\n}\n\n\n\nunsafe impl objc::Encode for CGRect {\n\n fn encode() -> objc::Encoding {\n\n let encoding = format!(\n\n \"{{CGRect={}{}}}\",\n\n NSPoint::encode().as_str(),\n\n NSSize::encode().as_str()\n", "file_path": "kapp_platforms/src/macos/apple.rs", "rank": 94, "score": 41.293186246138696 }, { "content": " fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) {\n\n unsafe {\n\n SDL_SetWindowPosition(window_id.raw() as *mut SDL_Window, x as i32, y as i32);\n\n }\n\n }\n\n fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) {\n\n unsafe {\n\n SDL_SetWindowPosition(\n\n window_id.raw() as *mut SDL_Window,\n\n width as i32,\n\n height as i32,\n\n );\n\n }\n\n }\n\n fn set_window_title(&mut self, window_id: WindowId, title: &str) {\n\n unsafe {\n\n let c_string = CString::new(title).unwrap();\n\n SDL_SetWindowTitle(window_id.raw() as *mut SDL_Window, c_string.as_ptr());\n\n }\n\n }\n", "file_path": "kapp_platforms/src/sdl/mod.rs", "rank": 98, "score": 36.0850505056603 }, { "content": " }\n\n\n\n fn set_window_position(&mut self, window_id: WindowId, x: u32, y: u32) {\n\n unsafe {\n\n let screen: *const Object = msg(window_id.raw() as *mut Object, Sels::screen, ());\n\n let screen_frame: CGRect = msg(screen, Sels::frame, ());\n\n\n\n let backing_scale = get_backing_scale(window_id);\n\n let () = msg(\n\n window_id.raw() as *mut Object,\n\n Sels::setFrameTopLeftPoint,\n\n (NSPoint::new(\n\n (x as f64) / backing_scale,\n\n screen_frame.size.height - (y as f64) / backing_scale,\n\n ),),\n\n );\n\n }\n\n }\n\n\n\n fn set_window_size(&mut self, window_id: WindowId, width: u32, height: u32) {\n", "file_path": "kapp_platforms/src/macos/application_mac.rs", "rank": 99, "score": 35.85826680734117 } ]
Rust
fix41/src/standard_message_header.rs
nappa85/serde_fix
1f11fc5484e6f7fd516c430a61241fb7070e7d4c
use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)] pub struct StandardMessageHeader<const T: char> { #[serde(rename = "8")] #[serde(default)] pub begin_string: fix_common::FixVersion<1>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "9")] pub body_length: u32, #[serde(rename = "35")] #[serde(default)] pub msg_type: MsgType<T>, #[serde(rename = "49")] pub sender_comp_id: String, #[serde(rename = "56")] pub target_comp_id: String, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "115")] pub on_behalf_of_comp_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "128")] pub deliver_to_comp_id: Option<String>, #[serde(rename = "90")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(alias = "91")] pub secure_data: Option<fix_common::EncodedText<91>>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "34")] pub msg_seq_num: u32, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "50")] pub sender_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "142")] pub sender_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "57")] pub target_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "143")] pub target_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "116")] pub on_behalf_of_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "144")] pub on_behalf_of_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "129")] pub deliver_to_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "145")] pub deliver_to_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "43")] pub poss_dup_flag: Option<PossDupFlag>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "97")] pub poss_resend: Option<PossResend>, #[serde(rename = "52")] pub sending_time: fix_common::UTCTimeOnly, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "122")] pub orig_sending_time: Option<fix_common::UTCTimeOnly>, } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum MsgType<const T: char> { #[serde(rename = "0")] Heartbeat, #[serde(rename = "1")] TestRequest, #[serde(rename = "2")] ResendRequest, #[serde(rename = "3")] Reject, #[serde(rename = "4")] SequenceReset, #[serde(rename = "5")] Logout, #[serde(rename = "6")] IndicationOfInterest, #[serde(rename = "7")] Advertisement, #[serde(rename = "8")] ExecutionReport, #[serde(rename = "9")] OrderCancelReject, #[serde(rename = "A")] Logon, #[serde(rename = "B")] News, #[serde(rename = "C")] Email, #[serde(rename = "D")] NewOrderSingle, #[serde(rename = "E")] NewOrderList, #[serde(rename = "F")] OrderCancelRequest, #[serde(rename = "G")] OrderCancelReplaceRequest, #[serde(rename = "H")] OrderStatusRequest, #[serde(rename = "J")] Allocation, #[serde(rename = "K")] ListCancelRequest, #[serde(rename = "L")] ListExecute, #[serde(rename = "M")] ListStatusRequest, #[serde(rename = "N")] ListStatus, #[serde(rename = "P")] AllocationAck, #[serde(rename = "Q")] DonTKnowTrade, #[serde(rename = "R")] QuoteRequest, #[serde(rename = "S")] Quote, #[serde(rename = "T")] SettlementInstructions, } impl<const T: char> Default for 
MsgType<T> { fn default() -> Self { match T { '0' => MsgType::Heartbeat, '1' => MsgType::TestRequest, '2' => MsgType::ResendRequest, '3' => MsgType::Reject, '4' => MsgType::SequenceReset, '5' => MsgType::Logout, '6' => MsgType::IndicationOfInterest, '7' => MsgType::Advertisement, '8' => MsgType::ExecutionReport, '9' => MsgType::OrderCancelReject, 'A' => MsgType::Logon, 'B' => MsgType::News, 'C' => MsgType::Email, 'D' => MsgType::NewOrderSingle, 'E' => MsgType::NewOrderList, 'F' => MsgType::OrderCancelRequest, 'G' => MsgType::OrderCancelReplaceRequest, 'H' => MsgType::OrderStatusRequest, 'J' => MsgType::Allocation, 'K' => MsgType::ListCancelRequest, 'L' => MsgType::ListExecute, 'M' => MsgType::ListStatusRequest, 'N' => MsgType::ListStatus, 'P' => MsgType::AllocationAck, 'Q' => MsgType::DonTKnowTrade, 'R' => MsgType::QuoteRequest, 'S' => MsgType::Quote, 'T' => MsgType::SettlementInstructions, _ => unimplemented!(), } } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossDupFlag { #[serde(rename = "Y")] PossibleDuplicate, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossDupFlag { fn default() -> Self { PossDupFlag::PossibleDuplicate } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossResend { #[serde(rename = "Y")] PossibleResend, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossResend { fn default() -> Self { PossResend::PossibleResend } }
use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)] pub struct StandardMessageHeader<const T: char> { #[serde(rename = "8")] #[serde(default)] pub begin_string: fix_common::FixVersion<1>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "9")] pub body_length: u32, #[serde(rename = "35")] #[serde(default)] pub msg_type: MsgType<T>, #[serde(rename = "49")] pub sender_comp_id: String, #[serde(rename = "56")] pub target_comp_id: String, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "115")] pub on_behalf_of_comp_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "128")] pub deliver_to_comp_id: Option<String>, #[serde(rename = "90")] #[serde(skip_serializing_if = "Option::is_none")] #[serde(alias = "91")] pub secure_data: Option<fix_common::EncodedText<91>>, #[serde(deserialize_with = "fix_common::workarounds::from_str")] #[serde(rename = "34")] pub msg_seq_num: u32, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "50")] pub sender_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "142")] pub sender_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "57")] pub target_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "143")] pub target_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "116")] pub on_behalf_of_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "144")] pub on_behalf_of_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "129")] pub deliver_to_sub_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "145")] pub deliver_to_location_id: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "43")] pub poss_dup_flag: Option<PossDupFlag>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "97")] pub poss_resend: Option<PossResend>, #[serde(rename = "52")] pub sending_time: fix_common::UTCTimeOnly, #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "122")] pub orig_sending_time: Option<fix_common::UTCTimeOnly>, } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum MsgType<const T: char> { #[serde(rename = "0")] Heartbeat, #[serde(rename = "1")] TestRequest, #[serde(rename = "2")] ResendRequest, #[serde(rename = "3")] Reject, #[serde(rename = "4")] SequenceReset, #[serde(rename = "5")] Logout, #[serde(rename = "6")] IndicationOfInterest, #[serde(rename = "7")] Advertisement, #[serde(rename = "8")] ExecutionReport, #[serde(rename = "9")] OrderCancelReject, #[serde(rename = "A")] Logon, #[serde(rename = "B")] News, #[serde(rename = "C")] Email, #[serde(rename = "D")] NewOrderSingle, #[serde(rename = "E")] NewOrderList, #[serde(rename = "F")] OrderCancelRequest, #[serde(rename = "G")] OrderCancelReplaceRequest, #[serde(rename = "H")] OrderStatusRequest, #[serde(rename = "J")] Allocation, #[serde(rename = "K")] ListCancelRequest, #[serde(rename = "L")] ListExecute, #[serde(rename = "M")] ListStatusRequest, #[serde(rename = "N")] ListStatus, #[serde(rename = "P")] AllocationAck, #[serde(rename = "Q")] DonTKnowTrade, #[serde(rename = "R")] QuoteRequest, #[serde(rename = "S")] Quote, #[serde(rename = "T")] SettlementInstructions, } impl<const T: char> Default for 
MsgType<T> { fn default() -> Self { match T { '0' => MsgType::Heartbeat, '1' => MsgType::TestRequest, '2' => MsgType::ResendRequest, '3' => MsgType::Reject, '4' => MsgType::SequenceReset, '5' => MsgType::Logout, '6' => MsgType::IndicationOfInterest, '7' => MsgType::Advertisement, '
} #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossDupFlag { #[serde(rename = "Y")] PossibleDuplicate, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossDupFlag { fn default() -> Self { PossDupFlag::PossibleDuplicate } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum PossResend { #[serde(rename = "Y")] PossibleResend, #[serde(rename = "N")] OriginalTransmission, } impl Default for PossResend { fn default() -> Self { PossResend::PossibleResend } }
8' => MsgType::ExecutionReport, '9' => MsgType::OrderCancelReject, 'A' => MsgType::Logon, 'B' => MsgType::News, 'C' => MsgType::Email, 'D' => MsgType::NewOrderSingle, 'E' => MsgType::NewOrderList, 'F' => MsgType::OrderCancelRequest, 'G' => MsgType::OrderCancelReplaceRequest, 'H' => MsgType::OrderStatusRequest, 'J' => MsgType::Allocation, 'K' => MsgType::ListCancelRequest, 'L' => MsgType::ListExecute, 'M' => MsgType::ListStatusRequest, 'N' => MsgType::ListStatus, 'P' => MsgType::AllocationAck, 'Q' => MsgType::DonTKnowTrade, 'R' => MsgType::QuoteRequest, 'S' => MsgType::Quote, 'T' => MsgType::SettlementInstructions, _ => unimplemented!(), } }
function_block-function_prefix_line
[ { "content": "/// Serializes a value into a FiX `String` buffer.\n\n///\n\n/// ```\n\n/// let meal = &[\n\n/// (\"bread\", \"baguette\"),\n\n/// (\"cheese\", \"comté\"),\n\n/// (\"meat\", \"ham\"),\n\n/// (\"fat\", \"butter\"),\n\n/// ];\n\n///\n\n/// assert_eq!(\n\n/// serde_fix::to_string(meal),\n\n/// Ok(\"bread=baguette\\u{1}cheese=comté\\u{1}meat=ham\\u{1}fat=butter\\u{1}\".to_owned()));\n\n/// ```\n\npub fn to_string<T: ser::Serialize>(input: T) -> Result<String, Error> {\n\n let mut encoder = encoder::Encoder::new();\n\n input.serialize(Serializer::new(&mut encoder))?;\n\n Ok(encoder.finish(false))\n\n}\n\n\n", "file_path": "serde_fix/src/ser/mod.rs", "rank": 0, "score": 257490.1911202769 }, { "content": "/// Serializes a value into a FiX `String` buffer, with calculated checksum\n\n///\n\n/// ```\n\n/// let meal = &[\n\n/// (\"bread\", \"baguette\"),\n\n/// (\"cheese\", \"comté\"),\n\n/// (\"meat\", \"ham\"),\n\n/// (\"fat\", \"butter\"),\n\n/// ];\n\n///\n\n/// assert_eq!(\n\n/// serde_fix::to_string_checked(meal),\n\n/// Ok(\"bread=baguette\\u{1}cheese=comté\\u{1}meat=ham\\u{1}fat=butter\\u{1}10=129\\u{1}\".to_owned()));\n\n/// ```\n\npub fn to_string_checked<T: ser::Serialize>(input: T) -> Result<String, Error> {\n\n let mut encoder = encoder::Encoder::new();\n\n input.serialize(Serializer::new(&mut encoder))?;\n\n Ok(encoder.finish(true))\n\n}\n\n\n\n/// A serializer for the FiX format.\n\n///\n\n/// * Supported top-level inputs are structs, maps and sequences of pairs,\n\n/// with or without a given length.\n\n///\n\n/// * Supported keys and values are integers, bytes (if convertible to strings),\n\n/// unit structs and unit variants.\n\n///\n\n/// * Newtype structs defer to their inner values.\n\npub struct Serializer<'output> {\n\n encoder: &'output mut encoder::Encoder,\n\n}\n\n\n\nimpl<'output> Serializer<'output> {\n", "file_path": "serde_fix/src/ser/mod.rs", "rank": 1, "score": 253487.8859794081 }, { "content": "#[test]\n\nfn serialize_unit_struct() {\n\n assert_eq!(serde_fix::to_string(Unit), Ok(\"\\u{1}\".to_owned()));\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 2, "score": 224899.6148734053 }, { "content": "#[test]\n\nfn deserialize_unit_enum() {\n\n let result = vec![\n\n (\"one\".to_owned(), X::A),\n\n (\"two\".to_owned(), X::B),\n\n (\"three\".to_owned(), X::C),\n\n ];\n\n\n\n assert_eq!(\n\n serde_fix::from_str(\"one=A\\u{1}two=B\\u{1}three=C\\u{1}\"),\n\n Ok(result)\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 3, "score": 224899.414331822 }, { "content": "#[test]\n\nfn serialize_unit_enum() {\n\n let params = &[(\"one\", X::A), (\"two\", X::B), (\"three\", X::C)];\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"one=A\\u{1}two=B\\u{1}three=C\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 4, "score": 224899.414331822 }, { "content": "#[test]\n\nfn serialize_option_map_string() {\n\n let params = &[\n\n (\"first\", Some(\"hello\")),\n\n (\"middle\", None),\n\n (\"last\", Some(\"world\")),\n\n ];\n\n\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"first=hello\\u{1}last=world\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 5, "score": 221139.90213359345 }, { "content": "/// Convenience function that reads all bytes from `reader` and deserializes\n\n/// them with `from_bytes`.\n\npub fn from_reader<T, R>(mut reader: R) -> Result<T, Error>\n\nwhere\n\n T: de::DeserializeOwned,\n\n R: 
Read,\n\n{\n\n let mut buf = vec![];\n\n reader.read_to_end(&mut buf).map_err(|e| {\n\n de::Error::custom(format_args!(\"could not read input: {}\", e))\n\n })?;\n\n _from_bytes(&buf, false)\n\n}\n\n\n", "file_path": "serde_fix/src/de.rs", "rank": 6, "score": 202884.11804565397 }, { "content": "// why isn't it async?\n\n// because I haven't found a simple async flat_map equivalent\n\nfn dir_walk<P: AsRef<Path>>(dir: P) -> Vec<PathBuf> {\n\n read_dir(dir).unwrap()\n\n .flat_map(|rde| {\n\n let p = rde.unwrap().path();\n\n if p.is_dir() {\n\n DirOrFile::Dir(dir_walk(p).into_iter())\n\n }\n\n else {\n\n DirOrFile::File(Some(p))\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "enum_checker/src/main.rs", "rank": 7, "score": 202395.49171757215 }, { "content": "#[derive(Deserialize, Debug, PartialEq)]\n\nstruct NewType<T>(T);\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 8, "score": 200941.4795846826 }, { "content": "#[derive(Serialize)]\n\nstruct NewType<T>(T);\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 9, "score": 200933.51911174707 }, { "content": "/// Convenience function that reads all bytes from `reader` and deserializes\n\n/// them with `from_bytes`, with checksum\n\npub fn from_reader_checked<T, R>(mut reader: R) -> Result<T, Error>\n\nwhere\n\n T: de::DeserializeOwned,\n\n R: Read,\n\n{\n\n let mut buf = vec![];\n\n reader.read_to_end(&mut buf).map_err(|e| {\n\n de::Error::custom(format_args!(\"could not read input: {}\", e))\n\n })?;\n\n _from_bytes(&buf, true)\n\n}\n\n\n\n/// A deserializer for the FiX format.\n\n///\n\n/// * Supported top-level outputs are structs, maps and sequences of pairs,\n\n/// with or without a given length.\n\n///\n\n/// * Main `deserialize` methods defers to `deserialize_map`.\n\n///\n\n/// * Everything else but `deserialize_seq` and `deserialize_seq_fixed_size`\n", "file_path": "serde_fix/src/de.rs", "rank": 10, "score": 199823.42431864675 }, { "content": "fn clean_enum_name(name: &str) -> String {\n\n static CLEANUP: Lazy<Regex> = Lazy::new(|| Regex::new(r#\"\\([^\\)]+\\)\"#).unwrap());\n\n CLEANUP.replace_all(name, \"\").as_ref().replace(|c: char| !c.is_ascii_alphanumeric(), \" \").to_case(Case::Pascal)\n\n}\n\n\n", "file_path": "code_generator/src/main.rs", "rank": 11, "score": 199774.30568084575 }, { "content": "/// due to https://github.com/serde-rs/serde/issues/1183\n\npub fn from_str<'de, D, S>(deserializer: D) -> Result<S, D::Error>\n\n where D: Deserializer<'de>,\n\n S: FromStr,\n\n S::Err: Display,\n\n{\n\n let s = <Cow<'_, str> as Deserialize>::deserialize(deserializer)?;\n\n S::from_str(&s).map_err(|_| serde::de::Error::custom(format!(\"could not parse {}\", type_name::<S>())))\n\n}\n\n\n", "file_path": "fix_common/src/workarounds.rs", "rank": 12, "score": 179164.1547001325 }, { "content": "/// due to https://github.com/serde-rs/serde/issues/1183\n\npub fn from_opt_str<'de, D, S>(deserializer: D) -> Result<Option<S>, D::Error>\n\n where D: Deserializer<'de>,\n\n S: FromStr,\n\n S::Err: Display,\n\n{\n\n Ok(match <Option<Cow<'_, str>> as Deserialize>::deserialize(deserializer)? 
{\n\n Some(s) => Some(S::from_str(&s).map_err(|e| serde::de::Error::custom(format!(\"could not parse option {}: {}\", type_name::<S>(), e)))?),\n\n None => None,\n\n })\n\n}\n", "file_path": "fix_common/src/workarounds.rs", "rank": 13, "score": 172861.36114963674 }, { "content": "struct EnumEntry {\n\n file: PathBuf,\n\n name: String,\n\n elements: Vec<String>,\n\n}\n\n\n\nasync fn read_file(path: PathBuf) -> Result<Vec<EnumEntry>, ()> {\n\n let mut file = File::open(&path).await.map_err(|e| error!(\"Error opening file {}: {}\", path.display(), e))?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).await.map_err(|e| error!(\"Error opening file {}: {}\", path.display(), e))?;\n\n\n\n let mut entries = Vec::new();\n\n for capture in ENUM_FINDER.captures_iter(&contents) {\n\n let elements: Vec<String> = ENUM_ANALIZER.captures_iter(&capture[\"content\"]).map(|c| c.name(\"rename\").map(|m| m.as_str()).unwrap_or(&c[\"symbol\"]).to_owned()).collect();\n\n if !elements.is_empty() {\n\n entries.push(EnumEntry {\n\n file: path.clone(),\n\n name: (&capture[\"name\"]).to_owned(),\n\n elements,\n\n });\n", "file_path": "enum_checker/src/main.rs", "rank": 14, "score": 162600.51391968623 }, { "content": "#[test]\n\nfn deserialize_reader() {\n\n let result = vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(\n\n serde_fix::from_reader(\"first=23\\u{1}last=42\\u{1}\".as_bytes()),\n\n Ok(result)\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 15, "score": 160004.93860345462 }, { "content": "#[test]\n\nfn deserialize_str() {\n\n let result = vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(serde_fix::from_str(\"first=23\\u{1}last=42\\u{1}\"), Ok(result));\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 16, "score": 160004.93860345462 }, { "content": "#[test]\n\nfn deserialize_option() {\n\n let result = vec![\n\n (\"first\".to_owned(), Some(23)),\n\n (\"last\".to_owned(), Some(42)),\n\n ];\n\n assert_eq!(serde_fix::from_str(\"first=23\\u{1}last=42\\u{1}\"), Ok(result));\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 17, "score": 160004.93860345462 }, { "content": "#[test]\n\nfn deserialize_bytes() {\n\n let result = vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(\n\n serde_fix::from_bytes(\"first=23\\u{1}last=42\\u{1}\".as_bytes()),\n\n Ok(result)\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 18, "score": 160004.93860345462 }, { "content": "#[test]\n\nfn deserialize_unit() {\n\n assert_eq!(serde_fix::from_str(\"\"), Ok(()));\n\n assert_eq!(serde_fix::from_str(\"\\u{1}\"), Ok(()));\n\n assert_eq!(serde_fix::from_str(\"\\u{1}\\u{1}\"), Ok(()));\n\n assert!(serde_fix::from_str::<()>(\"first=23\\u{1}\").is_err());\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 19, "score": 160004.93860345462 }, { "content": "#[test]\n\nfn serialize_unit_type() {\n\n assert_eq!(serde_fix::to_string(()), Ok(\"\\u{1}\".to_owned()));\n\n}\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 20, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn deserialize_unit_type() {\n\n assert_eq!(serde_fix::from_str(\"\"), Ok(()));\n\n}\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 21, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn deserialize_borrowed_str() {\n\n let result = vec![(\"first\", 23), (\"last\", 42)];\n\n\n\n 
assert_eq!(serde_fix::from_str(\"first=23\\u{1}last=42\\u{1}\"), Ok(result));\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 22, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn serialize_newtype_i32() {\n\n let params = &[(\"field\", Some(NewType(11)))];\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"field=11\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 23, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn deserialize_newtype_i32() {\n\n let result = vec![(\"field\".to_owned(), NewType(11))];\n\n\n\n assert_eq!(serde_fix::from_str(\"field=11\\u{1}\"), Ok(result));\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 24, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn serialize_map_bool() {\n\n let params = &[(\"one\", true), (\"two\", false)];\n\n\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"one=true\\u{1}two=false\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 25, "score": 157515.39239695555 }, { "content": "#[test]\n\nfn serialize_option_map_bool() {\n\n let params = &[(\"one\", Some(true)), (\"two\", Some(false))];\n\n\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"one=true\\u{1}two=false\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 26, "score": 155109.68894610557 }, { "content": "#[test]\n\nfn serialize_option_map_int() {\n\n let params = &[(\"first\", Some(23)), (\"middle\", None), (\"last\", Some(42))];\n\n\n\n assert_eq!(\n\n serde_fix::to_string(params),\n\n Ok(\"first=23\\u{1}last=42\\u{1}\".to_owned())\n\n );\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 27, "score": 155109.68894610557 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"enum_checker\", about = \"Enum duplicate finder\")]\n\nstruct Opt {\n\n dir: PathBuf,\n\n}\n\n\n", "file_path": "enum_checker/src/main.rs", "rank": 28, "score": 150146.94056248968 }, { "content": "#[derive(Deserialize, Debug, PartialEq, Eq)]\n\nenum X {\n\n A,\n\n B,\n\n C,\n\n}\n\n\n", "file_path": "serde_fix/tests/test_deserialize.rs", "rank": 29, "score": 147070.26779852415 }, { "content": "#[derive(Serialize)]\n\nenum X {\n\n A,\n\n B,\n\n C,\n\n}\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 30, "score": 147062.40747223722 }, { "content": "#[derive(Serialize)]\n\nstruct Unit;\n\n\n", "file_path": "serde_fix/tests/test_serialize.rs", "rank": 31, "score": 147058.4029547879 }, { "content": "\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct QuoteRequestReject {\n\n\t/// MsgType = AG\n\n\t#[serde(flatten)]\n\n\tpub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'A', 'G'>,\n\n\t/// QuoteReqID\n\n\t#[serde(rename = \"131\")]\n\n\tpub quote_req_id: String,\n\n\t/// For tradeable quote model - used to indicate to which <a href=\"message_RFQ_Request_AH.html\" target=\"main\">RFQ Request&nbsp;(AH)</a> this <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> is in response.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"644\")]\n\n\tpub rfq_req_id: Option<String>,\n\n\t/// Reason Quote was rejected\n\n\t#[serde(rename = \"658\")]\n\n\tpub quote_request_reject_reason: QuoteRequestRejectReason,\n\n\t/// Number of related symbols (instruments) in 
Request\n\n\t#[serde(rename = \"146\")]\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 32, "score": 140112.01386854766 }, { "content": "\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct QuoteRequestReject {\n\n\t/// MsgType = AG\n\n\t#[serde(flatten)]\n\n\tpub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'A', 'G'>,\n\n\t/// QuoteReqID\n\n\t#[serde(rename = \"131\")]\n\n\tpub quote_req_id: String,\n\n\t/// For tradeable quote model - used to indicate to which <a href=\"message_RFQ_Request_AH.html\" target=\"main\">RFQ Request&nbsp;(AH)</a> this <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> is in response.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"644\")]\n\n\tpub rfq_req_id: Option<String>,\n\n\t/// Reason Quote was rejected\n\n\t#[serde(rename = \"658\")]\n\n\tpub quote_request_reject_reason: QuoteRequestRejectReason,\n\n\t/// Number of related symbols (instruments) in Request\n\n\t#[serde(rename = \"146\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 33, "score": 140112.01386854766 }, { "content": "\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct QuoteRequestReject {\n\n\t/// MsgType = AG\n\n\t#[serde(flatten)]\n\n\tpub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'A', 'G'>,\n\n\t/// QuoteReqID\n\n\t#[serde(rename = \"131\")]\n\n\tpub quote_req_id: String,\n\n\t/// For tradeable quote model - used to indicate to which <a href=\"message_RFQ_Request_AH.html\" target=\"main\">RFQ Request&nbsp;(AH)</a> this <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> is in response.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"644\")]\n\n\tpub rfq_req_id: Option<String>,\n\n\t/// Reason <a href=\"message_Quote_S.html\" target=\"main\">Quote&nbsp;(S)</a> was rejected\n\n\t#[serde(rename = \"658\")]\n\n\tpub quote_request_reject_reason: QuoteRequestRejectReason,\n\n\t/// Used to indicate whether a private negotiation is requested or if the response should be public. 
Only relevant in markets\n\n\t/// supporting both Private and Public quotes.\n", "file_path": "fix50sp2/src/messages/quote_request_reject.rs", "rank": 34, "score": 140110.6236280652 }, { "content": "\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct QuoteRequestReject {\n\n\t/// MsgType = AG\n\n\t#[serde(flatten)]\n\n\tpub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'A', 'G'>,\n\n\t/// QuoteReqID\n\n\t#[serde(rename = \"131\")]\n\n\tpub quote_req_id: String,\n\n\t/// For tradeable quote model - used to indicate to which <a href=\"message_RFQ_Request_AH.html\" target=\"main\">RFQ Request&nbsp;(AH)</a> this <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> is in response.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"644\")]\n\n\tpub rfq_req_id: Option<String>,\n\n\t/// Reason <a href=\"message_Quote_S.html\" target=\"main\">Quote&nbsp;(S)</a> was rejected\n\n\t#[serde(rename = \"658\")]\n\n\tpub quote_request_reject_reason: QuoteRequestRejectReason,\n\n\t/// Used to indicate whether a private negotiation is requested or if the response should be public. Only relevant in markets\n\n\t/// supporting both Private and Public quotes.\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 35, "score": 140110.6236280652 }, { "content": "\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct QuoteRequestReject {\n\n\t/// MsgType = AG\n\n\t#[serde(flatten)]\n\n\tpub standard_message_header: super::super::standard_message_header::StandardMessageHeader<'A', 'G'>,\n\n\t/// QuoteReqID\n\n\t#[serde(rename = \"131\")]\n\n\tpub quote_req_id: String,\n\n\t/// For tradeable quote model - used to indicate to which <a href=\"message_RFQ_Request_AH.html\" target=\"main\">RFQ Request&nbsp;(AH)</a> this <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> is in response.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"644\")]\n\n\tpub rfq_req_id: Option<String>,\n\n\t/// Reason <a href=\"message_Quote_S.html\" target=\"main\">Quote&nbsp;(S)</a> was rejected\n\n\t#[serde(rename = \"658\")]\n\n\tpub quote_request_reject_reason: QuoteRequestRejectReason,\n\n\t/// Number of related symbols (instruments) in Request\n\n\t#[serde(rename = \"146\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 36, "score": 140110.42377322563 }, { "content": "\t/// Exceeded CS01 limit\n\n\t#[serde(rename = \"15\")]\n\n\tExceededCs01Limit,\n\n}\n\n\n\nimpl Default for QuoteRequestRejectReason {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestRejectReason::UnknownSymbol\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PrivateQuote {\n\n\t/// Private Quote\n\n\t#[serde(rename = \"Y\")]\n\n\tPrivateQuote,\n\n\t/// Public Quote\n\n\t#[serde(rename = \"N\")]\n\n\tPublicQuote,\n\n}\n", "file_path": "fix50sp2/src/messages/quote_request_reject.rs", "rank": 37, "score": 140106.31268447955 }, { "content": "}\n\n\n\nimpl Default for QuoteRequestRejectReason {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestRejectReason::UnknownSymbol\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestType {\n\n\t/// Manual\n\n\t#[serde(rename = \"1\")]\n\n\tManual,\n\n\t/// 
Automatic\n\n\t#[serde(rename = \"2\")]\n\n\tAutomatic,\n\n}\n\n\n\nimpl Default for QuoteRequestType {\n\n\tfn default() -> Self {\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 38, "score": 140104.22406370757 }, { "content": "\t#[serde(rename = \"99\")]\n\n\tOther,\n\n}\n\n\n\nimpl Default for QuoteRequestRejectReason {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestRejectReason::UnknownSymbol\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestType {\n\n\t/// Manual\n\n\t#[serde(rename = \"1\")]\n\n\tManual,\n\n\t/// Automatic\n\n\t#[serde(rename = \"2\")]\n\n\tAutomatic,\n\n}\n\n\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 39, "score": 140102.2059088929 }, { "content": "\t/// Insufficient credit\n\n\t#[serde(rename = \"11\")]\n\n\tInsufficientCredit,\n\n\t/// Other\n\n\t#[serde(rename = \"99\")]\n\n\tOther,\n\n}\n\n\n\nimpl Default for QuoteRequestRejectReason {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestRejectReason::UnknownSymbol\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PrivateQuote {\n\n\t/// Private Quote\n\n\t#[serde(rename = \"Y\")]\n\n\tPrivateQuote,\n\n\t/// Public Quote\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 40, "score": 140101.15808060791 }, { "content": "\t#[serde(rename = \"C\")]\n\n\tFxSpotNextSettlement,\n\n}\n\n\n\nimpl Default for LegSettlType {\n\n\tfn default() -> Self {\n\n\t\tLegSettlType::RegularFxSpotSettlement\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteQualifier {\n\n\t/// All or none\n\n\t#[serde(rename = \"A\")]\n\n\tAllOrNone,\n\n\t/// Market On Close (MOC) (held to close)\n\n\t#[serde(rename = \"B\")]\n\n\tMarketOnClose,\n\n\t/// At the close (around/not held to close)\n\n\t#[serde(rename = \"C\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 41, "score": 140100.79601262324 }, { "content": "\t\tQuoteRequestRejectReason::UnknownSymbol\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestType {\n\n\t/// Manual\n\n\t#[serde(rename = \"1\")]\n\n\tManual,\n\n\t/// Automatic\n\n\t#[serde(rename = \"2\")]\n\n\tAutomatic,\n\n}\n\n\n\nimpl Default for QuoteRequestType {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestType::Manual\n\n\t}\n\n}\n\n\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 42, "score": 140100.68116391008 }, { "content": "\tFxSpotNextSettlement,\n\n}\n\n\n\nimpl Default for LegSettlType {\n\n\tfn default() -> Self {\n\n\t\tLegSettlType::RegularFxSpotSettlement\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteQualifier {\n\n\t/// All or none\n\n\t#[serde(rename = \"A\")]\n\n\tAllOrNone,\n\n\t/// Market On Close (MOC) (held to close)\n\n\t#[serde(rename = \"B\")]\n\n\tMarketOnClose,\n\n\t/// At the close (around/not held to close)\n\n\t#[serde(rename = \"C\")]\n\n\tAtTheClose,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 43, "score": 140099.75120685645 }, { "content": "\n\nimpl Default for LegSettlType {\n\n\tfn default() -> Self {\n\n\t\tLegSettlType::Regular\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteQualifier {\n\n\t/// All or none\n\n\t#[serde(rename = \"A\")]\n\n\tAllOrNone,\n\n\t/// Market On Close (MOC) (held to close)\n\n\t#[serde(rename = 
\"B\")]\n\n\tMarketOnClose,\n\n\t/// At the close (around/not held to close)\n\n\t#[serde(rename = \"C\")]\n\n\tAtTheClose,\n\n\t/// VWAP (Volume Weighted Avg Price)\n\n\t#[serde(rename = \"D\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 44, "score": 140099.4168471795 }, { "content": "\t#[serde(rename = \"N\")]\n\n\tPublicQuote,\n\n}\n\n\n\nimpl Default for PrivateQuote {\n\n\tfn default() -> Self {\n\n\t\tPrivateQuote::PrivateQuote\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum RespondentType {\n\n\t/// All market participants\n\n\t#[serde(rename = \"1\")]\n\n\tAllMarketParticipants,\n\n\t/// Specified market participants\n\n\t#[serde(rename = \"2\")]\n\n\tSpecifiedMarketParticipants,\n\n\t/// All Market Makers\n\n\t#[serde(rename = \"3\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 45, "score": 140098.42852222448 }, { "content": "#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QtyType {\n\n\t/// Units (shares, par, currency)\n\n\t#[serde(rename = \"0\")]\n\n\tUnits,\n\n\t/// Contracts (if used - should specify <a href=\"tag_231_ContractMultiplier.html\" target=\"bottom\">ContractMultiplier&nbsp;(231)</a> )\n\n\t#[serde(rename = \"1\")]\n\n\tContractsA,\n\n}\n\n\n\nimpl Default for QtyType {\n\n\tfn default() -> Self {\n\n\t\tQtyType::Units\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum SettlType {\n\n\t/// Regular\n\n\t#[serde(rename = \"0\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 46, "score": 140095.61193937683 }, { "content": "\tNextFundValuationPoint,\n\n\t/// Pegged\n\n\t#[serde(rename = \"P\")]\n\n\tPegged,\n\n\t/// Counter-order selection\n\n\t#[serde(rename = \"Q\")]\n\n\tCounterOrderSelection,\n\n}\n\n\n\nimpl Default for OrdType {\n\n\tfn default() -> Self {\n\n\t\tOrdType::Market\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PriceType {\n\n\t/// Percentage (e.g. 
percent of par) (often called \"dollar price\" for fixed income)\n\n\t#[serde(rename = \"1\")]\n\n\tPercentage,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 47, "score": 140093.71014300105 }, { "content": "\t\tQuoteRequestType::Manual\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteType {\n\n\t/// Indicative\n\n\t#[serde(rename = \"0\")]\n\n\tIndicative,\n\n\t/// Tradeable\n\n\t#[serde(rename = \"1\")]\n\n\tTradeable,\n\n\t/// Restricted Tradeable\n\n\t#[serde(rename = \"2\")]\n\n\tRestrictedTradeable,\n\n}\n\n\n\nimpl Default for QuoteType {\n\n\tfn default() -> Self {\n\n\t\tQuoteType::Indicative\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 48, "score": 140093.55114216762 }, { "content": "\t/// Yield\n\n\t#[serde(rename = \"9\")]\n\n\tN9,\n\n\t/// Yield\n\n\t#[serde(rename = \"10\")]\n\n\tN10,\n\n}\n\n\n\nimpl Default for QuotePriceType {\n\n\tfn default() -> Self {\n\n\t\tQuotePriceType::N1\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum OrdType {\n\n\t/// Market\n\n\t#[serde(rename = \"1\")]\n\n\tMarket,\n\n\t/// Limit\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 49, "score": 140093.35286678319 }, { "content": "\t/// Yield\n\n\t#[serde(rename = \"10\")]\n\n\tN10,\n\n}\n\n\n\nimpl Default for QuotePriceType {\n\n\tfn default() -> Self {\n\n\t\tQuotePriceType::N1\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum OrdType {\n\n\t/// Market\n\n\t#[serde(rename = \"1\")]\n\n\tMarket,\n\n\t/// Limit\n\n\t#[serde(rename = \"2\")]\n\n\tLimit,\n\n\t/// Stop\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 50, "score": 140093.35286678319 }, { "content": "}\n\n\n\nimpl Default for QuoteType {\n\n\tfn default() -> Self {\n\n\t\tQuoteType::Indicative\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Side {\n\n\t/// Buy\n\n\t#[serde(rename = \"1\")]\n\n\tBuy,\n\n\t/// Sell\n\n\t#[serde(rename = \"2\")]\n\n\tSell,\n\n\t/// Buy minus\n\n\t#[serde(rename = \"3\")]\n\n\tBuyMinus,\n\n\t/// Sell plus\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 51, "score": 140093.35286678319 }, { "content": "}\n\n\n\nimpl Default for QuoteRequestType {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestType::Manual\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteType {\n\n\t/// Indicative\n\n\t#[serde(rename = \"0\")]\n\n\tIndicative,\n\n\t/// Tradeable\n\n\t#[serde(rename = \"1\")]\n\n\tTradeable,\n\n\t/// Restricted Tradeable\n\n\t#[serde(rename = \"2\")]\n\n\tRestrictedTradeable,\n\n\t/// Counter (tradeable)\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 52, "score": 140093.14431778505 }, { "content": "\t#[serde(rename = \"M\")]\n\n\tNextFundValuationPoint,\n\n\t/// Pegged\n\n\t#[serde(rename = \"P\")]\n\n\tPegged,\n\n\t/// Counter-order selection\n\n\t#[serde(rename = \"Q\")]\n\n\tCounterOrderSelection,\n\n}\n\n\n\nimpl Default for OrdType {\n\n\tfn default() -> Self {\n\n\t\tOrdType::Market\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PriceType {\n\n\t/// Percentage (e.g. 
percent of par) (often called \"dollar price\" for fixed income)\n\n\t#[serde(rename = \"1\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 53, "score": 140093.09978140343 }, { "content": "\t#[serde(rename = \"9\")]\n\n\tN9,\n\n\t/// Yield\n\n\t#[serde(rename = \"10\")]\n\n\tN10,\n\n}\n\n\n\nimpl Default for QuotePriceType {\n\n\tfn default() -> Self {\n\n\t\tQuotePriceType::N1\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum OrdType {\n\n\t/// Market\n\n\t#[serde(rename = \"1\")]\n\n\tMarket,\n\n\t/// Limit\n\n\t#[serde(rename = \"2\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 54, "score": 140093.04104816233 }, { "content": "#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteType {\n\n\t/// Indicative\n\n\t#[serde(rename = \"0\")]\n\n\tIndicative,\n\n\t/// Tradeable\n\n\t#[serde(rename = \"1\")]\n\n\tTradeable,\n\n\t/// Restricted Tradeable\n\n\t#[serde(rename = \"2\")]\n\n\tRestrictedTradeable,\n\n\t/// Counter (tradable)\n\n\t#[serde(rename = \"3\")]\n\n\tCounter,\n\n}\n\n\n\nimpl Default for QuoteType {\n\n\tfn default() -> Self {\n\n\t\tQuoteType::Indicative\n\n\t}\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 55, "score": 140092.84750090743 }, { "content": "\t#[serde(rename = \"3\")]\n\n\tCounter,\n\n}\n\n\n\nimpl Default for QuoteType {\n\n\tfn default() -> Self {\n\n\t\tQuoteType::Indicative\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum TradingSessionID {\n\n\t/// Day\n\n\t#[serde(rename = \"1\")]\n\n\tDay,\n\n\t/// HalfDay\n\n\t#[serde(rename = \"2\")]\n\n\tHalfDay,\n\n\t/// Morning\n\n\t#[serde(rename = \"3\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 56, "score": 140092.73236806566 }, { "content": "\t/// Can be used with <a href=\"tag_40_OrdType.html\" target=\"bottom\">OrdType&nbsp;(40)</a> = \"Forex - Swap\" to specify the Quoted or target price for the future portion of a F/X swap.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(deserialize_with = \"fix_common::workarounds::from_opt_str\")]// https://github.com/serde-rs/serde/issues/1183\n\n\t#[serde(default)]\n\n\t#[serde(rename = \"640\")]\n\n\tpub price_2: Option<f64>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestRejectReason {\n\n\t/// Unknown symbol (Security)\n\n\t#[serde(rename = \"1\")]\n\n\tUnknownSymbol,\n\n\t/// Exchange(Security) closed\n\n\t#[serde(rename = \"2\")]\n\n\tExchangeClosed,\n\n\t/// Quote request exceeds limit\n\n\t#[serde(rename = \"3\")]\n\n\tQuoteRequestExceedsLimit,\n\n\t/// Too late to enter\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 57, "score": 140092.67275478784 }, { "content": "impl Default for QuoteRequestType {\n\n\tfn default() -> Self {\n\n\t\tQuoteRequestType::Manual\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteType {\n\n\t/// Indicative\n\n\t#[serde(rename = \"0\")]\n\n\tIndicative,\n\n\t/// Tradeable\n\n\t#[serde(rename = \"1\")]\n\n\tTradeable,\n\n\t/// Restricted Tradeable\n\n\t#[serde(rename = \"2\")]\n\n\tRestrictedTradeable,\n\n\t/// Counter (tradeable)\n\n\t#[serde(rename = \"3\")]\n\n\tCounter,\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 58, "score": 140092.26259785309 }, { "content": "\t/// At the Midpoint\n\n\t#[serde(rename = 
\"Y\")]\n\n\tAtTheMidpoint,\n\n\t/// Pre-open\n\n\t#[serde(rename = \"Z\")]\n\n\tPreOpen,\n\n}\n\n\n\nimpl Default for QuoteQualifier {\n\n\tfn default() -> Self {\n\n\t\tQuoteQualifier::AllOrNone\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuotePriceType {\n\n\t/// Percentage (e.g. percent of par)\n\n\t#[serde(rename = \"1\")]\n\n\tN1,\n\n\t/// Per unit (i.e. cents per contract)\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 59, "score": 140091.40642399705 }, { "content": "\t#[serde(rename = \"Y\")]\n\n\tAtTheMidpoint,\n\n\t/// Pre-open\n\n\t#[serde(rename = \"Z\")]\n\n\tPreOpen,\n\n}\n\n\n\nimpl Default for QuoteQualifier {\n\n\tfn default() -> Self {\n\n\t\tQuoteQualifier::AllOrNone\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuotePriceType {\n\n\t/// Percentage (e.g. percent of par)\n\n\t#[serde(rename = \"1\")]\n\n\tN1,\n\n\t/// Per unit (i.e. cents per contract)\n\n\t#[serde(rename = \"2\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 60, "score": 140091.12651460926 }, { "content": "\n\nimpl Default for PrivateQuote {\n\n\tfn default() -> Self {\n\n\t\tPrivateQuote::PrivateQuote\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum RespondentType {\n\n\t/// All market participants\n\n\t#[serde(rename = \"1\")]\n\n\tAllMarketParticipants,\n\n\t/// Specified market participants\n\n\t#[serde(rename = \"2\")]\n\n\tSpecifiedMarketParticipants,\n\n\t/// All Market Makers\n\n\t#[serde(rename = \"3\")]\n\n\tAllMarketMakers,\n\n\t/// Primary Market Maker(s)\n\n\t#[serde(rename = \"4\")]\n\n\tPrimaryMarketMaker,\n\n}\n\n\n\nimpl Default for RespondentType {\n\n\tfn default() -> Self {\n\n\t\tRespondentType::AllMarketParticipants\n\n\t}\n\n}\n", "file_path": "fix50sp2/src/messages/quote_request_reject.rs", "rank": 61, "score": 140090.91416097007 }, { "content": "\t/// Pre-open\n\n\t#[serde(rename = \"Z\")]\n\n\tPreOpen,\n\n}\n\n\n\nimpl Default for QuoteQualifier {\n\n\tfn default() -> Self {\n\n\t\tQuoteQualifier::AllOrNone\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuotePriceType {\n\n\t/// Percentage (e.g. percent of par)\n\n\t#[serde(rename = \"1\")]\n\n\tN1,\n\n\t/// Per unit (i.e. 
cents per contract)\n\n\t#[serde(rename = \"2\")]\n\n\tN2,\n\n\t/// Fixed Amount (absolute value)\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 62, "score": 140090.84927927237 }, { "content": "\tAllMarketMakers,\n\n\t/// Primary Market Maker(s)\n\n\t#[serde(rename = \"4\")]\n\n\tPrimaryMarketMaker,\n\n}\n\n\n\nimpl Default for RespondentType {\n\n\tfn default() -> Self {\n\n\t\tRespondentType::AllMarketParticipants\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestType {\n\n\t/// Manual\n\n\t#[serde(rename = \"1\")]\n\n\tManual,\n\n\t/// Automatic\n\n\t#[serde(rename = \"2\")]\n\n\tAutomatic,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 63, "score": 140090.20433748272 }, { "content": "\tpub related_sym: fix_common::RepeatingValues<RelatedSy>,\n\n\t/// Standard Message Trailer\n\n\t#[serde(flatten)]\n\n\tpub standard_message_trailer: super::super::standard_message_trailer::StandardMessageTrailer,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq)]\n\npub struct RelatedSy {\n\n #[serde(flatten)]\n\n pub instruments: super::super::instrument::Instrument,\n\n\t/// Useful for verifying security identification\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(deserialize_with = \"fix_common::workarounds::from_opt_str\")]// https://github.com/serde-rs/serde/issues/1183\n\n\t#[serde(default)]\n\n\t#[serde(rename = \"140\")]\n\n\tpub prev_close_px: Option<f64>,\n\n\t/// Indicates the type of <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> (e.g. Manual vs. Automatic) being generated.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"303\")]\n\n\tpub quote_request_type: Option<QuoteRequestType>,\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 64, "score": 140088.91397681533 }, { "content": "\tNextFundValuationPoint,\n\n\t/// Pegged\n\n\t#[serde(rename = \"P\")]\n\n\tPegged,\n\n}\n\n\n\nimpl Default for OrdType {\n\n\tfn default() -> Self {\n\n\t\tOrdType::Market\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PriceType {\n\n\t/// Percentage (e.g. percent of par) (often called \"dollar price\" for fixed income)\n\n\t#[serde(rename = \"1\")]\n\n\tPercentage,\n\n\t/// Per unit (i.e. 
per share or contract)\n\n\t#[serde(rename = \"2\")]\n\n\tPerUnit,\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 65, "score": 140087.30486491285 }, { "content": "\t/// FX Spot Next settlement (Spot+1, aka next day)\n\n\t#[serde(rename = \"C\")]\n\n\tFxSpotNextSettlement,\n\n}\n\n\n\nimpl Default for SettlType {\n\n\tfn default() -> Self {\n\n\t\tSettlType::RegularFxSpotSettlement\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Currency {\n\n\t/// Afghani\n\n\t#[serde(rename = \"AFA\")]\n\n\tAfa,\n\n\t/// Algerian Dinar\n\n\t#[serde(rename = \"DZD\")]\n\n\tDzd,\n\n\t/// Andorran Peseta\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 66, "score": 140087.29859852837 }, { "content": "\tOpposite,\n\n}\n\n\n\nimpl Default for Side {\n\n\tfn default() -> Self {\n\n\t\tSide::Buy\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuantityType {\n\n\t/// SHARES\n\n\t#[serde(rename = \"1\")]\n\n\tShares,\n\n\t/// BONDS\n\n\t#[serde(rename = \"2\")]\n\n\tBonds,\n\n\t/// CURRENTFACE\n\n\t#[serde(rename = \"3\")]\n\n\tCurrentface,\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 67, "score": 140086.0457389282 }, { "content": "\tBrokenDateForFxExpressingNonStandardTenorSettlDateMustBeSpecified,\n\n\t/// FX Spot Next settlement (Spot+1, aka next day)\n\n\t#[serde(rename = \"C\")]\n\n\tFxSpotNextSettlement,\n\n}\n\n\n\nimpl Default for SettlType {\n\n\tfn default() -> Self {\n\n\t\tSettlType::RegularFxSpotSettlement\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Currency {\n\n\t/// Afghani\n\n\t#[serde(rename = \"AFA\")]\n\n\tAfa,\n\n\t/// Algerian Dinar\n\n\t#[serde(rename = \"DZD\")]\n\n\tDzd,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 68, "score": 140084.9113990403 }, { "content": "\t#[serde(rename = \"7\")]\n\n\tWhenAndIfIssued,\n\n\t/// Sellers Option\n\n\t#[serde(rename = \"8\")]\n\n\tSellersOption,\n\n\t/// T+5\n\n\t#[serde(rename = \"9\")]\n\n\tT5,\n\n}\n\n\n\nimpl Default for SettlType {\n\n\tfn default() -> Self {\n\n\t\tSettlType::Regular\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Currency {\n\n\t/// Afghani\n\n\t#[serde(rename = \"AFA\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 69, "score": 140084.5926358318 }, { "content": "\t#[serde(rename = \"640\")]\n\n\tpub price_2: Option<f64>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestRejectReason {\n\n\t/// Unknown symbol (Security)\n\n\t#[serde(rename = \"1\")]\n\n\tUnknownSymbol,\n\n\t/// Exchange(Security) closed\n\n\t#[serde(rename = \"2\")]\n\n\tExchangeClosed,\n\n\t/// Quote request exceeds limit\n\n\t#[serde(rename = \"3\")]\n\n\tQuoteRequestExceedsLimit,\n\n\t/// Too late to enter\n\n\t#[serde(rename = \"4\")]\n\n\tTooLateToEnter,\n\n\t/// Invalid price\n\n\t#[serde(rename = \"5\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 70, "score": 140084.5063242137 }, { "content": "\tMorning,\n\n\t/// Afternoon\n\n\t#[serde(rename = \"4\")]\n\n\tAfternoon,\n\n\t/// Evening\n\n\t#[serde(rename = \"5\")]\n\n\tEvening,\n\n\t/// After-hours\n\n\t#[serde(rename = \"6\")]\n\n\tAfterHours,\n\n}\n\n\n\nimpl Default for TradingSessionID {\n\n\tfn default() -> Self {\n\n\t\tTradingSessionID::Day\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, 
Debug, PartialEq)]\n\npub enum TradingSessionSubID {\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 71, "score": 140084.3110954276 }, { "content": "impl Default for SettlmntTyp {\n\n\tfn default() -> Self {\n\n\t\tSettlmntTyp::Regular\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum OrdType {\n\n\t/// Market\n\n\t#[serde(rename = \"1\")]\n\n\tMarket,\n\n\t/// Limit\n\n\t#[serde(rename = \"2\")]\n\n\tLimit,\n\n\t/// Stop\n\n\t#[serde(rename = \"3\")]\n\n\tStop,\n\n\t/// Stop limit\n\n\t#[serde(rename = \"4\")]\n\n\tStopLimit,\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 72, "score": 140084.0324720039 }, { "content": "\tN961,\n\n\t/// Codes specifically reserved for testing purposes\n\n\t#[serde(rename = \"963\")]\n\n\tN963,\n\n\t/// Codes assigned for transactions where no currency is involved\n\n\t#[serde(rename = \"999\")]\n\n\tN999,\n\n}\n\n\n\nimpl Default for Currency {\n\n\tfn default() -> Self {\n\n\t\tCurrency::Afa\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum PriceType {\n\n\t/// Percentage\n\n\t#[serde(rename = \"1\")]\n\n\tPercentage,\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 73, "score": 140083.75672046098 }, { "content": "\tQuiescent,\n\n}\n\n\n\nimpl Default for TradingSessionSubID {\n\n\tfn default() -> Self {\n\n\t\tTradingSessionSubID::PreTrading\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Side {\n\n\t/// Buy\n\n\t#[serde(rename = \"1\")]\n\n\tBuy,\n\n\t/// Sell\n\n\t#[serde(rename = \"2\")]\n\n\tSell,\n\n\t/// Buy minus\n\n\t#[serde(rename = \"3\")]\n\n\tBuyMinus,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 74, "score": 140083.75672046098 }, { "content": "\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum LegSwapType {\n\n\t/// Par For Par\n\n\t#[serde(rename = \"1\")]\n\n\tParForPar,\n\n\t/// Modified Duration\n\n\t#[serde(rename = \"2\")]\n\n\tModifiedDuration,\n\n\t/// Risk\n\n\t#[serde(rename = \"4\")]\n\n\tRisk,\n\n\t/// Proceeds\n\n\t#[serde(rename = \"5\")]\n\n\tProceeds,\n\n}\n\n\n\nimpl Default for LegSwapType {\n\n\tfn default() -> Self {\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 75, "score": 140083.75672046098 }, { "content": "impl Default for Currency {\n\n\tfn default() -> Self {\n\n\t\tCurrency::Afa\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum AcctIDSource {\n\n\t/// BIC\n\n\t#[serde(rename = \"1\")]\n\n\tBic,\n\n\t/// SID code\n\n\t#[serde(rename = \"2\")]\n\n\tSidCode,\n\n\t/// TFM (GSPTA)\n\n\t#[serde(rename = \"3\")]\n\n\tTfm,\n\n\t/// OMGEO (AlertID)\n\n\t#[serde(rename = \"4\")]\n\n\tOmgeo,\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 76, "score": 140083.48379662412 }, { "content": "\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestRejectReason {\n\n\t/// Unknown symbol (Security)\n\n\t#[serde(rename = \"1\")]\n\n\tUnknownSymbol,\n\n\t/// Exchange(Security) closed\n\n\t#[serde(rename = \"2\")]\n\n\tExchangeClosed,\n\n\t/// Quote request exceeds limit\n\n\t#[serde(rename = \"3\")]\n\n\tQuoteRequestExceedsLimit,\n\n\t/// Too late to enter\n\n\t#[serde(rename = \"4\")]\n\n\tTooLateToEnter,\n\n\t/// Invalid price\n\n\t#[serde(rename = \"5\")]\n\n\tInvalidPrice,\n\n\t/// Not authorized to request quote\n\n\t#[serde(rename 
= \"6\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 77, "score": 140083.0704506975 }, { "content": "impl Default for Side {\n\n\tfn default() -> Self {\n\n\t\tSide::Buy\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QtyType {\n\n\t/// Units (shares, par, currency)\n\n\t#[serde(rename = \"0\")]\n\n\tUnits,\n\n\t/// Contracts (if used - must specify <a href=\"tag_231_ContractMultiplier.html\" target=\"bottom\">ContractMultiplier&nbsp;(231)</a> )\n\n\t#[serde(rename = \"1\")]\n\n\tContractsA,\n\n\t/// Units of Measure per Time Unit (if used - must specify <a href=\"tag_996_UnitofMeasure.html\" target=\"bottom\">UnitofMeasure&nbsp;(996)</a> and <a href=\"tag_997_TimeUnit.html\" target=\"bottom\">TimeUnit&nbsp;(997)</a> )\n\n\t#[serde(rename = \"2\")]\n\n\tUnitsOfMeasurePerTimeUnitAAndTimeUnit,\n\n}\n\n\n\nimpl Default for QtyType {\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 78, "score": 140083.02850976036 }, { "content": "\n\nimpl Default for LegSwapType {\n\n\tfn default() -> Self {\n\n\t\tLegSwapType::ParForPar\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum LegSettlType {\n\n\t/// Regular\n\n\t#[serde(rename = \"0\")]\n\n\tRegular,\n\n\t/// Cash\n\n\t#[serde(rename = \"1\")]\n\n\tCash,\n\n\t/// Next Day (T+1)\n\n\t#[serde(rename = \"2\")]\n\n\tNextDay,\n\n\t/// T+2\n\n\t#[serde(rename = \"3\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 79, "score": 140082.94625985352 }, { "content": "#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestRejectReason {\n\n\t/// Unknown symbol (Security)\n\n\t#[serde(rename = \"1\")]\n\n\tUnknownSymbol,\n\n\t/// Exchange(Security) closed\n\n\t#[serde(rename = \"2\")]\n\n\tExchangeClosed,\n\n\t/// Quote request exceeds limit\n\n\t#[serde(rename = \"3\")]\n\n\tQuoteRequestExceedsLimit,\n\n\t/// Too late to enter\n\n\t#[serde(rename = \"4\")]\n\n\tTooLateToEnter,\n\n\t/// Invalid price\n\n\t#[serde(rename = \"5\")]\n\n\tInvalidPrice,\n\n\t/// Not authorized to request quote\n\n\t#[serde(rename = \"6\")]\n\n\tNotAuthorizedToRequestQuote,\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 80, "score": 140082.7189271278 }, { "content": "\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum LegSwapType {\n\n\t/// Par For Par\n\n\t#[serde(rename = \"1\")]\n\n\tParForPar,\n\n\t/// Modified Duration\n\n\t#[serde(rename = \"2\")]\n\n\tModifiedDuration,\n\n\t/// Risk\n\n\t#[serde(rename = \"4\")]\n\n\tRisk,\n\n\t/// Proceeds\n\n\t#[serde(rename = \"5\")]\n\n\tProceeds,\n\n}\n\n\n\nimpl Default for LegSwapType {\n\n\tfn default() -> Self {\n\n\t\tLegSwapType::ParForPar\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 81, "score": 140082.41952591453 }, { "content": "\n\nimpl Default for Side {\n\n\tfn default() -> Self {\n\n\t\tSide::Buy\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QtyType {\n\n\t/// Units (shares, par, currency)\n\n\t#[serde(rename = \"0\")]\n\n\tUnits,\n\n\t/// Contracts (if used - must specify <a href=\"tag_231_ContractMultiplier.html\" target=\"bottom\">ContractMultiplier&nbsp;(231)</a> )\n\n\t#[serde(rename = \"1\")]\n\n\tContractsA,\n\n\t/// Units of Measure per Time Unit (if used - must specify <a href=\"tag_996_UnitOfMeasure.html\" target=\"bottom\">UnitofMeasure&nbsp;(996)</a> and <a 
href=\"tag_997_TimeUnit.html\" target=\"bottom\">TimeUnit&nbsp;(997)</a> )\n\n\t#[serde(rename = \"2\")]\n\n\tUnitsOfMeasurePerTimeUnitAAndTimeUnit,\n\n}\n\n\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 82, "score": 140082.04663974352 }, { "content": "\t/// DTCC code\n\n\t#[serde(rename = \"5\")]\n\n\tDtccCode,\n\n\t/// Other (custom or proprietary)\n\n\t#[serde(rename = \"99\")]\n\n\tOther,\n\n}\n\n\n\nimpl Default for AcctIDSource {\n\n\tfn default() -> Self {\n\n\t\tAcctIDSource::Bic\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum AccountType {\n\n\t/// Account is carried on customer Side of Books\n\n\t#[serde(rename = \"1\")]\n\n\tAccountIsCarriedOnCustomerSideOfBooks,\n\n\t/// Account is carried on non-Customer Side of books\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 83, "score": 140080.90097983068 }, { "content": "impl Default for QtyType {\n\n\tfn default() -> Self {\n\n\t\tQtyType::Units\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum SettlType {\n\n\t/// Regular / FX Spot settlement (T+1 or T+2 depending on currency)\n\n\t#[serde(rename = \"0\")]\n\n\tRegularFxSpotSettlement,\n\n\t/// Cash (TOD / T+0)\n\n\t#[serde(rename = \"1\")]\n\n\tCash,\n\n\t/// Next Day (TOM / T+1)\n\n\t#[serde(rename = \"2\")]\n\n\tNextDay,\n\n\t/// T+2\n\n\t#[serde(rename = \"3\")]\n\n\tT2,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 84, "score": 140080.41435703807 }, { "content": "\t#[serde(rename = \"99\")]\n\n\tOther,\n\n}\n\n\n\nimpl Default for AcctIDSource {\n\n\tfn default() -> Self {\n\n\t\tAcctIDSource::Bic\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum AccountType {\n\n\t/// Account is carried on customer Side of Books\n\n\t#[serde(rename = \"1\")]\n\n\tAccountIsCarriedOnCustomerSideOfBooks,\n\n\t/// Account is carried on non-Customer Side of books\n\n\t#[serde(rename = \"2\")]\n\n\tAccountIsCarriedOnNonCustomerSideOfBooks,\n\n\t/// House Trader\n\n\t#[serde(rename = \"3\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 85, "score": 140079.9370460183 }, { "content": "\tOther,\n\n}\n\n\n\nimpl Default for AcctIDSource {\n\n\tfn default() -> Self {\n\n\t\tAcctIDSource::Bic\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum AccountType {\n\n\t/// Account is carried on customer Side of Books\n\n\t#[serde(rename = \"1\")]\n\n\tAccountIsCarriedOnCustomerSideOfBooks,\n\n\t/// Account is carried on non-Customer Side of books\n\n\t#[serde(rename = \"2\")]\n\n\tAccountIsCarriedOnNonCustomerSideOfBooks,\n\n\t/// House Trader\n\n\t#[serde(rename = \"3\")]\n\n\tHouseTrader,\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 86, "score": 140079.9370460183 }, { "content": "\t#[serde(rename = \"58\")]\n\n\tpub text: Option<String>,\n\n\t/// Must be set if <a href=\"tag_355_EncodedText.html\" target=\"bottom\">EncodedText&nbsp;(355)</a> field is specified and must immediately precede it.\n\n\t#[serde(rename = \"354\")]\n\n\t/// Encoded (non-ASCII characters) representation of the <a href=\"tag_58_Text.html\" target=\"bottom\">Text&nbsp;(58)</a> field in the encoded format specified via the <a href=\"tag_347_MessageEncoding.html\" target=\"bottom\">MessageEncoding&nbsp;(347)</a> field.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(alias = \"355\")]\n\n\tpub encoded_text: 
Option<fix_common::EncodedText<355>>,\n\n\t/// Standard Message Trailer\n\n\t#[serde(flatten)]\n\n\tpub standard_message_trailer: super::super::standard_message_trailer::StandardMessageTrailer,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum QuoteRequestRejectReason {\n\n\t/// Unknown symbol (Security)\n\n\t#[serde(rename = \"1\")]\n\n\tUnknownSymbol,\n\n\t/// Exchange(Security) closed\n\n\t#[serde(rename = \"2\")]\n", "file_path": "fix50sp2/src/messages/quote_request_reject.rs", "rank": 87, "score": 140079.77793611173 }, { "content": "\tpub settlmnt_typ: Option<SettlmntTyp>,\n\n\t/// Can be used (e.g. with forex quotes) to specify the desired \"value date\"\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"64\")]\n\n\tpub fut_sett_date: Option<fix_common::LocalMktDate>,\n\n\t/// Can be used to specify the type of order the quote request is for\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"40\")]\n\n\tpub ord_type: Option<OrdType>,\n\n\t/// Can be used with <a href=\"tag_40_OrdType.html\" target=\"bottom\">OrdType&nbsp;(40)</a> = \"Forex - Swap\" to specify the \"value date\" for the future portion of a F/X swap.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"193\")]\n\n\tpub fut_sett_date_2: Option<fix_common::LocalMktDate>,\n\n\t/// Can be used with <a href=\"tag_40_OrdType.html\" target=\"bottom\">OrdType&nbsp;(40)</a> = \"Forex - Swap\" to specify the order quantity for the future portion of a F/X swap.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(deserialize_with = \"fix_common::workarounds::from_opt_str\")]// https://github.com/serde-rs/serde/issues/1183\n\n\t#[serde(default)]\n\n\t#[serde(rename = \"192\")]\n\n\tpub order_qty_2: Option<f64>,\n\n\t/// The time when <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> will expire.\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 88, "score": 140078.75400919377 }, { "content": "\tfn default() -> Self {\n\n\t\tOrdType::Market\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum Currency {\n\n\t/// Afghani\n\n\t#[serde(rename = \"AFA\")]\n\n\tAfa,\n\n\t/// Algerian Dinar\n\n\t#[serde(rename = \"DZD\")]\n\n\tDzd,\n\n\t/// Andorran Peseta\n\n\t#[serde(rename = \"ADP\")]\n\n\tAdp,\n\n\t/// Argentine Peso\n\n\t#[serde(rename = \"ARS\")]\n\n\tArs,\n\n\t/// Armenian Dram\n", "file_path": "fix43/src/messages/quote_request_reject.rs", "rank": 89, "score": 140076.85276738077 }, { "content": "\tForexLimit,\n\n\t/// Forex Swap\n\n\t#[serde(rename = \"G\")]\n\n\tForexSwap,\n\n\t/// Forex Previously Quoted (No longer used)\n\n\t#[serde(rename = \"H\")]\n\n\tForexPreviouslyQuoted,\n\n\t/// Funari (Limit day order with unexecuted portion handles as Market On Close. E.g. 
Japan)\n\n\t#[serde(rename = \"I\")]\n\n\tFunari,\n\n\t/// Market If Touched (MIT)\n\n\t#[serde(rename = \"J\")]\n\n\tMarketIfTouched,\n\n\t/// Market With Left Over as Limit (market order with unexecuted quantity becoming limit order at last price)\n\n\t#[serde(rename = \"K\")]\n\n\tMarketWithLeftOverAsLimit,\n\n\t/// Previous Fund Valuation Point (Historic pricing; for CIV)\"\n\n\t#[serde(rename = \"L\")]\n\n\tPreviousFundValuationPoint,\n\n\t/// Next Fund Valuation Point (Forward pricing; for CIV)\"\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 90, "score": 140076.78961369337 }, { "content": "\tfn default() -> Self {\n\n\t\tAccountType::AccountIsCarriedOnCustomerSideOfBooks\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum LegSwapType {\n\n\t/// Par For Par\n\n\t#[serde(rename = \"1\")]\n\n\tParForPar,\n\n\t/// Modified Duration\n\n\t#[serde(rename = \"2\")]\n\n\tModifiedDuration,\n\n\t/// Risk\n\n\t#[serde(rename = \"4\")]\n\n\tRisk,\n\n\t/// Proceeds\n\n\t#[serde(rename = \"5\")]\n\n\tProceeds,\n\n}\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 91, "score": 140076.61645363047 }, { "content": "\t/// Forex Swap\n\n\t#[serde(rename = \"G\")]\n\n\tForexSwap,\n\n\t/// Forex Previously Quoted (No longer used)\n\n\t#[serde(rename = \"H\")]\n\n\tForexPreviouslyQuoted,\n\n\t/// Funari (Limit day order with unexecuted portion handles as Market On Close. E.g. Japan)\n\n\t#[serde(rename = \"I\")]\n\n\tFunari,\n\n\t/// Market If Touched (MIT)\n\n\t#[serde(rename = \"J\")]\n\n\tMarketIfTouched,\n\n\t/// Market With Left Over as Limit (market order with unexecuted quantity becoming limit order at last price)\n\n\t#[serde(rename = \"K\")]\n\n\tMarketWithLeftOverAsLimit,\n\n\t/// Previous Fund Valuation Point (Historic pricing; for CIV)\"\n\n\t#[serde(rename = \"L\")]\n\n\tPreviousFundValuationPoint,\n\n\t/// Next Fund Valuation Point (Forward pricing; for CIV)\"\n\n\t#[serde(rename = \"M\")]\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 92, "score": 140076.60486055372 }, { "content": "\tfn default() -> Self {\n\n\t\tQtyType::Units\n\n\t}\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]\n\npub enum SettlType {\n\n\t/// Regular / FX Spot settlement (T+1 or T+2 depending on currency)\n\n\t#[serde(rename = \"0\")]\n\n\tRegularFxSpotSettlement,\n\n\t/// Cash (TOD / T+0)\n\n\t#[serde(rename = \"1\")]\n\n\tCash,\n\n\t/// Next Day (TOM / T+1)\n\n\t#[serde(rename = \"2\")]\n\n\tNextDay,\n\n\t/// T+2\n\n\t#[serde(rename = \"3\")]\n\n\tT2,\n\n\t/// T+3\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 93, "score": 140074.8093339664 }, { "content": "\tpub quote_type: Option<QuoteType>,\n\n\t/// TradingSessionID\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"336\")]\n\n\tpub trading_session_id: Option<String>,\n\n\t/// TradingSessionSubID\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"625\")]\n\n\tpub trading_session_sub_id: Option<String>,\n\n\t/// TradeOriginationDate\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"229\")]\n\n\tpub trade_origination_date: Option<fix_common::LocalMktDate>,\n\n\t/// If <a href=\"tag_40_OrdType.html\" target=\"bottom\">OrdType&nbsp;(40)</a> = \"Forex - Swap\", should be the side of the future portion of a F/X swap. The absence of a side implies that a two-sided quote\n\n\t/// is being requested. 
Required if specified in <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> message.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"54\")]\n\n\tpub side: Option<Side>,\n\n\t/// QtyType\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 94, "score": 140074.6024327409 }, { "content": "\t/// On Basis\n\n\t#[serde(rename = \"9\")]\n\n\tOnBasis,\n\n\t/// On Close (No longer used)\n\n\t#[serde(rename = \"A\")]\n\n\tOnClose,\n\n\t/// Limit On Close (No longer used)\n\n\t#[serde(rename = \"B\")]\n\n\tLimitOnClose,\n\n\t/// Forex Market (No longer used)\n\n\t#[serde(rename = \"C\")]\n\n\tForexMarket,\n\n\t/// Previously Quoted\n\n\t#[serde(rename = \"D\")]\n\n\tPreviouslyQuoted,\n\n\t/// Previously Indicated\n\n\t#[serde(rename = \"E\")]\n\n\tPreviouslyIndicated,\n\n\t/// Forex Limit (No longer used)\n\n\t#[serde(rename = \"F\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 95, "score": 140073.63041366183 }, { "content": "\t#[serde(rename = \"336\")]\n\n\tpub trading_session_id: Option<String>,\n\n\t/// TradingSessionSubID\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"625\")]\n\n\tpub trading_session_sub_id: Option<String>,\n\n\t/// TradeOriginationDate\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"229\")]\n\n\tpub trade_origination_date: Option<fix_common::LocalMktDate>,\n\n\t/// If <a href=\"tag_40_OrdType.html\" target=\"bottom\">OrdType&nbsp;(40)</a> = \"Forex - Swap\", should be the side of the future portion of a F/X swap. The absence of a side implies that a two-sided quote\n\n\t/// is being requested. Required if specified in <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> message.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"54\")]\n\n\tpub side: Option<Side>,\n\n\t/// QtyType\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"854\")]\n\n\tpub qty_type: Option<QtyType>,\n\n\t/// SettlType\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 96, "score": 140073.50891283917 }, { "content": "\t#[serde(rename = \"9\")]\n\n\tOnBasis,\n\n\t/// On Close (No longer used)\n\n\t#[serde(rename = \"A\")]\n\n\tOnClose,\n\n\t/// Limit On Close (No longer used)\n\n\t#[serde(rename = \"B\")]\n\n\tLimitOnClose,\n\n\t/// Forex Market (No longer used)\n\n\t#[serde(rename = \"C\")]\n\n\tForexMarket,\n\n\t/// Previously Quoted\n\n\t#[serde(rename = \"D\")]\n\n\tPreviouslyQuoted,\n\n\t/// Previously Indicated\n\n\t#[serde(rename = \"E\")]\n\n\tPreviouslyIndicated,\n\n\t/// Forex Limit (No longer used)\n\n\t#[serde(rename = \"F\")]\n\n\tForexLimit,\n", "file_path": "fix50sp1/src/messages/quote_request_reject.rs", "rank": 97, "score": 140073.46353975075 }, { "content": "\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(deserialize_with = \"fix_common::workarounds::from_opt_str\")]// https://github.com/serde-rs/serde/issues/1183\n\n\t#[serde(default)]\n\n\t#[serde(rename = \"735\")]\n\n\tpub no_quote_qualifiers: Option<usize>,\n\n\t/// Required if <a href=\"tag_735_NoQuoteQualifiers.html\" target=\"bottom\">NoQuoteQualifiers&nbsp;(735)</a> &gt; 1\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"695\")]\n\n\tpub quote_qualifier: Option<QuoteQualifier>,\n\n\t/// Initiator can specify the price type the quote needs 
to be quoted at. If not specified, the Respondent has option to specify\n\n\t/// how quote is quoted.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"692\")]\n\n\tpub quote_price_type: Option<QuotePriceType>,\n\n\t/// Can be used to specify the type of order the quote request is for\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"40\")]\n\n\tpub ord_type: Option<OrdType>,\n\n\t/// The time when <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> will expire.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "fix50/src/messages/quote_request_reject.rs", "rank": 98, "score": 140073.37724287782 }, { "content": "\t/// Number of underlyings\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"711\")]\n\n\tpub underlyings: Option<fix_common::RepeatingValues<super::super::underlying_instrument::UnderlyingInstrument>>,\n\n\t/// Useful for verifying security identification\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(deserialize_with = \"fix_common::workarounds::from_opt_str\")]// https://github.com/serde-rs/serde/issues/1183\n\n\t#[serde(default)]\n\n\t#[serde(rename = \"140\")]\n\n\tpub prev_close_px: Option<f64>,\n\n\t/// Indicates the type of <a href=\"message_Quote_Request_R.html\" target=\"main\">Quote Request&nbsp;(R)</a> (e.g. Manual vs. Automatic) being generated.\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"303\")]\n\n\tpub quote_request_type: Option<QuoteRequestType>,\n\n\t/// Type of quote being requested from counterparty or market (e.g. Indicative, Firm, or Restricted Tradeable)\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n\n\t#[serde(rename = \"537\")]\n\n\tpub quote_type: Option<QuoteType>,\n\n\t/// TradingSessionID\n\n\t#[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "fix44/src/messages/quote_request_reject.rs", "rank": 99, "score": 140072.93443561235 } ]
Rust
alap_gen/src/attributes.rs
pwil3058/rs_lalr1_parsers
1cd7a8a75450f2848cbcf8048c0e92b167c3e4bb
use lexan; #[cfg(not(feature = "bootstrap"))] use crate::alap_gen::AATerminal; #[cfg(feature = "bootstrap")] use crate::bootstrap::AATerminal; use crate::production::ProductionTail; use crate::symbol::non_terminal::NonTerminal; use crate::symbol::tag::TagOrToken; use crate::symbol::{Associativity, Symbol}; use std::collections::BTreeSet; #[derive(Debug, Clone)] pub enum AttributeData { Token(lexan::Token<AATerminal>), SyntaxError(lexan::Token<AATerminal>, BTreeSet<AATerminal>), LexicalError(lexan::Error<AATerminal>, BTreeSet<AATerminal>), Number(u32), Symbol(Symbol), SymbolList(Vec<Symbol>), LeftHandSide(NonTerminal), TagOrToken(TagOrToken), TagOrTokenList(Vec<TagOrToken>), ProductionTail(ProductionTail), ProductionTailList(Vec<ProductionTail>), Action(String), Predicate(String), AssociativityAndPrecedence(Associativity, u16), Default, } impl Default for AttributeData { fn default() -> Self { AttributeData::Default } } impl AttributeData { pub fn matched_text(&self) -> &String { match self { AttributeData::Token(token) => token.lexeme(), AttributeData::SyntaxError(token, _) => token.lexeme(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, _) => text, lexan::Error::AmbiguousMatches(_, text, _) => text, lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn location(&self) -> &lexan::Location { match self { AttributeData::Token(token) => token.location(), AttributeData::SyntaxError(token, _) => token.location(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(_, location) => location, lexan::Error::AmbiguousMatches(_, _, location) => location, lexan::Error::AdvancedWhenEmpty(location) => location, }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn text_and_location(&self) -> (&String, &lexan::Location) { match self { AttributeData::Token(token) => (token.lexeme(), token.location()), AttributeData::SyntaxError(token, _) => (token.lexeme(), token.location()), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, location) => (text, location), lexan::Error::AmbiguousMatches(_, text, location) => (text, location), lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn number(&self) -> u32 { match self { AttributeData::Number(number) => *number, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol(&self) -> &Symbol { match self { AttributeData::Symbol(symbol) => symbol, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list(&self) -> &Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list_mut(&mut self) -> &mut Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn left_hand_side(&self) -> &NonTerminal { match self { AttributeData::LeftHandSide(lhs) => lhs, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token(&self) -> &TagOrToken { match self { AttributeData::TagOrToken(tag_or_token) => tag_or_token, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list(&self) -> &Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list_mut(&mut self) -> &mut 
Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail(&self) -> &ProductionTail { match self { AttributeData::ProductionTail(production_tail) => production_tail, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list(&self) -> &Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list_mut(&mut self) -> &mut Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn action(&self) -> &str { match self { AttributeData::Action(action) => action, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn predicate(&self) -> &str { match self { AttributeData::Predicate(predicate) => predicate, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn associativity_and_precedence(&self) -> (Associativity, u16) { match self { AttributeData::AssociativityAndPrecedence(associativity, precedence) => { (*associativity, *precedence) } _ => panic!("{:?}: Wrong attribute variant.", self), } } } impl From<lexan::Token<AATerminal>> for AttributeData { fn from(token: lexan::Token<AATerminal>) -> Self { AttributeData::Token(token) } } impl From<lalr1_plus::Error<AATerminal>> for AttributeData { fn from(error: lalr1_plus::Error<AATerminal>) -> Self { match error { lalr1_plus::Error::LexicalError(error, expected) => { AttributeData::LexicalError(error, expected) } lalr1_plus::Error::SyntaxError(token, expected) => { AttributeData::SyntaxError(token, expected) } } } }
use lexan; #[cfg(not(feature = "bootstrap"))] use crate::alap_gen::AATerminal; #[cfg(feature = "bootstrap")] use crate::bootstrap::AATerminal; use crate::production::ProductionTail; use crate::symbol::non_terminal::NonTerminal; use crate::symbol::tag::TagOrToken; use crate::symbol::{Associativity, Symbol}; use std::collections::BTreeSet; #[derive(Debug, Clon
h self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list_mut(&mut self) -> &mut Vec<Symbol> { match self { AttributeData::SymbolList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn left_hand_side(&self) -> &NonTerminal { match self { AttributeData::LeftHandSide(lhs) => lhs, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token(&self) -> &TagOrToken { match self { AttributeData::TagOrToken(tag_or_token) => tag_or_token, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list(&self) -> &Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn tag_or_token_list_mut(&mut self) -> &mut Vec<TagOrToken> { match self { AttributeData::TagOrTokenList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail(&self) -> &ProductionTail { match self { AttributeData::ProductionTail(production_tail) => production_tail, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list(&self) -> &Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn production_tail_list_mut(&mut self) -> &mut Vec<ProductionTail> { match self { AttributeData::ProductionTailList(list) => list, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn action(&self) -> &str { match self { AttributeData::Action(action) => action, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn predicate(&self) -> &str { match self { AttributeData::Predicate(predicate) => predicate, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn associativity_and_precedence(&self) -> (Associativity, u16) { match self { AttributeData::AssociativityAndPrecedence(associativity, precedence) => { (*associativity, *precedence) } _ => panic!("{:?}: Wrong attribute variant.", self), } } } impl From<lexan::Token<AATerminal>> for AttributeData { fn from(token: lexan::Token<AATerminal>) -> Self { AttributeData::Token(token) } } impl From<lalr1_plus::Error<AATerminal>> for AttributeData { fn from(error: lalr1_plus::Error<AATerminal>) -> Self { match error { lalr1_plus::Error::LexicalError(error, expected) => { AttributeData::LexicalError(error, expected) } lalr1_plus::Error::SyntaxError(token, expected) => { AttributeData::SyntaxError(token, expected) } } } }
e)] pub enum AttributeData { Token(lexan::Token<AATerminal>), SyntaxError(lexan::Token<AATerminal>, BTreeSet<AATerminal>), LexicalError(lexan::Error<AATerminal>, BTreeSet<AATerminal>), Number(u32), Symbol(Symbol), SymbolList(Vec<Symbol>), LeftHandSide(NonTerminal), TagOrToken(TagOrToken), TagOrTokenList(Vec<TagOrToken>), ProductionTail(ProductionTail), ProductionTailList(Vec<ProductionTail>), Action(String), Predicate(String), AssociativityAndPrecedence(Associativity, u16), Default, } impl Default for AttributeData { fn default() -> Self { AttributeData::Default } } impl AttributeData { pub fn matched_text(&self) -> &String { match self { AttributeData::Token(token) => token.lexeme(), AttributeData::SyntaxError(token, _) => token.lexeme(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, _) => text, lexan::Error::AmbiguousMatches(_, text, _) => text, lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn location(&self) -> &lexan::Location { match self { AttributeData::Token(token) => token.location(), AttributeData::SyntaxError(token, _) => token.location(), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(_, location) => location, lexan::Error::AmbiguousMatches(_, _, location) => location, lexan::Error::AdvancedWhenEmpty(location) => location, }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn text_and_location(&self) -> (&String, &lexan::Location) { match self { AttributeData::Token(token) => (token.lexeme(), token.location()), AttributeData::SyntaxError(token, _) => (token.lexeme(), token.location()), AttributeData::LexicalError(error, _) => match error { lexan::Error::UnexpectedText(text, location) => (text, location), lexan::Error::AmbiguousMatches(_, text, location) => (text, location), lexan::Error::AdvancedWhenEmpty(_) => panic!("Wrong attribute variant."), }, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn number(&self) -> u32 { match self { AttributeData::Number(number) => *number, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol(&self) -> &Symbol { match self { AttributeData::Symbol(symbol) => symbol, _ => panic!("{:?}: Wrong attribute variant.", self), } } pub fn symbol_list(&self) -> &Vec<Symbol> { matc
random
[ { "content": "fn rhs_associated_precedence(symbols: &[Symbol]) -> Option<(Associativity, u16)> {\n\n for symbol in symbols.iter() {\n\n match symbol {\n\n Symbol::Terminal(token) => {\n\n return Some(token.associativity_and_precedence());\n\n }\n\n _ => (),\n\n }\n\n }\n\n None\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct ProductionId(u32);\n\n\n\nimpl ProductionId {\n\n fn new() -> Self {\n\n static NEXT_ID: AtomicU32 = AtomicU32::new(0);\n\n ProductionId(NEXT_ID.fetch_add(1, atomic::Ordering::Relaxed))\n\n }\n", "file_path": "alap_gen/src/production.rs", "rank": 0, "score": 44042.37095613848 }, { "content": "use std::{cmp::Eq, collections::HashMap, fmt::Debug};\n\n\n\nuse regex::Regex;\n\n\n\nuse crate::error::LexanError;\n\n\n\n#[derive(Debug, Default)]\n", "file_path": "lexan/src/matcher.rs", "rank": 1, "score": 34221.242758848675 }, { "content": "use std::convert::From;\n\n\n\nuse regex;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum LexanError<'a, T> {\n\n DuplicateHandle(T),\n\n DuplicatePattern(&'a str),\n\n EmptyPattern(Option<T>),\n\n RegexError(regex::Error),\n\n}\n\n\n\nimpl<'a, T> From<regex::Error> for LexanError<'a, T> {\n\n fn from(error: regex::Error) -> Self {\n\n LexanError::RegexError(error)\n\n }\n\n}\n", "file_path": "lexan/src/error.rs", "rank": 2, "score": 34220.825113587016 }, { "content": " fn lexical_analyser() {\n\n use Handle::*;\n\n\n\n let lexan = super::LexicalAnalyzer::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n\n (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n\n\n let mut token_stream = lexan.token_stream(\n\n \"if iffy\\n \\\"quoted\\\" \\\"if\\\" \\n9 $ \\tname &{ one \\n two &} and so ?{on?}\".to_string(),\n", "file_path": "lexan/src/lib.rs", "rank": 3, "score": 34219.89747389389 }, { "content": "pub use std::fmt::{Debug, Display};\n\n\n\nuse crate::error::LexanError;\n\nuse crate::matcher::{LiteralMatcher, RegexMatcher, SkipMatcher};\n\n\n\n#[derive(Default)]\n\npub struct Lexicon<T>\n\nwhere\n\n T: Copy + PartialEq + Debug + Display,\n\n{\n\n literal_matcher: LiteralMatcher<T>,\n\n regex_matcher: RegexMatcher<T>,\n\n skip_matcher: SkipMatcher,\n\n end_marker: T,\n\n}\n\n\n\nimpl<T> Lexicon<T>\n\nwhere\n\n T: Copy + Eq + Debug + Display + Ord,\n\n{\n", "file_path": "lexan/src/lexicon.rs", "rank": 4, "score": 34219.17146076398 }, { "content": "extern crate regex;\n\n\n\npub use std::fmt::{Debug, Display};\n\nuse std::sync::Arc;\n\n\n\nmod analyzer;\n\nmod error;\n\nmod lexicon;\n\nmod matcher;\n\n\n\npub use analyzer::{Error, Location, Token, TokenStream};\n\nuse lexicon::Lexicon;\n\n\n\npub struct LexicalAnalyzer<T>\n\nwhere\n\n T: Ord + Copy + PartialEq + Debug + Display,\n\n{\n\n lexicon: Arc<Lexicon<T>>,\n\n}\n\n\n", "file_path": "lexan/src/lib.rs", "rank": 5, "score": 34217.615251985415 }, { "content": " front\n\n }\n\n\n\n pub fn advance_front(&mut self) -> Result<Token<T>, Error<T>> {\n\n self.advance();\n\n self.front.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::lexicon::Lexicon;\n\n\n\n #[test]\n\n fn format_location() {\n\n let location = Location {\n\n line_number: 10,\n\n offset: 15,\n\n label: 
\"whatever\".to_string(),\n", "file_path": "lexan/src/analyzer.rs", "rank": 6, "score": 34217.04506280369 }, { "content": " tag: None,\n\n length: s_index,\n\n tails: t,\n\n }\n\n }\n\n }\n\n\n\n fn add<'a>(\n\n &mut self,\n\n tag: T,\n\n string: &'a str,\n\n s_index: usize,\n\n ) -> Result<(), LexanError<'a, T>> {\n\n debug_assert!(string.len() > 0);\n\n if string.len() == s_index {\n\n if self.tag.is_some() {\n\n return Err(LexanError::DuplicatePattern(string));\n\n }\n\n self.tag = Some(tag);\n\n self.length = string.len();\n", "file_path": "lexan/src/matcher.rs", "rank": 7, "score": 34217.00632489316 }, { "content": "pub use std::{\n\n fmt::{self, Debug, Display},\n\n sync::Arc,\n\n};\n\n\n\nuse crate::lexicon::Lexicon;\n\n\n\n/// Data for use in user friendly lexical analysis error messages\n\n#[derive(Debug, Clone, PartialEq, Eq, Default, PartialOrd, Ord)]\n\npub struct Location {\n\n /// A label describing the source of the string in which this location occurs\n\n label: String,\n\n /// Human friendly line number of this location\n\n line_number: usize,\n\n /// Human friendly offset of this location within its line\n\n offset: usize,\n\n}\n\n\n\nimpl Location {\n\n fn new(label: String) -> Self {\n", "file_path": "lexan/src/analyzer.rs", "rank": 8, "score": 34216.94104010583 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", \"(\\\"\\\\S+\\\")\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicatePattern(\"(\\\"\\\\S+\\\")\"));\n\n } else {\n\n assert!(false)\n\n }\n\n }\n\n}\n", "file_path": "lexan/src/lexicon.rs", "rank": 9, "score": 34216.797768310134 }, { "content": " pub fn new<'a>(\n\n literal_lexemes: &[(T, &'a str)],\n\n regex_lexemes: &[(T, &'a str)],\n\n skip_regex_strs: &[&'a str],\n\n end_marker: T,\n\n ) -> Result<Self, LexanError<'a, T>> {\n\n let mut tags = vec![end_marker];\n\n let mut patterns = vec![];\n\n for (tag, pattern) in literal_lexemes.iter().chain(regex_lexemes.iter()) {\n\n match tags.binary_search(tag) {\n\n Ok(_) => return Err(LexanError::DuplicateHandle(*tag)),\n\n Err(index) => tags.insert(index, *tag),\n\n }\n\n match patterns.binary_search(pattern) {\n\n Ok(_) => return Err(LexanError::DuplicatePattern(pattern)),\n\n Err(index) => patterns.insert(index, pattern),\n\n }\n\n }\n\n for regex in skip_regex_strs.iter() {\n\n match patterns.binary_search(regex) {\n", "file_path": "lexan/src/lexicon.rs", "rank": 10, "score": 34216.5975392478 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n assert!(lexicon.is_ok());\n\n }\n\n\n\n #[test]\n\n fn lexicon_fail() {\n\n use self::Tag::*;\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (If, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 11, "score": 34216.48943302526 }, { "content": "}\n\n\n\n#[derive(Debug, Default)]\n\npub(crate) struct SkipMatcher {\n\n regexes: Vec<Regex>,\n\n}\n\n\n\nimpl SkipMatcher {\n\n pub fn new<'a, T>(regex_strs: &[&'a str]) -> Result<Self, LexanError<'a, T>> {\n\n let mut regexes = vec![];\n\n for regex_str in regex_strs.iter() {\n\n if regex_str.len() == 0 {\n\n return 
Err(LexanError::EmptyPattern(None));\n\n };\n\n let mut anchored_pattern = \"\\\\A\".to_string();\n\n anchored_pattern.push_str(regex_str);\n\n regexes.push(Regex::new(&anchored_pattern)?);\n\n }\n\n Ok(Self { regexes })\n\n }\n", "file_path": "lexan/src/matcher.rs", "rank": 12, "score": 34216.41024208326 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicatePattern(\"if\"));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"when\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 13, "score": 34216.38021498403 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicateHandle(If));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Action, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 14, "score": 34216.33995306098 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicateHandle(Action));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (When, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 15, "score": 34216.33995306098 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicatePattern(\"when\"));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"(\\\"\\\\S+\\\")\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 16, "score": 34216.33995306098 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicateHandle(When));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, 
r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 17, "score": 34216.33995306098 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n Action,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicateHandle(Action));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"if\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 18, "score": 34216.30074168866 }, { "content": " (Code, r\"(%\\{(.|[\\n\\r])*?%\\})\"),\n\n ],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n if let Err(err) = lexicon {\n\n assert_eq!(err, LexanError::DuplicatePattern(\"(\\\"\\\\S+\\\")\"));\n\n } else {\n\n assert!(false)\n\n }\n\n\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 19, "score": 34216.30074168866 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::analyzer::Error;\n\n\n\n #[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord, Debug)]\n\n enum Handle {\n\n If,\n\n When,\n\n Ident,\n\n Btextl,\n\n Pred,\n\n Literal,\n\n Action,\n\n Predicate,\n\n Code,\n\n End,\n\n }\n", "file_path": "lexan/src/lib.rs", "rank": 20, "score": 34216.245644268696 }, { "content": " Action => write!(f, \"Action\"),\n\n Predicate => write!(f, \"Predicate\"),\n\n Code => write!(f, \"Code\"),\n\n End => write!(f, \"End\"),\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn lexicon_ok() {\n\n use self::Tag::*;\n\n let lexicon = Lexicon::<Tag>::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[\n\n (Ident, \"[a-zA-Z]+[\\\\w_]*\"),\n\n (Btextl, r\"&\\{(.|[\\n\\r])*&\\}\"),\n\n (Pred, r\"\\?\\{(.|[\\n\\r])*\\?\\}\"),\n\n (Literal, \"(\\\"\\\\S+\\\")\"),\n\n (Action, r\"(!\\{(.|[\\n\\r])*?!\\})\"),\n\n (Predicate, r\"(\\?\\((.|[\\n\\r])*?\\?\\))\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 21, "score": 34216.208629124594 }, { "content": " }\n\n if self.regex_matcher.matches(&text[index..]) {\n\n return index;\n\n }\n\n if self.skip_matcher.matches(&text[index..]) {\n\n return index;\n\n }\n\n }\n\n text.len()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[derive(PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord, Debug)]\n\n enum Tag {\n\n If,\n\n When,\n", "file_path": "lexan/src/lexicon.rs", "rank": 22, "score": 34216.10302381746 }, { "content": "}\n\n\n\nimpl<T: Eq + Debug + Copy + Ord> LiteralMatcher<T> {\n\n pub fn new<'a>(lexemes: &[(T, &'a str)]) -> Result<LiteralMatcher<T>, LexanError<'a, T>> {\n\n let mut lexes = HashMap::<u8, LiteralMatcherNode<T>>::new();\n\n for &(tag, pattern) in lexemes.iter() {\n\n // make sure that tags are unique and strings are not empty\n\n if 
pattern.len() == 0 {\n\n return Err(LexanError::EmptyPattern(Some(tag)));\n\n }\n\n\n\n let key = pattern.as_bytes()[0];\n\n if lexes.contains_key(&key) {\n\n lexes.get_mut(&key).unwrap().add(tag, pattern, 1)?;\n\n } else {\n\n lexes.insert(key, LiteralMatcherNode::<T>::new(tag, pattern, 1));\n\n }\n\n }\n\n Ok(LiteralMatcher { lexemes: lexes })\n\n }\n", "file_path": "lexan/src/matcher.rs", "rank": 23, "score": 34216.07640062559 }, { "content": "impl<T: Copy + Ord + Debug> RegexMatcher<T> {\n\n pub fn new<'a>(lexeme_patterns: &[(T, &'a str)]) -> Result<RegexMatcher<T>, LexanError<'a, T>> {\n\n let mut lexemes = vec![];\n\n for (tag, pattern) in lexeme_patterns.iter() {\n\n if pattern.len() == 0 {\n\n return Err(LexanError::EmptyPattern(Some(*tag)));\n\n };\n\n let mut anchored_pattern = \"\\\\A\".to_string();\n\n anchored_pattern.push_str(pattern);\n\n lexemes.push((*tag, Regex::new(&anchored_pattern)?));\n\n }\n\n Ok(Self { lexemes })\n\n }\n\n\n\n /// Returns the longest regular expression matches at start of `text`.\n\n pub fn longest_matches(&self, text: &str) -> (Vec<T>, usize) {\n\n let mut matches = vec![];\n\n let mut largest_end = 0;\n\n for (tag, regex) in self.lexemes.iter() {\n\n if let Some(m) = regex.find(text) {\n", "file_path": "lexan/src/matcher.rs", "rank": 24, "score": 34215.98066203036 }, { "content": " Ident,\n\n Btextl,\n\n Pred,\n\n Literal,\n\n Action,\n\n Predicate,\n\n Code,\n\n End,\n\n }\n\n\n\n impl std::fmt::Display for Tag {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n use Tag::*;\n\n match self {\n\n If => write!(f, \"\\\"if\\\"\"),\n\n When => write!(f, \"\\\"when\\\"\"),\n\n Ident => write!(f, \"Ident\"),\n\n Btextl => write!(f, \"Btextl\"),\n\n Pred => write!(f, \"Pred\"),\n\n Literal => write!(f, \"Literal\"),\n", "file_path": "lexan/src/lexicon.rs", "rank": 25, "score": 34215.913500749426 }, { "content": " match self {\n\n If => write!(f, \"\\\"if\\\"\"),\n\n When => write!(f, \"\\\"when\\\"\"),\n\n Ident => write!(f, \"Ident\"),\n\n End => write!(f, \"End\"),\n\n }\n\n }\n\n }\n\n use Handle::*;\n\n let lexicon = Lexicon::new(\n\n &[(If, \"if\"), (When, \"when\")],\n\n &[(Ident, \"[a-zA-Z]+[\\\\w_]*\")],\n\n &[r\"(/\\*(.|[\\n\\r])*?\\*/)\", r\"(//[^\\n\\r]*)\", r\"(\\s+)\"],\n\n End,\n\n );\n\n let lexicon = Arc::new(lexicon.unwrap());\n\n let text = \" \".to_string();\n\n let label = \"label\".to_string();\n\n let mut token_stream = TokenStream::new(&lexicon, text, label);\n\n assert!(token_stream.is_empty());\n", "file_path": "lexan/src/analyzer.rs", "rank": 26, "score": 34215.85596386693 }, { "content": "\n\n impl std::fmt::Display for Handle {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n use Handle::*;\n\n match self {\n\n If => write!(f, \"\\\"if\\\"\"),\n\n When => write!(f, \"\\\"when\\\"\"),\n\n Ident => write!(f, \"Ident\"),\n\n Btextl => write!(f, \"Btextl\"),\n\n Pred => write!(f, \"Pred\"),\n\n Literal => write!(f, \"Literal\"),\n\n Action => write!(f, \"Action\"),\n\n Predicate => write!(f, \"Predicate\"),\n\n Code => write!(f, \"Code\"),\n\n End => write!(f, \"End\"),\n\n }\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "lexan/src/lib.rs", "rank": 27, "score": 34215.77431618445 }, { "content": " Ok(_) => return Err(LexanError::DuplicatePattern(regex)),\n\n Err(index) => patterns.insert(index, regex),\n\n }\n\n }\n\n let literal_matcher = LiteralMatcher::new(literal_lexemes)?;\n\n let regex_matcher = RegexMatcher::new(regex_lexemes)?;\n\n let skip_matcher = 
SkipMatcher::new(skip_regex_strs)?;\n\n Ok(Self {\n\n literal_matcher,\n\n regex_matcher,\n\n skip_matcher,\n\n end_marker,\n\n })\n\n }\n\n\n\n /// Returns the end marker for this Lexicon\n\n pub fn end_marker(&self) -> T {\n\n self.end_marker\n\n }\n\n\n", "file_path": "lexan/src/lexicon.rs", "rank": 28, "score": 34215.59341953605 }, { "content": " token_stream.incr_index_and_location(11);\n\n println!(\"{:?}\", token_stream.location);\n\n assert_eq!(token_stream.index, 11);\n\n assert_eq!(token_stream.location.line_number, 2);\n\n assert_eq!(token_stream.location.offset, 5);\n\n }\n\n\n\n #[test]\n\n fn token_stream_basics() {\n\n #[derive(PartialEq, Eq, Clone, Copy, Hash, Debug, PartialOrd, Ord)]\n\n enum Handle {\n\n If,\n\n When,\n\n Ident,\n\n End,\n\n }\n\n\n\n impl std::fmt::Display for Handle {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n use Handle::*;\n", "file_path": "lexan/src/analyzer.rs", "rank": 29, "score": 34215.39603698842 }, { "content": " Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"so\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:13\");\n\n }\n\n _ => assert!(false),\n\n };\n\n\n\n let mut second_token_stream = lexan.token_stream(\n\n \"if iffy\\n \\\"quoted\\\" \\\"if\\\" \\n9 $ \\tname &{ one \\n two &} and so ?{on?}\".to_string(),\n\n \"raw text\".to_string(),\n\n );\n\n\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), If);\n\n assert_eq!(token.lexeme(), \"if\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":1:1\");\n\n }\n\n _ => assert!(false),\n", "file_path": "lexan/src/lib.rs", "rank": 30, "score": 34215.37085771342 }, { "content": " assert_eq!(*token.tag(), Btextl);\n\n assert_eq!(token.lexeme(), \"&{ one \\n two &}\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":3:11\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"and\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:9\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"so\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:13\");\n\n }\n", "file_path": "lexan/src/lib.rs", "rank": 31, "score": 34213.334433734984 }, { "content": " pub fn is_advance_when_empty(&self) -> bool {\n\n match self {\n\n Error::AdvancedWhenEmpty(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Debug + Display + Copy> fmt::Display for Error<T> {\n\n fn fmt(&self, dest: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::UnexpectedText(text, location) => {\n\n write!(dest, \"Unexpected text \\\"{}\\\" at: {}.\", text, location)\n\n }\n\n Error::AmbiguousMatches(tags, text, location) => write!(\n\n dest,\n\n \"Ambiguous matches {:#?} \\\"{}\\\" at: {}.\",\n\n tags, text, location\n\n ),\n\n Error::AdvancedWhenEmpty(location) => {\n", "file_path": "lexan/src/analyzer.rs", "rank": 32, "score": 34213.334433734984 }, { "content": " self.front = token_stream.front().unwrap();\n\n self.token_stream_stack.push(token_stream);\n\n }\n\n }\n\n\n\n pub fn advance(&mut self) {\n\n let mut i = self.token_stream_stack.len();\n\n if i > 0 {\n\n self.token_stream_stack[i - 1].advance();\n\n let mut popped = None;\n\n while i > 0 && 
self.token_stream_stack[i - 1].is_empty() {\n\n popped = self.token_stream_stack.pop();\n\n i -= 1;\n\n }\n\n self.front = if i > 0 {\n\n self.token_stream_stack[i - 1].front().unwrap()\n\n } else {\n\n let end_location = popped.unwrap().location();\n\n Ok(Token {\n\n tag: self.lexicon.end_marker(),\n", "file_path": "lexan/src/analyzer.rs", "rank": 33, "score": 34213.334433734984 }, { "content": " match second_token_stream.front_advance() {\n\n Err(err) => match err {\n\n Error::UnexpectedText(text, location) => {\n\n assert_eq!(text, \"$\");\n\n assert_eq!(format!(\"{}\", location), \"\\\"raw text\\\":3:3\");\n\n }\n\n _ => assert!(false),\n\n },\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"name\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":3:6\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n", "file_path": "lexan/src/lib.rs", "rank": 34, "score": 34213.334433734984 }, { "content": " }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Btextl);\n\n assert_eq!(token.lexeme(), \"&{ one \\n two &}\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":3:11\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"and\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:9\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n", "file_path": "lexan/src/lib.rs", "rank": 35, "score": 34213.334433734984 }, { "content": " } else {\n\n let key = string.as_bytes()[s_index];\n\n // Couldn't do this with match because of ownership issues with \"tails\"\n\n if self.tails.contains_key(&key) {\n\n self.tails\n\n .get_mut(&key)\n\n .unwrap()\n\n .add(tag, string, s_index + 1)?;\n\n } else {\n\n self.tails\n\n .insert(key, LiteralMatcherNode::<T>::new(tag, string, s_index + 1));\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub(crate) struct LiteralMatcher<T: PartialEq + Debug + Copy> {\n\n lexemes: HashMap<u8, LiteralMatcherNode<T>>,\n", "file_path": "lexan/src/matcher.rs", "rank": 36, "score": 34213.334433734984 }, { "content": " current_location,\n\n )))\n\n }\n\n }\n\n}\n\n\n\npub struct TokenStream<T>\n\nwhere\n\n T: Debug + Display + Copy + Eq + Ord,\n\n{\n\n lexicon: Arc<Lexicon<T>>,\n\n token_stream_stack: Vec<BasicTokenStream<T>>,\n\n front: Result<Token<T>, Error<T>>,\n\n}\n\n\n\nimpl<'a, T> TokenStream<T>\n\nwhere\n\n T: Debug + Display + Copy + Eq + Ord,\n\n{\n\n pub fn new(lexicon: &Arc<Lexicon<T>>, text: String, label: String) -> Self {\n", "file_path": "lexan/src/analyzer.rs", "rank": 37, "score": 34213.334433734984 }, { "content": " };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"iffy\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":1:4\");\n\n }\n\n _ => assert!(false),\n\n };\n\n\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Pred);\n\n assert_eq!(token.lexeme(), \"?{on?}\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:16\");\n\n }\n\n _ => assert!(false),\n\n };\n\n\n\n match token_stream.front_advance() {\n", "file_path": 
"lexan/src/lib.rs", "rank": 38, "score": 34213.334433734984 }, { "content": " }\n\n _ => assert!(false),\n\n },\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Err(err) => match err {\n\n Error::UnexpectedText(text, location) => {\n\n assert_eq!(text, \"$\");\n\n assert_eq!(format!(\"{}\", location), \"\\\"raw text\\\":3:3\");\n\n }\n\n _ => assert!(false),\n\n },\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"name\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":3:6\");\n", "file_path": "lexan/src/lib.rs", "rank": 39, "score": 34213.334433734984 }, { "content": " self.incr_index_and_location(lrems.1);\n\n Some(Ok(Token {\n\n tag: lrems.0[0],\n\n lexeme: (&self.text[start..self.index]).to_string(),\n\n location: current_location,\n\n }))\n\n } else if lrems.0.len() > 1 {\n\n self.incr_index_and_location(lrems.1);\n\n Some(Err(Error::AmbiguousMatches(\n\n lrems.0,\n\n (&self.text[start..self.index]).to_string(),\n\n current_location,\n\n )))\n\n } else {\n\n let distance = self\n\n .lexicon\n\n .distance_to_next_valid_byte(&self.text[self.index..]);\n\n self.incr_index_and_location(distance);\n\n Some(Err(Error::UnexpectedText(\n\n (&self.text[start..self.index]).to_string(),\n", "file_path": "lexan/src/analyzer.rs", "rank": 40, "score": 34213.334433734984 }, { "content": " let mut stream = Self {\n\n lexicon: Arc::clone(lexicon),\n\n token_stream_stack: vec![],\n\n front: Err(Error::AdvancedWhenEmpty(Location::default())),\n\n };\n\n stream.inject(text, label);\n\n stream\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.token_stream_stack.len() == 0\n\n }\n\n\n\n pub fn front(&self) -> Result<Token<T>, Error<T>> {\n\n self.front.clone()\n\n }\n\n\n\n pub fn inject(&mut self, text: String, label: String) {\n\n let token_stream = BasicTokenStream::new(&self.lexicon, text, label);\n\n if !token_stream.is_empty() {\n", "file_path": "lexan/src/analyzer.rs", "rank": 41, "score": 34213.334433734984 }, { "content": "\n\n pub fn longest_match(&self, string: &str) -> Option<(T, usize)> {\n\n let mut rval: Option<(T, usize)> = None;\n\n let mut lexemes = &self.lexemes;\n\n for key in string.as_bytes().iter() {\n\n match lexemes.get(&key) {\n\n None => break,\n\n Some(node) => {\n\n if let Some(tag) = node.tag {\n\n rval = Some((tag, node.length));\n\n }\n\n lexemes = &node.tails;\n\n }\n\n }\n\n }\n\n rval\n\n }\n\n\n\n pub fn matches(&self, string: &str) -> bool {\n\n let mut lexemes = &self.lexemes;\n", "file_path": "lexan/src/matcher.rs", "rank": 42, "score": 34213.334433734984 }, { "content": " Ok(token) => {\n\n assert_eq!(*token.tag(), Literal);\n\n assert_eq!(token.lexeme(), \"\\\"quoted\\\"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":2:2\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Literal);\n\n assert_eq!(token.lexeme(), \"\\\"if\\\"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":2:11\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Err(err) => match err {\n\n Error::UnexpectedText(text, location) => {\n\n assert_eq!(text, \"9\");\n\n assert_eq!(format!(\"{}\", location), \"\\\"raw text\\\":3:1\");\n", "file_path": "lexan/src/lib.rs", "rank": 43, "score": 34213.334433734984 }, { "content": " Self {\n\n line_number: 1,\n\n offset: 1,\n\n label: 
label,\n\n }\n\n }\n\n\n\n pub fn line_number(&self) -> usize {\n\n self.line_number\n\n }\n\n\n\n pub fn offset(&self) -> usize {\n\n self.offset\n\n }\n\n\n\n pub fn label<'a>(&'a self) -> &'a String {\n\n &self.label\n\n }\n\n}\n\n\n", "file_path": "lexan/src/analyzer.rs", "rank": 44, "score": 34213.334433734984 }, { "content": " token_stream.advance();\n\n let token = Token {\n\n tag: Ident,\n\n lexeme: \"nothing\".to_string(),\n\n location: Location {\n\n line_number: 1,\n\n offset: 5,\n\n label: \"another\".to_string(),\n\n },\n\n };\n\n assert_eq!((token_stream.front().clone()).unwrap(), token.clone());\n\n token_stream.advance();\n\n assert!(token_stream.front().clone().is_ok());\n\n token_stream.advance();\n\n assert!(token_stream.front().clone().is_err());\n\n token_stream.advance();\n\n let token = Token {\n\n tag: End,\n\n lexeme: \"\".to_string(),\n\n location: Location {\n", "file_path": "lexan/src/analyzer.rs", "rank": 45, "score": 34213.334433734984 }, { "content": " location,\n\n index: 0,\n\n front: None,\n\n };\n\n bts.advance();\n\n bts\n\n }\n\n\n\n fn front(&self) -> Option<Result<Token<T>, Error<T>>> {\n\n self.front.clone()\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.front.is_none()\n\n }\n\n\n\n fn advance(&mut self) {\n\n self.front = self.next();\n\n }\n\n\n", "file_path": "lexan/src/analyzer.rs", "rank": 46, "score": 34213.334433734984 }, { "content": " UnexpectedText(String, Location),\n\n AmbiguousMatches(Vec<T>, String, Location),\n\n AdvancedWhenEmpty(Location),\n\n}\n\n\n\nimpl<T: Display + Copy> Error<T> {\n\n pub fn is_unexpected_text(&self) -> bool {\n\n match self {\n\n Error::UnexpectedText(_, _) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_ambiguous_match(&self) -> bool {\n\n match self {\n\n Error::AmbiguousMatches(_, _, _) => true,\n\n _ => false,\n\n }\n\n }\n\n\n", "file_path": "lexan/src/analyzer.rs", "rank": 47, "score": 34213.334433734984 }, { "content": " line_number: 1,\n\n offset: 23,\n\n label: \"another\".to_string(),\n\n },\n\n };\n\n assert_eq!(token_stream.front().clone().unwrap(), token);\n\n assert!(token_stream.advance_front().is_err());\n\n }\n\n}\n", "file_path": "lexan/src/analyzer.rs", "rank": 48, "score": 34213.334433734984 }, { "content": " _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Pred);\n\n assert_eq!(token.lexeme(), \"?{on?}\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:16\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), End);\n\n assert_eq!(token.lexeme(), \"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:22\");\n\n }\n\n _ => assert!(false),\n\n };\n\n assert!(second_token_stream.advance_front().is_err());\n\n }\n\n}\n", "file_path": "lexan/src/lib.rs", "rank": 49, "score": 34213.334433734984 }, { "content": " lexeme: String::new(),\n\n location: end_location,\n\n })\n\n }\n\n } else {\n\n let location = match &self.front {\n\n Ok(token) => token.location(),\n\n Err(err) => match err {\n\n Error::UnexpectedText(_, location) => location,\n\n Error::AmbiguousMatches(_, _, location) => location,\n\n Error::AdvancedWhenEmpty(location) => location,\n\n },\n\n };\n\n self.front = Err(Error::AdvancedWhenEmpty(location.clone()))\n\n }\n\n }\n\n\n\n pub fn front_advance(&mut self) -> Result<Token<T>, Error<T>> {\n\n let front = self.front.clone();\n\n self.advance();\n", "file_path": 
"lexan/src/analyzer.rs", "rank": 50, "score": 34213.334433734984 }, { "content": " }\n\n }\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn literal_matcher() {\n\n let lm = super::LiteralMatcher::new(&[\n\n (0, \"test\"),\n\n (1, \"whatever\"),\n\n (2, \"anything\"),\n\n (3, \"anything at all\"),\n\n ])\n\n .unwrap();\n\n assert!(lm.longest_match(\"something\").is_none());\n\n assert_eq!(lm.longest_match(\"anything at all something\"), Some((3, 15)));\n\n assert_eq!(\n\n lm.longest_match(&\"anything at all whatever something\"[16..]),\n\n Some((1, 8))\n\n );\n\n }\n\n}\n", "file_path": "lexan/src/matcher.rs", "rank": 51, "score": 34213.334433734984 }, { "content": "\n\n /// Returns number of skippable bytes at start of `text`.\n\n pub fn skippable_count(&self, text: &str) -> usize {\n\n let mut index = 0;\n\n 'outer: while index < text.len() {\n\n for regex in self.regexes.iter() {\n\n if let Some(m) = regex.find(&text[index..]) {\n\n index += m.end();\n\n continue 'outer;\n\n }\n\n }\n\n break;\n\n }\n\n index\n\n }\n\n\n\n pub fn matches(&self, text: &str) -> bool {\n\n for regex in self.regexes.iter() {\n\n if regex.find(text).is_some() {\n\n return true;\n", "file_path": "lexan/src/matcher.rs", "rank": 52, "score": 34213.334433734984 }, { "content": " write!(dest, \"Advanced past end of text at: {}.\", location,)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Debug + Display + Copy> std::error::Error for Error<T> {}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Token<T: Display + Copy + Eq> {\n\n tag: T,\n\n lexeme: String,\n\n location: Location,\n\n}\n\n\n\nimpl<T: Display + Copy + Eq> Token<T> {\n\n pub fn tag<'a>(&'a self) -> &'a T {\n\n &self.tag\n\n }\n\n\n\n pub fn lexeme<'a>(&'a self) -> &'a String {\n\n &self.lexeme\n\n }\n\n\n\n pub fn location<'a>(&'a self) -> &'a Location {\n\n &self.location\n\n }\n\n}\n\n\n", "file_path": "lexan/src/analyzer.rs", "rank": 53, "score": 34213.334433734984 }, { "content": " for key in string.as_bytes().iter() {\n\n match lexemes.get(&key) {\n\n None => break,\n\n Some(node) => {\n\n if node.tag.is_some() {\n\n return true;\n\n }\n\n lexemes = &node.tails;\n\n }\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub(crate) struct RegexMatcher<T: Copy + Debug> {\n\n lexemes: Vec<(T, Regex)>,\n\n}\n\n\n", "file_path": "lexan/src/matcher.rs", "rank": 54, "score": 34213.334433734984 }, { "content": " assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":2:11\");\n\n }\n\n _ => assert!(false),\n\n };\n\n second_token_stream.inject(\n\n \"if one \\\"name\\\"\".to_string(),\n\n \"\\\"injected text\\\"\".to_string(),\n\n );\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), If);\n\n assert_eq!(token.lexeme(), \"if\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"\\\"injected text\\\"\\\":1:1\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"one\");\n", "file_path": "lexan/src/lib.rs", "rank": 55, "score": 34213.334433734984 }, { "content": " assert!(token_stream.front().is_err());\n\n let text = \" if nothing happens 9 \".to_string();\n\n let label = \"another\".to_string();\n\n token_stream.inject(text, label);\n\n assert!(!token_stream.is_empty());\n\n let token = Token {\n\n tag: If,\n\n lexeme: \"if\".to_string(),\n\n location: Location {\n\n line_number: 1,\n\n offset: 2,\n\n label: 
\"another\".to_string(),\n\n },\n\n };\n\n assert_eq!((token_stream.front().clone()).unwrap(), token.clone());\n\n assert_eq!((token_stream.front().clone()).unwrap(), token.clone());\n\n token_stream.advance();\n\n let token = Token {\n\n tag: Ident,\n\n lexeme: \"nothing\".to_string(),\n", "file_path": "lexan/src/analyzer.rs", "rank": 56, "score": 34213.334433734984 }, { "content": " };\n\n }\n\n self.index = next_index;\n\n }\n\n\n\n fn next(&mut self) -> Option<Result<Token<T>, Error<T>>> {\n\n self.incr_index_and_location(self.lexicon.skippable_count(&self.text[self.index..]));\n\n if self.index >= self.text.len() {\n\n return None;\n\n }\n\n\n\n let current_location = self.location();\n\n let start = self.index;\n\n let o_llm = self.lexicon.longest_literal_match(&self.text[self.index..]);\n\n let lrems = self.lexicon.longest_regex_matches(&self.text[self.index..]);\n\n\n\n if let Some(llm) = o_llm {\n\n if lrems.0.len() > 1 && lrems.1 > llm.1 {\n\n self.incr_index_and_location(lrems.1);\n\n Some(Err(Error::AmbiguousMatches(\n", "file_path": "lexan/src/analyzer.rs", "rank": 57, "score": 34213.334433734984 }, { "content": "impl<T> LexicalAnalyzer<T>\n\nwhere\n\n T: Ord + Copy + PartialEq + Debug + Display,\n\n{\n\n pub fn new<'a>(\n\n literal_lexemes: &[(T, &'a str)],\n\n regex_lexemes: &[(T, &'a str)],\n\n skip_regex_strs: &[&'a str],\n\n end_marker: T,\n\n ) -> Self {\n\n let lexicon =\n\n match Lexicon::new(literal_lexemes, regex_lexemes, skip_regex_strs, end_marker) {\n\n Ok(lexicon) => Arc::new(lexicon),\n\n Err(err) => panic!(\"Fatal Error: {:?}\", err),\n\n };\n\n Self { lexicon }\n\n }\n\n\n\n pub fn token_stream(&self, text: String, label: String) -> TokenStream<T> {\n\n TokenStream::new(&self.lexicon, text, label)\n", "file_path": "lexan/src/lib.rs", "rank": 58, "score": 34213.334433734984 }, { "content": " Ok(token) => {\n\n assert_eq!(*token.tag(), End);\n\n assert_eq!(token.lexeme(), \"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":4:22\");\n\n }\n\n _ => assert!(false),\n\n };\n\n\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Literal);\n\n assert_eq!(token.lexeme(), \"\\\"quoted\\\"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":2:2\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Literal);\n\n assert_eq!(token.lexeme(), \"\\\"if\\\"\");\n", "file_path": "lexan/src/lib.rs", "rank": 59, "score": 34213.334433734984 }, { "content": " location: Location {\n\n line_number: 1,\n\n offset: 5,\n\n label: \"another\".to_string(),\n\n },\n\n };\n\n assert_eq!((token_stream.front().clone()).unwrap(), token.clone());\n\n let text = \"just\".to_string();\n\n let label = \"more\".to_string();\n\n token_stream.inject(text, label);\n\n let token = Token {\n\n tag: Ident,\n\n lexeme: \"just\".to_string(),\n\n location: Location {\n\n line_number: 1,\n\n offset: 1,\n\n label: \"more\".to_string(),\n\n },\n\n };\n\n assert_eq!((token_stream.front().clone()).unwrap(), token.clone());\n", "file_path": "lexan/src/analyzer.rs", "rank": 60, "score": 34213.334433734984 }, { "content": " fn location(&self) -> Location {\n\n self.location.clone()\n\n }\n\n\n\n fn incr_index_and_location(&mut self, length: usize) {\n\n let next_index = self.index + length;\n\n let slice = &self.text[self.index..next_index];\n\n let mut i = 0;\n\n while i < length {\n\n if let Some(eol_i) = slice[i..].find(\"\\r\\n\") {\n\n 
self.location.line_number += 1;\n\n self.location.offset = 1;\n\n i += eol_i + 2;\n\n } else if let Some(eol_i) = slice[i..].find(\"\\n\") {\n\n self.location.line_number += 1;\n\n self.location.offset = 1;\n\n i += eol_i + 1;\n\n } else {\n\n self.location.offset += length - i;\n\n i = length;\n", "file_path": "lexan/src/analyzer.rs", "rank": 61, "score": 34213.334433734984 }, { "content": " \"raw text\".to_string(),\n\n );\n\n\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), If);\n\n assert_eq!(token.lexeme(), \"if\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":1:1\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"iffy\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"raw text\\\":1:4\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match token_stream.front_advance() {\n", "file_path": "lexan/src/lib.rs", "rank": 62, "score": 34213.334433734984 }, { "content": " assert_eq!(format!(\"{}\", token.location()), \"\\\"\\\"injected text\\\"\\\":1:4\");\n\n }\n\n _ => assert!(false),\n\n };\n\n second_token_stream.inject(\" two\".to_string(), \"another text\".to_string());\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"two\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"another text\\\":1:3\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Literal);\n\n assert_eq!(token.lexeme(), \"\\\"name\\\"\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"\\\"injected text\\\"\\\":1:8\");\n\n }\n\n _ => assert!(false),\n", "file_path": "lexan/src/lib.rs", "rank": 63, "score": 34213.334433734984 }, { "content": " };\n\n second_token_stream.inject(\" three\".to_string(), \"yet another text\".to_string());\n\n match second_token_stream.front_advance() {\n\n Ok(token) => {\n\n assert_eq!(*token.tag(), Ident);\n\n assert_eq!(token.lexeme(), \"three\");\n\n assert_eq!(format!(\"{}\", token.location()), \"\\\"yet another text\\\":1:4\");\n\n }\n\n _ => assert!(false),\n\n };\n\n match second_token_stream.front_advance() {\n\n Err(err) => match err {\n\n Error::UnexpectedText(text, location) => {\n\n assert_eq!(text, \"9\");\n\n assert_eq!(format!(\"{}\", location), \"\\\"raw text\\\":3:1\");\n\n }\n\n _ => assert!(false),\n\n },\n\n _ => assert!(false),\n\n };\n", "file_path": "lexan/src/lib.rs", "rank": 64, "score": 34213.334433734984 }, { "content": " lrems.0,\n\n (&self.text[start..self.index]).to_string(),\n\n current_location,\n\n )))\n\n } else if lrems.0.len() == 1 && lrems.1 > llm.1 {\n\n self.incr_index_and_location(lrems.1);\n\n Some(Ok(Token {\n\n tag: lrems.0[0],\n\n lexeme: (&self.text[start..self.index]).to_string(),\n\n location: current_location,\n\n }))\n\n } else {\n\n self.incr_index_and_location(llm.1);\n\n Some(Ok(Token {\n\n tag: llm.0,\n\n lexeme: (&self.text[start..self.index]).to_string(),\n\n location: current_location,\n\n }))\n\n }\n\n } else if lrems.0.len() == 1 {\n", "file_path": "lexan/src/analyzer.rs", "rank": 65, "score": 34213.334433734984 }, { "content": " };\n\n assert_eq!(format!(\"{}\", location), \"whatever:10:15\");\n\n let location = Location {\n\n line_number: 9,\n\n offset: 23,\n\n label: \"\".to_string(),\n\n };\n\n assert_eq!(format!(\"{}\", location), \"9:23\");\n\n }\n\n\n\n 
#[test]\n\n fn incr_index_and_location() {\n\n let lexicon = Arc::new(Lexicon::<u32>::new(&[], &[], &[], 0).unwrap());\n\n let mut token_stream = BasicTokenStream {\n\n lexicon: lexicon,\n\n text: \"String\\nwith a new line in it\".to_string(),\n\n location: Location::new(\"whatever\".to_string()),\n\n index: 0,\n\n front: None,\n\n };\n", "file_path": "lexan/src/analyzer.rs", "rank": 66, "score": 34213.334433734984 }, { "content": " if m.end() == largest_end {\n\n matches.push(*tag);\n\n } else if m.end() > largest_end {\n\n largest_end = m.end();\n\n matches = vec![*tag];\n\n }\n\n }\n\n }\n\n (matches, largest_end)\n\n }\n\n\n\n /// Returns `true` if we match the start of the text\n\n pub fn matches(&self, text: &str) -> bool {\n\n for (_, regex) in self.lexemes.iter() {\n\n if regex.find(text).is_some() {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n", "file_path": "lexan/src/matcher.rs", "rank": 67, "score": 34213.334433734984 }, { "content": "impl fmt::Display for Location {\n\n fn fmt(&self, dest: &mut fmt::Formatter) -> fmt::Result {\n\n if self.label.len() > 0 {\n\n if self.label.contains(' ') || self.label.contains('\\t') {\n\n write!(\n\n dest,\n\n \"\\\"{}\\\":{}:{}\",\n\n self.label, self.line_number, self.offset\n\n )\n\n } else {\n\n write!(dest, \"{}:{}:{}\", self.label, self.line_number, self.offset)\n\n }\n\n } else {\n\n write!(dest, \"{}:{}\", self.line_number, self.offset)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Error<T: Display + Copy> {\n", "file_path": "lexan/src/analyzer.rs", "rank": 68, "score": 34213.334433734984 }, { "content": " /// Returns number of skippable bytes at start of `text`.\n\n pub fn skippable_count(&self, text: &str) -> usize {\n\n self.skip_matcher.skippable_count(text)\n\n }\n\n\n\n /// Returns the longest literal match at start of `text`.\n\n pub fn longest_literal_match(&self, text: &str) -> Option<(T, usize)> {\n\n self.literal_matcher.longest_match(text)\n\n }\n\n\n\n /// Returns the longest regular expression match at start of `text`.\n\n pub fn longest_regex_matches(&self, text: &str) -> (Vec<T>, usize) {\n\n self.regex_matcher.longest_matches(text)\n\n }\n\n\n\n /// Returns the distance in bytes to the next valid content in `text`\n\n pub fn distance_to_next_valid_byte(&self, text: &str) -> usize {\n\n for index in 0..text.len() {\n\n if self.literal_matcher.matches(&text[index..]) {\n\n return index;\n", "file_path": "lexan/src/lexicon.rs", "rank": 69, "score": 34213.334433734984 }, { "content": "pub fn report_warning(location: &lexan::Location, what: &str) {\n\n writeln!(stderr(), \"{}: Warning: {}.\", location, what).expect(\"what?\");\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct Specification {\n\n pub symbol_table: SymbolTable,\n\n productions: Vec<Production>,\n\n preamble: String,\n\n pub attribute_type: String,\n\n pub target_type: String,\n\n pub error_count: u32,\n\n pub warning_count: u32,\n\n pub expected_rr_conflicts: u32,\n\n pub expected_sr_conflicts: u32,\n\n}\n\n\n\nimpl lalr1_plus::ReportError<AATerminal> for Specification {}\n\n\n\nimpl Specification {\n", "file_path": "alap_gen/src/grammar.rs", "rank": 70, "score": 33453.918870728994 }, { "content": "pub fn report_error(location: &lexan::Location, what: &str) {\n\n writeln!(stderr(), \"{}: Error: {}.\", location, what).expect(\"what?\");\n\n}\n\n\n", "file_path": "alap_gen/src/grammar.rs", "rank": 71, "score": 33453.918870728994 }, { "content": " Symbol::Terminal(token.clone())\n\n } else if let Some(non_terminal) = 
self.non_terminals.get(name) {\n\n non_terminal.add_used_at(used_at);\n\n Symbol::NonTerminal(non_terminal.clone())\n\n } else {\n\n let non_terminal = NonTerminal::new_used(name, used_at);\n\n self.non_terminals\n\n .insert(name.to_string(), non_terminal.clone());\n\n Symbol::NonTerminal(non_terminal)\n\n }\n\n }\n\n\n\n pub fn error_symbol_used_at(&self, used_at: &lexan::Location) -> Symbol {\n\n self.error_non_terminal.add_used_at(used_at);\n\n Symbol::from(&self.error_non_terminal)\n\n }\n\n\n\n pub fn start_non_terminal_used_at(&self, used_at: &lexan::Location) -> NonTerminal {\n\n self.start_non_terminal.add_used_at(used_at);\n\n self.start_non_terminal.clone()\n", "file_path": "alap_gen/src/symbol.rs", "rank": 72, "score": 32848.532614340125 }, { "content": " .insert(name.to_string(), non_terminal.clone());\n\n Ok(non_terminal)\n\n }\n\n }\n\n\n\n pub fn non_terminals(&self) -> impl Iterator<Item = &NonTerminal> {\n\n self.non_terminals.values()\n\n }\n\n\n\n pub fn undefined_non_terminals(&self) -> impl Iterator<Item = &NonTerminal> {\n\n self.non_terminals.values().filter(|n| n.is_undefined())\n\n }\n\n\n\n pub fn unused_non_terminals(&self) -> impl Iterator<Item = &NonTerminal> {\n\n self.non_terminals.values().filter(|n| n.is_unused())\n\n }\n\n\n\n pub fn symbol_used_at(&mut self, name: &str, used_at: &lexan::Location) -> Symbol {\n\n if let Some(token) = self.tokens.get(name) {\n\n token.add_used_at(used_at);\n", "file_path": "alap_gen/src/symbol.rs", "rank": 73, "score": 32845.51641017153 }, { "content": "// Copyright 2021 Peter Williams <[email protected]> <[email protected]>\n\nuse std::{collections::BTreeMap, fmt};\n\n\n\nuse crate::symbol::non_terminal::NonTerminal;\n\nuse crate::symbol::tag::{Tag, TagOrToken};\n\nuse crate::symbol::terminal::Token;\n\n\n\npub mod non_terminal;\n\npub mod tag;\n\npub mod terminal;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum Associativity {\n\n NonAssoc,\n\n Left,\n\n Right,\n\n}\n\n\n\nimpl Default for Associativity {\n\n fn default() -> Self {\n", "file_path": "alap_gen/src/symbol.rs", "rank": 74, "score": 32844.20226885055 }, { "content": " string += \" Non Terminal Symbols:\\n\";\n\n for non_terminal in self\n\n .used_non_terminal_specials()\n\n .iter()\n\n .chain(self.non_terminals())\n\n {\n\n string += &format!(\n\n \" {}: {}\\n\",\n\n non_terminal.name(),\n\n non_terminal.firsts_data()\n\n );\n\n }\n\n string\n\n }\n\n}\n", "file_path": "alap_gen/src/symbol.rs", "rank": 75, "score": 32842.82016966141 }, { "content": " Associativity::NonAssoc\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Associativity {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n use Associativity::*;\n\n match self {\n\n NonAssoc => write!(f, \"NonAssoc\"),\n\n Left => write!(f, \"Left\"),\n\n Right => write!(f, \"Right\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum Symbol {\n\n Terminal(Token),\n\n NonTerminal(NonTerminal),\n\n}\n", "file_path": "alap_gen/src/symbol.rs", "rank": 76, "score": 32841.3737421238 }, { "content": "\n\nimpl From<&Token> for Symbol {\n\n fn from(token: &Token) -> Self {\n\n Symbol::Terminal(token.clone())\n\n }\n\n}\n\n\n\nimpl From<&NonTerminal> for Symbol {\n\n fn from(non_terminal: &NonTerminal) -> Self {\n\n Symbol::NonTerminal(non_terminal.clone())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Symbol {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Symbol::NonTerminal(non_terminal) => write!(f, \"{}\", 
non_terminal.name()),\n\n Symbol::Terminal(token) => match token {\n\n Token::Literal(token_data) => write!(f, \"{}\", token_data.text),\n\n _ => write!(f, \"{}\", token.name()),\n", "file_path": "alap_gen/src/symbol.rs", "rank": 77, "score": 32841.30698782455 }, { "content": " &self.error_non_terminal\n\n }\n\n\n\n pub fn used_non_terminal_specials(&self) -> Vec<NonTerminal> {\n\n if self.error_non_terminal.is_unused() {\n\n vec![self.start_non_terminal.clone()]\n\n } else {\n\n vec![\n\n self.start_non_terminal.clone(),\n\n self.error_non_terminal.clone(),\n\n ]\n\n }\n\n }\n\n\n\n pub fn new_tag(&mut self, name: &str, defined_at: &lexan::Location) -> Result<Tag, Error> {\n\n let tag = Tag::new(name, defined_at);\n\n if let Some(other) = self.tags.insert(name.to_string(), tag.clone()) {\n\n Err(Error::DuplicateTag(other))\n\n } else {\n\n Ok(tag)\n", "file_path": "alap_gen/src/symbol.rs", "rank": 78, "score": 32840.95407990117 }, { "content": " },\n\n }\n\n }\n\n}\n\n\n\nimpl Symbol {\n\n pub fn is_non_terminal(&self) -> bool {\n\n match self {\n\n Symbol::NonTerminal(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n DuplicateTag(Tag),\n\n DuplicateToken(Token),\n\n DuplicateTokenDefinition(Token),\n\n ConflictsWithToken(Token),\n", "file_path": "alap_gen/src/symbol.rs", "rank": 79, "score": 32840.535157058686 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SymbolTable {\n\n tags: BTreeMap<String, Tag>,\n\n tokens: BTreeMap<String, Token>,\n\n literal_tokens: BTreeMap<String, Token>,\n\n regex_tokens: BTreeMap<String, Token>,\n\n non_terminals: BTreeMap<String, NonTerminal>,\n\n skip_rules: Vec<String>,\n\n last_precedence: u16,\n\n start_non_terminal: NonTerminal,\n\n pub error_non_terminal: NonTerminal,\n\n}\n\n\n\nimpl Default for SymbolTable {\n\n fn default() -> Self {\n", "file_path": "alap_gen/src/symbol.rs", "rank": 80, "score": 32839.680185934034 }, { "content": " let precedence = self.last_precedence + 1;\n\n self.last_precedence = precedence;\n\n for tag_or_token in tag_or_token_list.iter() {\n\n match tag_or_token {\n\n TagOrToken::Tag(tag) => {\n\n tag.set_associativity(associativity);\n\n tag.set_precedence(precedence);\n\n }\n\n TagOrToken::Token(token) => {\n\n token.set_associativity(associativity);\n\n token.set_precedence(precedence);\n\n }\n\n TagOrToken::Invalid => (),\n\n }\n\n }\n\n }\n\n\n\n pub fn description(&self) -> String {\n\n let mut string = \"Symbols:\\n\".to_string();\n\n string += \" Tokens:\\n\";\n", "file_path": "alap_gen/src/symbol.rs", "rank": 81, "score": 32838.943719835625 }, { "content": " }\n\n }\n\n\n\n pub fn get_tag(&self, name: &str) -> Option<&Tag> {\n\n self.tags.get(name)\n\n }\n\n\n\n pub fn unused_tags(&self) -> impl Iterator<Item = &Tag> {\n\n self.tags.values().filter(|t| t.is_unused())\n\n }\n\n\n\n pub fn new_literal_token(\n\n &mut self,\n\n name: &str,\n\n text: &str,\n\n defined_at: &lexan::Location,\n\n ) -> Result<Token, Error> {\n\n let token = Token::new_literal_token(name, text, defined_at);\n\n if let Some(other) = self.tokens.insert(name.to_string(), token.clone()) {\n\n Err(Error::DuplicateToken(other))\n", "file_path": "alap_gen/src/symbol.rs", "rank": 82, "score": 32838.93568368823 }, { "content": " } else if let Some(other) = self.literal_tokens.insert(text.to_string(), token.clone()) {\n\n Err(Error::DuplicateTokenDefinition(other))\n\n } else {\n\n Ok(token)\n\n }\n\n }\n\n\n\n pub fn new_regex_token(\n\n &mut self,\n\n name: &str,\n\n text: 
&str,\n\n defined_at: &lexan::Location,\n\n ) -> Result<Token, Error> {\n\n let token = Token::new_regex_token(name, text, defined_at);\n\n if let Some(other) = self.tokens.insert(name.to_string(), token.clone()) {\n\n Err(Error::DuplicateToken(other))\n\n } else if let Some(other) = self.regex_tokens.insert(text.to_string(), token.clone()) {\n\n Err(Error::DuplicateTokenDefinition(other))\n\n } else {\n\n Ok(token)\n", "file_path": "alap_gen/src/symbol.rs", "rank": 83, "score": 32838.796909807905 }, { "content": " self.literal_tokens.values()\n\n }\n\n\n\n pub fn regex_tokens(&self) -> impl Iterator<Item = &Token> {\n\n self.regex_tokens.values()\n\n }\n\n\n\n pub fn non_terminal_defined_at(\n\n &mut self,\n\n name: &str,\n\n defined_at: &lexan::Location,\n\n ) -> Result<NonTerminal, Error> {\n\n if let Some(non_terminal) = self.non_terminals.get(name) {\n\n non_terminal.add_defined_at(defined_at);\n\n Ok(non_terminal.clone())\n\n } else if let Some(token) = self.tokens.get(name) {\n\n Err(Error::ConflictsWithToken(token.clone()))\n\n } else {\n\n let non_terminal = NonTerminal::new_defined(name, defined_at);\n\n self.non_terminals\n", "file_path": "alap_gen/src/symbol.rs", "rank": 84, "score": 32838.77850547742 }, { "content": " Self {\n\n tags: BTreeMap::new(),\n\n tokens: BTreeMap::new(),\n\n literal_tokens: BTreeMap::new(),\n\n regex_tokens: BTreeMap::new(),\n\n non_terminals: BTreeMap::new(),\n\n skip_rules: Vec::new(),\n\n last_precedence: 0,\n\n start_non_terminal: NonTerminal::new_start(),\n\n error_non_terminal: NonTerminal::new_error(),\n\n }\n\n }\n\n}\n\n\n\nimpl SymbolTable {\n\n pub fn start_non_terminal(&self) -> &NonTerminal {\n\n &self.start_non_terminal\n\n }\n\n\n\n pub fn error_non_terminal(&self) -> &NonTerminal {\n", "file_path": "alap_gen/src/symbol.rs", "rank": 85, "score": 32838.64173558683 }, { "content": " }\n\n\n\n pub fn add_skip_rule(&mut self, skip_rule: &String) -> Result<(), Error> {\n\n if self.skip_rules.contains(skip_rule) {\n\n Err(Error::DuplicateSkipRule(skip_rule.to_string()))\n\n } else {\n\n self.skip_rules.push(skip_rule.to_string());\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn skip_rules(&self) -> impl Iterator<Item = &String> {\n\n self.skip_rules.iter()\n\n }\n\n\n\n pub fn set_precedences(\n\n &mut self,\n\n associativity: Associativity,\n\n tag_or_token_list: &[TagOrToken],\n\n ) {\n", "file_path": "alap_gen/src/symbol.rs", "rank": 86, "score": 32836.74208149899 }, { "content": " )\n\n }\n\n Error::DuplicateTokenDefinition(token) => {\n\n write!(\n\n f,\n\n \"Token \\\"{}\\\" defined at {} has same definition\",\n\n token.name(),\n\n token.defined_at(),\n\n )\n\n }\n\n Error::ConflictsWithToken(token) => {\n\n write!(\n\n f,\n\n \"NonTerminal \\\"{}\\\" conflicts with token defined at {}.\",\n\n token.name(),\n\n token.defined_at(),\n\n )\n\n }\n\n Error::DuplicateSkipRule(string) => {\n\n write!(f, \"Skip rule \\\"{}\\\" already defined.\", string,)\n", "file_path": "alap_gen/src/symbol.rs", "rank": 87, "score": 32836.74208149899 }, { "content": " for token in [Token::EndToken].iter().chain(self.tokens()) {\n\n string += &format!(\n\n \" {}({}): #({}, {})\\n\",\n\n token.name(),\n\n token.text(),\n\n token.associativity(),\n\n token.precedence()\n\n );\n\n }\n\n if self.tags.len() > 0 {\n\n string += \" Tags:\\n\";\n\n for tag in self.tags.values() {\n\n string += &format!(\n\n \" {}: #({}, {})\\n\",\n\n tag.name(),\n\n tag.associativity(),\n\n tag.precedence()\n\n );\n\n }\n\n }\n", "file_path": "alap_gen/src/symbol.rs", "rank": 88, "score": 
32836.74208149899 }, { "content": " DuplicateSkipRule(String),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::DuplicateTag(tag) => {\n\n write!(\n\n f,\n\n \"Tag \\\"{}\\\" already defined at {}\",\n\n tag.name(),\n\n tag.defined_at(),\n\n )\n\n }\n\n Error::DuplicateToken(token) => {\n\n write!(\n\n f,\n\n \"Token \\\"{}\\\" already defined at {}\",\n\n token.name(),\n\n token.defined_at(),\n", "file_path": "alap_gen/src/symbol.rs", "rank": 89, "score": 32836.74208149899 }, { "content": " }\n\n }\n\n\n\n pub fn get_token(&self, name: &str) -> Option<&Token> {\n\n self.tokens.get(name)\n\n }\n\n\n\n pub fn get_literal_token(&self, lexeme: &str) -> Option<&Token> {\n\n self.literal_tokens.get(lexeme)\n\n }\n\n\n\n pub fn tokens(&self) -> impl Iterator<Item = &Token> {\n\n self.tokens.values()\n\n }\n\n\n\n pub fn unused_tokens(&self) -> impl Iterator<Item = &Token> {\n\n self.tokens.values().filter(|t| t.is_unused())\n\n }\n\n\n\n pub fn literal_tokens(&self) -> impl Iterator<Item = &Token> {\n", "file_path": "alap_gen/src/symbol.rs", "rank": 90, "score": 32836.74208149899 }, { "content": " if let Some(token) = self.symbol_table.get_literal_token(lexeme) {\n\n token.add_used_at(location);\n\n aa_lhs = AttributeData::Symbol(token.into());\n\n } else {\n\n self.error(location, &format!(\"{}: unknown literal)\", lexeme));\n\n let symbol = self.symbol_table.error_symbol_used_at(location);\n\n aa_lhs = AttributeData::Symbol(symbol);\n\n }\n\n }\n\n 66 => {\n\n // Symbol: \"%error\" #(NonAssoc, 0)\n\n\n\n let location = aa_rhs[0].location();\n\n let symbol = self.symbol_table.error_symbol_used_at(location);\n\n aa_lhs = AttributeData::Symbol(symbol);\n\n }\n\n _ => aa_inject(String::new(), String::new()),\n\n };\n\n aa_lhs\n\n }\n\n}\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 91, "score": 32348.090536816173 }, { "content": "// generated by alap_gen_ng.\n\n\n\nuse std::{fs::File, io::Read, str::FromStr};\n\n\n\nuse crate::{\n\n attributes::*, grammar::Specification, production::ProductionTail, symbol::tag::TagOrToken,\n\n symbol::Associativity,\n\n};\n\n\n\nuse lazy_static::lazy_static;\n\n\n\nuse std::collections::BTreeSet;\n\n\n\nmacro_rules! 
btree_set {\n\n () => { BTreeSet::new() };\n\n ( $( $x:expr ),* ) => {\n\n {\n\n let mut set = BTreeSet::new();\n\n $( set.insert($x); )*\n\n set\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 92, "score": 32346.739964840443 }, { "content": " let symbol = aa_rhs[0].symbol();\n\n aa_lhs = AttributeData::SymbolList(vec![symbol.clone()]);\n\n }\n\n 63 => {\n\n // SymbolList: SymbolList Symbol #(NonAssoc, 0)\n\n\n\n let symbol = aa_rhs[1].symbol();\n\n aa_lhs.symbol_list_mut().push(symbol.clone());\n\n }\n\n 64 => {\n\n // Symbol: IDENT #(NonAssoc, 0)\n\n\n\n let (name, location) = aa_rhs[0].text_and_location();\n\n let symbol = self.symbol_table.symbol_used_at(name, location);\n\n aa_lhs = AttributeData::Symbol(symbol);\n\n }\n\n 65 => {\n\n // Symbol: LITERAL #(NonAssoc, 0)\n\n\n\n let (lexeme, location) = aa_rhs[0].text_and_location();\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 93, "score": 32346.70581139538 }, { "content": " }\n\n };\n\n ( $( $x:expr ),+ , ) => {\n\n btree_set![ $( $x ), * ]\n\n };\n\n}\n\n\n\nuse lalr1_plus;\n\nuse lexan;\n\n\n\n#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]\n\npub enum AATerminal {\n\n AAEnd,\n\n ACTION,\n\n ATTR,\n\n COLON,\n\n DOT,\n\n ERROR,\n\n IDENT,\n\n INJECT,\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 94, "score": 32346.20387097211 }, { "content": " AATerminal::SKIP => write!(f, r###\"\"%skip\"\"###),\n\n AATerminal::SR => write!(f, r###\"\"%shift_reduce\"\"###),\n\n AATerminal::TARGET => write!(f, r###\"\"%target\"\"###),\n\n AATerminal::TOKEN => write!(f, r###\"\"%token\"\"###),\n\n AATerminal::VBAR => write!(f, r###\"\"|\"\"###),\n\n }\n\n }\n\n}\n\n\n\nlazy_static! {\n\n static ref AALEXAN: lexan::LexicalAnalyzer<AATerminal> = {\n\n use AATerminal::*;\n\n lexan::LexicalAnalyzer::new(\n\n &[\n\n (NEWSECTION, r###\"%%\"###),\n\n (ATTR, r###\"%attr\"###),\n\n (ERROR, r###\"%error\"###),\n\n (INJECT, r###\"%inject\"###),\n\n (LEFT, r###\"%left\"###),\n\n (NONASSOC, r###\"%nonassoc\"###),\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 95, "score": 32344.94357820871 }, { "content": " let text = aa_rhs[0].matched_text();\n\n aa_lhs = AttributeData::Predicate(text[2..text.len() - 2].to_string());\n\n }\n\n 60 => {\n\n // TaggedPrecedence: \"%prec\" IDENT #(NonAssoc, 0)\n\n\n\n let (name, location) = aa_rhs[1].text_and_location();\n\n if let Some(tag) = self.symbol_table.get_tag(name) {\n\n tag.add_used_at(location);\n\n aa_lhs = AttributeData::AssociativityAndPrecedence(\n\n tag.associativity(),\n\n tag.precedence(),\n\n );\n\n } else if let Some(token) = self.symbol_table.get_token(name) {\n\n aa_lhs = AttributeData::AssociativityAndPrecedence(\n\n token.associativity(),\n\n token.precedence(),\n\n );\n\n } else {\n\n self.error(location, &format!(\"{}: unknown tag\", name));\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 96, "score": 32344.277961540876 }, { "content": " 105 => btree_set![DOT, VBAR, ACTION],\n\n 106 => btree_set![DOT, VBAR, ACTION],\n\n 107 => btree_set![INJECT, LEFT, NEWSECTION, NONASSOC, RIGHT, IDENT, LITERAL],\n\n 108 => btree_set![DOT, VBAR],\n\n _ => panic!(\"illegal state: {}\", state),\n\n };\n\n }\n\n\n\n fn next_action(\n\n &self,\n\n aa_state: u32,\n\n aa_attributes: &lalr1_plus::ParseStack<AATerminal, AANonTerminal, AttributeData>,\n\n aa_token: &lexan::Token<AATerminal>,\n\n ) -> lalr1_plus::Action {\n\n use lalr1_plus::Action;\n\n use AATerminal::*;\n\n let aa_tag = *aa_token.tag();\n\n return match aa_state {\n\n 0 => match aa_tag {\n\n INJECT => 
Action::Shift(4),\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 97, "score": 32344.113114788794 }, { "content": " _ => Action::SyntaxError,\n\n },\n\n 90 => match aa_tag {\n\n ACTION => Action::Shift(70),\n\n // ProductionTail: SymbolList TaggedPrecedence #(NonAssoc, 0)\n\n DOT | VBAR => Action::Reduce(55),\n\n _ => Action::SyntaxError,\n\n },\n\n 91 => match aa_tag {\n\n // ProductionTail: SymbolList Action #(NonAssoc, 0)\n\n DOT | VBAR => Action::Reduce(56),\n\n _ => Action::SyntaxError,\n\n },\n\n 92 => match aa_tag {\n\n IDENT => Action::Shift(105),\n\n LITERAL => Action::Shift(106),\n\n _ => Action::SyntaxError,\n\n },\n\n 93 => match aa_tag {\n\n // SymbolList: SymbolList Symbol #(NonAssoc, 0)\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 98, "score": 32343.687157086955 }, { "content": " }\n\n 53 => {\n\n // ProductionTail: SymbolList Predicate #(NonAssoc, 0)\n\n\n\n let tail = ProductionTail::new(\n\n aa_rhs[0].symbol_list(),\n\n Some(aa_rhs[1].predicate()),\n\n None,\n\n None,\n\n );\n\n aa_lhs = AttributeData::ProductionTail(tail)\n\n }\n\n 54 => {\n\n // ProductionTail: SymbolList TaggedPrecedence Action #(NonAssoc, 0)\n\n\n\n let tail = ProductionTail::new(\n\n aa_rhs[0].symbol_list(),\n\n None,\n\n Some(aa_rhs[1].associativity_and_precedence()),\n\n Some(aa_rhs[2].action()),\n", "file_path": "alap_gen/src/bootstrap.rs", "rank": 99, "score": 32343.61202730791 } ]
Rust
src/main.rs
romanz/trezor-sq
77b426a0ac54b348556fb0ed6baa2d493e8e693c
use std::ffi::OsString; use std::io; use std::path::Path; extern crate clap; extern crate fern; extern crate sequoia_openpgp as openpgp; extern crate subprocess; extern crate trezor; #[macro_use] extern crate log; use openpgp::armor; use openpgp::constants::{HashAlgorithm, PublicKeyAlgorithm}; use openpgp::crypto::{self, mpis}; use openpgp::packet::Key; use openpgp::parse::Parse; use openpgp::serialize::stream; use openpgp::TPK; fn handle_interaction<T, R: trezor::TrezorMessage>( resp: trezor::TrezorResponse<T, R>, ) -> Result<T, trezor::Error> { match resp { trezor::TrezorResponse::Ok(res) => Ok(res), trezor::TrezorResponse::Failure(_) => resp.ok(), trezor::TrezorResponse::ButtonRequest(req) => handle_interaction(req.ack()?), trezor::TrezorResponse::PinMatrixRequest(_req) => panic!("TREZOR is locked"), trezor::TrezorResponse::PassphraseRequest(_req) => panic!("TREZOR has passphrase"), trezor::TrezorResponse::PassphraseStateRequest(_req) => panic!("TREZOR has passphrase"), } } struct ExternalSigner { sigkey: Key, userid: String, } impl ExternalSigner { pub fn from_file(path: &Path, user_id: &str) -> openpgp::Result<Self> { let tpk = TPK::from_file(path)?; if tpk .userids() .find(|u| u.userid().value() == user_id.as_bytes()) .is_none() { let msg = format!("{:?} has no user ID {}", path, user_id); return Err(openpgp::Error::UnsupportedTPK(msg).into()); } let (_sig, _rev, key) = tpk .keys_valid() .signing_capable() .next() .expect("no valid signing key"); let userid_str = String::from_utf8( tpk.userids() .next() .expect("no user IDs") .userid() .value() .to_vec(), )?; Ok(ExternalSigner { sigkey: key.clone(), userid: userid_str, }) } } impl crypto::Signer for ExternalSigner { fn public(&self) -> &Key { &self.sigkey } fn sign( &mut self, hash_algo: HashAlgorithm, digest: &[u8], ) -> openpgp::Result<mpis::Signature> { match hash_algo { HashAlgorithm::SHA256 | HashAlgorithm::SHA512 => (), _ => return Err(openpgp::Error::UnsupportedHashAlgorithm(hash_algo).into()), } let mut digest = digest.to_vec(); assert!(digest.len() >= 32); let curve = match self.sigkey.pk_algo() { PublicKeyAlgorithm::EdDSA => "ed25519", PublicKeyAlgorithm::ECDSA => { digest.split_off(32); "nist256p1" } _ => { return Err( openpgp::Error::UnsupportedPublicKeyAlgorithm(self.sigkey.pk_algo()).into(), ) } }; let mut identity = trezor::protos::IdentityType::new(); identity.set_host(self.userid.to_owned()); identity.set_proto("gpg".to_owned()); let mut trezor = trezor::unique(false)?; trezor.init_device()?; let sig = handle_interaction(trezor.sign_identity(identity, digest, curve.to_owned())?)?; if sig.len() != 65 { return Err(openpgp::Error::BadSignature(format!( "invalid signature size: {}", sig.len() )) .into()); } Ok(mpis::Signature::ECDSA { r: mpis::MPI::new(&sig[1..33]), s: mpis::MPI::new(&sig[33..]), }) } } fn main() { let matches = clap::App::new("OpenPGP git wrapper for TREZOR") .arg( clap::Arg::with_name("userid") .short("u") .value_name("USERID") .help("User ID for signature") .takes_value(true), ) .arg( clap::Arg::with_name("detached") .short("b") .help("Make a detached signature"), ) .arg( clap::Arg::with_name("sign") .short("s") .help("Sign message from stdin"), ) .arg( clap::Arg::with_name("verify") .long("verify") .takes_value(true) .help("Verify signature"), ) .arg( clap::Arg::with_name("armor") .short("a") .help("Output armored signature"), ) .arg( clap::Arg::with_name("status_fd") .long("status-fd") .takes_value(true) .help("File descriptor for status messages"), ) .arg( clap::Arg::with_name("keyid_format") 
.long("keyid-format") .default_value("long") .takes_value(true) .help("TODO"), ) .arg(clap::Arg::with_name("file").index(1).required(false)) .get_matches(); let home_dir: OsString = std::env::var_os("GNUPGHOME").expect("GNUPGHOME is not set"); let pubkey_path = std::path::Path::new(&home_dir).join("trezor.asc"); trace!("pubkey_path = {:?}", pubkey_path); if matches.is_present("sign") { let userid = matches.value_of("userid").expect("missing USERID"); trace!("userid = {:?}", userid); assert!(matches.is_present("detached")); assert!(matches.is_present("armor")); assert_eq!(matches.value_of("status_fd").unwrap_or("2"), "2"); let mut signer = ExternalSigner::from_file(&pubkey_path, userid).expect("no ExternalSigner signer"); let signers: Vec<&mut dyn crypto::Signer> = vec![&mut signer]; let sink = armor::Writer::new(io::stdout(), armor::Kind::Signature, &[]) .expect("Failed to create an armored writer."); let mut signer = stream::Signer::detached(stream::Message::new(sink), signers, None) .expect("Failed to create detached signer"); io::copy(&mut io::stdin(), &mut signer).expect("Failed to sign data"); signer.finalize().expect("Failed to write data"); eprintln!("\n[GNUPG:] SIG_CREATED "); return; } if matches.is_present("verify") { assert_eq!(matches.value_of("status_fd").unwrap_or("1"), "1"); assert_eq!(matches.value_of("file").expect("missing input file"), "-"); let sigfile = matches.value_of("verify").expect("missing signature"); let result = subprocess::Exec::cmd("/home/roman/Code/sequoia/target/debug/sqv") .arg("--keyring") .arg(&pubkey_path) .arg(sigfile) .arg("/dev/stdin") .capture() .expect("Popen failed"); if result.success() { println!("\n[GNUPG:] GOODSIG "); eprint!("✓ "); std::process::exit(0); } else { println!("\n[GNUPG:] BADSIG "); eprint!("✗ "); std::process::exit(1); } } panic!("unsupported command: {:?}", matches); }
use std::ffi::OsString; use std::io; use std::path::Path; extern crate clap; extern crate fern; extern crate sequoia_openpgp as openpgp; extern crate subprocess; extern crate trezor; #[macro_use] extern crate log; use openpgp::armor; use openpgp::constants::{HashAlgorithm, PublicKeyAlgorithm}; use openpgp::crypto::{self, mpis}; use openpgp::packet::Key; use openpgp::parse::Parse; use openpgp::serialize::stream; use openpgp::TPK; fn handle_interaction<T, R: trezor::TrezorMessage>( resp: trezor::TrezorResponse<T, R>, ) -> Result<T, trezor::Error> { match resp { trezor::TrezorResponse::Ok(res) => Ok(res), trezor::TrezorResponse::Failure(_) => resp.ok(), trezor::TrezorResponse::ButtonRequest(req) => handle_interaction(req.ack()?), trezor::TrezorResponse::PinMatrixRequest(_req) => panic!("TREZOR is locked"), trezor::TrezorResponse::PassphraseRequest(_req) => panic!("TREZOR has passphrase"), trezor::TrezorResponse::PassphraseStateRequest(_req) => panic!("TREZOR has passphrase"), } } struct ExternalSigner { sigkey: Key, userid: String, } impl ExternalSigner { pub fn from_file(path: &Path, user_id: &str) -> openpgp::Result<Self> { let tpk = TPK::from_file(path)?; if tpk .userids() .find(|u| u.userid().value() == user_id.as_bytes()) .is_none() { let msg = format!("{:?} has no user ID {}", path, user_id); return Err(openpgp::Error::UnsupportedTPK(msg).into()); } let (_sig, _rev, key) = tpk .keys_valid() .signing_capable() .next() .expect("no valid signing key"); let userid_str = String::from_utf8( tpk.userids() .next() .expect("no user IDs") .userid() .value() .to_vec(), )?; Ok(ExternalSigner { sigkey: key.clone(), userid: userid_str, }) } } impl crypto::Signer for ExternalSigner { fn public(&self) -> &Key { &self.sigkey }
} fn main() { let matches = clap::App::new("OpenPGP git wrapper for TREZOR") .arg( clap::Arg::with_name("userid") .short("u") .value_name("USERID") .help("User ID for signature") .takes_value(true), ) .arg( clap::Arg::with_name("detached") .short("b") .help("Make a detached signature"), ) .arg( clap::Arg::with_name("sign") .short("s") .help("Sign message from stdin"), ) .arg( clap::Arg::with_name("verify") .long("verify") .takes_value(true) .help("Verify signature"), ) .arg( clap::Arg::with_name("armor") .short("a") .help("Output armored signature"), ) .arg( clap::Arg::with_name("status_fd") .long("status-fd") .takes_value(true) .help("File descriptor for status messages"), ) .arg( clap::Arg::with_name("keyid_format") .long("keyid-format") .default_value("long") .takes_value(true) .help("TODO"), ) .arg(clap::Arg::with_name("file").index(1).required(false)) .get_matches(); let home_dir: OsString = std::env::var_os("GNUPGHOME").expect("GNUPGHOME is not set"); let pubkey_path = std::path::Path::new(&home_dir).join("trezor.asc"); trace!("pubkey_path = {:?}", pubkey_path); if matches.is_present("sign") { let userid = matches.value_of("userid").expect("missing USERID"); trace!("userid = {:?}", userid); assert!(matches.is_present("detached")); assert!(matches.is_present("armor")); assert_eq!(matches.value_of("status_fd").unwrap_or("2"), "2"); let mut signer = ExternalSigner::from_file(&pubkey_path, userid).expect("no ExternalSigner signer"); let signers: Vec<&mut dyn crypto::Signer> = vec![&mut signer]; let sink = armor::Writer::new(io::stdout(), armor::Kind::Signature, &[]) .expect("Failed to create an armored writer."); let mut signer = stream::Signer::detached(stream::Message::new(sink), signers, None) .expect("Failed to create detached signer"); io::copy(&mut io::stdin(), &mut signer).expect("Failed to sign data"); signer.finalize().expect("Failed to write data"); eprintln!("\n[GNUPG:] SIG_CREATED "); return; } if matches.is_present("verify") { assert_eq!(matches.value_of("status_fd").unwrap_or("1"), "1"); assert_eq!(matches.value_of("file").expect("missing input file"), "-"); let sigfile = matches.value_of("verify").expect("missing signature"); let result = subprocess::Exec::cmd("/home/roman/Code/sequoia/target/debug/sqv") .arg("--keyring") .arg(&pubkey_path) .arg(sigfile) .arg("/dev/stdin") .capture() .expect("Popen failed"); if result.success() { println!("\n[GNUPG:] GOODSIG "); eprint!("✓ "); std::process::exit(0); } else { println!("\n[GNUPG:] BADSIG "); eprint!("✗ "); std::process::exit(1); } } panic!("unsupported command: {:?}", matches); }
fn sign( &mut self, hash_algo: HashAlgorithm, digest: &[u8], ) -> openpgp::Result<mpis::Signature> { match hash_algo { HashAlgorithm::SHA256 | HashAlgorithm::SHA512 => (), _ => return Err(openpgp::Error::UnsupportedHashAlgorithm(hash_algo).into()), } let mut digest = digest.to_vec(); assert!(digest.len() >= 32); let curve = match self.sigkey.pk_algo() { PublicKeyAlgorithm::EdDSA => "ed25519", PublicKeyAlgorithm::ECDSA => { digest.split_off(32); "nist256p1" } _ => { return Err( openpgp::Error::UnsupportedPublicKeyAlgorithm(self.sigkey.pk_algo()).into(), ) } }; let mut identity = trezor::protos::IdentityType::new(); identity.set_host(self.userid.to_owned()); identity.set_proto("gpg".to_owned()); let mut trezor = trezor::unique(false)?; trezor.init_device()?; let sig = handle_interaction(trezor.sign_identity(identity, digest, curve.to_owned())?)?; if sig.len() != 65 { return Err(openpgp::Error::BadSignature(format!( "invalid signature size: {}", sig.len() )) .into()); } Ok(mpis::Signature::ECDSA { r: mpis::MPI::new(&sig[1..33]), s: mpis::MPI::new(&sig[33..]), }) }
function_block-full_function
[]
Rust
src/crypto.rs
SerhoLiu/eakio
aa7366878294a2525f8c0d32dc0079c1e8c605ee
use std::fmt; use std::io; use std::result; use ring::{aead, digest, hkdf, hmac}; use ring::rand::{SecureRandom, SystemRandom}; static CIPHER: &'static aead::Algorithm = &aead::AES_256_GCM; static DIGEST: &'static digest::Algorithm = &digest::SHA256; pub type Result<T> = result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Error { GenSalt, SaltLenNotMatch(usize), OpenKey, SealKey, SealBufferTooSmall(usize), Open, Seal, } pub struct Salt { len: usize, bytes: [u8; digest::MAX_OUTPUT_LEN], } impl Salt { pub fn new() -> Result<Salt> { let len = Salt::len(); let mut bytes = [0u8; digest::MAX_OUTPUT_LEN]; let rng = SystemRandom::new(); rng.fill(&mut bytes[..len]).map_err(|_| Error::GenSalt)?; Ok(Salt { len, bytes }) } pub fn from_bytes(bytes: &[u8]) -> Result<Salt> { let len = Salt::len(); if bytes.len() != len { return Err(Error::SaltLenNotMatch(len)); } let mut buf = [0u8; digest::MAX_OUTPUT_LEN]; buf[..len].copy_from_slice(bytes); Ok(Salt { len, bytes: buf }) } #[inline] pub fn len() -> usize { hmac::recommended_key_len(DIGEST) } #[inline] pub fn get_bytes(&self) -> &[u8] { &self.bytes[..self.len] } #[inline] fn get_signing_key(&self) -> hmac::SigningKey { hmac::SigningKey::new(DIGEST, &self.bytes[..self.len]) } } const INFO_KEY: &str = "hello kelsi"; #[allow(dead_code)] pub struct Crypto { tag_len: usize, key_len: usize, nonce_len: usize, open_key: aead::OpeningKey, open_nonce: Vec<u8>, seal_key: aead::SealingKey, seal_nonce: Vec<u8>, } impl Crypto { pub fn new(secret: &[u8], salt: &Salt) -> Result<Crypto> { let key_len = CIPHER.key_len(); let mut key = Vec::with_capacity(key_len); unsafe { key.set_len(key_len); } hkdf::extract_and_expand( &salt.get_signing_key(), secret, INFO_KEY.as_bytes(), &mut key, ); let open_key = aead::OpeningKey::new(CIPHER, &key).map_err(|_| Error::OpenKey)?; let seal_key = aead::SealingKey::new(CIPHER, &key).map_err(|_| Error::SealKey)?; let nonce_len = CIPHER.nonce_len(); Ok(Crypto { tag_len: CIPHER.tag_len(), key_len: CIPHER.key_len(), nonce_len: CIPHER.nonce_len(), open_key, open_nonce: vec![0u8; nonce_len], seal_key, seal_nonce: vec![0u8; nonce_len], }) } #[inline] pub fn tag_len() -> usize { CIPHER.tag_len() } pub fn encrypt(&mut self, inout: &mut [u8], in_len: usize) -> Result<usize> { let out_len = in_len + self.tag_len; if inout.len() < out_len { return Err(Error::SealBufferTooSmall(out_len)); } match aead::seal_in_place( &self.seal_key, &self.seal_nonce, &[], &mut inout[..out_len], self.tag_len, ) { Ok(outlen) => debug_assert_eq!(out_len, outlen), Err(_) => return Err(Error::Seal), }; incr_nonce(&mut self.seal_nonce); Ok(out_len) } #[inline] pub fn decrypt(&mut self, inout: &mut [u8]) -> Result<usize> { match aead::open_in_place(&self.open_key, &self.open_nonce, &[], 0, inout) { Ok(buf) => { incr_nonce(&mut self.open_nonce); Ok(buf.len()) } Err(_) => Err(Error::Open), } } } fn incr_nonce(nonce: &mut [u8]) { for byte in nonce.iter_mut() { let (sum, overflow) = (*byte).overflowing_add(1); *byte = sum; if !overflow { break; } } } impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::GenSalt => write!(fmt, "generate salt error"), Error::SaltLenNotMatch(need) => write!(fmt, "salt length not match, need {}", need), Error::OpenKey => write!(fmt, "crypto ring open key error"), Error::SealKey => write!(fmt, "crypto ring seal key error"), Error::SealBufferTooSmall(need) => { write!(fmt, "crypto seal inout buffer too small, need {}", need) } Error::Open => write!(fmt, "crypto decrypt 
error"), Error::Seal => write!(fmt, "crypto encrypt error"), } } } impl From<Error> for io::Error { fn from(err: Error) -> io::Error { io::Error::new(io::ErrorKind::Other, format!("{}", err)) } } #[cfg(test)] mod test { use super::{Crypto, Error, Salt}; #[test] fn test_incr_nonce() { let mut nonce = [0u8; 4]; for i in 1..1024 { super::incr_nonce(&mut nonce); let x = (nonce[0] as usize) + ((nonce[1] as usize) << 8) + ((nonce[2] as usize) << 16) + ((nonce[3] as usize) << 24); assert_eq!(x, i); } } #[test] fn test_crypto_normal() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let plain_len: usize = 24; let out_len = crypto.encrypt(&mut buf[..], plain_len).unwrap(); assert_eq!(out_len, plain_len + Crypto::tag_len()); assert!(buf[out_len..].iter().all(|&x| x == 0)); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(plain_len, len); assert!(buf[..plain_len].iter().all(|&x| x == 0)); } #[test] fn test_crypto_zerosize() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let out_len = crypto.encrypt(&mut buf[..], 0).unwrap(); assert_eq!(out_len, 0 + Crypto::tag_len()); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(0, len); } #[test] fn test_crypto_multi_buf() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf1 = [0u8; 128]; let plain_len1: usize = 24; let mut buf2 = [1u8; 128]; let plain_len2: usize = 37; crypto.encrypt(&mut buf1[..], plain_len1).unwrap(); let out_len2 = crypto.encrypt(&mut buf2[..], plain_len2).unwrap(); let err = crypto.decrypt(&mut buf2[..out_len2]).unwrap_err(); assert_eq!(err, Error::Open); let mut crypto1 = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf3 = [0u8; 128]; let plain_len3: usize = 24; let mut buf4 = [2u8; 128]; let plain_len4: usize = 24; let out_len3 = crypto1.encrypt(&mut buf3[..], plain_len3).unwrap(); let out_len4 = crypto1.encrypt(&mut buf4[..], plain_len4).unwrap(); crypto1.decrypt(&mut buf3[..out_len3]).unwrap(); assert!(buf3[..plain_len3].iter().all(|&x| x == 0)); crypto1.decrypt(&mut buf4[..out_len4]).unwrap(); assert!(buf4[..plain_len4].iter().all(|&x| x == 2)); } }
use std::fmt; use std::io; use std::result; use ring::{aead, digest, hkdf, hmac}; use ring::rand::{SecureRandom, SystemRandom}; static CIPHER: &'static aead::Algorithm = &aead::AES_256_GCM; static DIGEST: &'static digest::Algorithm = &digest::SHA256; pub type Result<T> = result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq)] pub enum Error { GenSalt, SaltLenNotMatch(usize), OpenKey, SealKey, SealBufferTooSmall(usize), Open, Seal, } pub struct Salt { len: usize, bytes: [u8; digest::MAX_OUTPUT_LEN], } impl Salt { pub fn new() -> Result<Salt> { let len = Salt::len(); let mut bytes = [0u8; digest::MAX_OUTPUT_LEN]; let rng = SystemRandom::new(); rng.fill(&mut bytes[..len]).map_err(|_| Error::GenSalt)?; Ok(Salt { len, bytes }) } pub fn from_bytes(bytes: &[u8]) -> Result<Salt> { let len = Salt::len(); if bytes.len() != len { return Err(Error::SaltLenNotMatch(len)); } let mut buf = [0u8; digest::MAX_OUTPUT_LEN]; buf[..len].copy_from_slice(bytes); Ok(Salt { len, bytes: buf }) } #[inline] pub fn len() -> usize { hmac::recommended_key_len(DIGEST) } #[inline] pub fn get_bytes(&self) -> &[u8] { &self.bytes[..self.len] } #[inline] fn get_signing_key(&self) -> hmac::SigningKey { hmac::SigningKey::new(DIGEST, &self.bytes[..self.len]) } } const INFO_KEY: &str = "hello kelsi"; #[allow(dead_code)] pub struct Crypto { tag_len: usize, key_len: usize, nonce_len: usize, open_key: aead::OpeningKey, open_nonce: Vec<u8>, seal_key: aead::SealingKey, seal_nonce: Vec<u8>, } impl Crypto { pub fn new(secret: &[u8], salt: &Salt) -> Result<Crypto> { let key_len = CIPHER.key_len(); let mut key = Vec::with_capacity(key_len); unsafe { key.set_len(key_len); } hkdf::extract_and_expand( &salt.get_signing_key(), secret, INFO_KEY.as_bytes(), &mut key, ); let open_key = aead::OpeningKey::new(CIPHER, &key).map_err(|_| Error::OpenKey)?; let seal_key = aead::SealingKey::new(CIPHER, &key).map_err(|_| Error::SealKey)?; let nonce_len = CIPHER.nonce_len(); Ok(Crypto { tag_len: CIPHER.tag_len(), key_len: CIPHER.key_len(), nonce_len: CIPHER.nonce_len(), open_key, open_nonce: vec![0u8; nonce_len], seal_key, seal_nonce: vec![0u8; nonce_len], }) } #[inline] pub fn tag_len() -> usize { CIPHER.tag_len() } pub fn encrypt(&mut self, inout: &mut [u8], in_len: usize) -> Result<usize> { let out_len = in_len + self.tag_len; if inout.len() < out_len { return Err(Error::SealBufferTooSmall(out_len)); } match aead::seal_in_place( &self.seal_key, &self.seal_nonce, &[], &mut inout[..out_len], self.tag_len, ) { Ok(outlen) => debug_assert_eq!(out_len, outlen), Err(_) => return Err(Error::Seal), }; incr_nonce(&mut self.seal_nonce); Ok(out_len) } #[inline] pub fn decrypt(&mut self, inout: &mut [u8]) -> Result<usize> { match aead::open_in_place(&self.open_key, &self.open_nonce, &[], 0, inout) { Ok(buf) => { incr_nonce(&mut self.open_nonce); Ok(buf.len()) } Err(_) => Err(Error::Open), } } } fn incr_nonce(nonce: &mut [u8]) { for byte in nonce.iter_mut() { let (sum, overflow) = (*byte).overflowing_add(1); *byte = sum; if !overflow { break; } } } impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Error::GenSalt => write!(fmt, "generate salt error"), Error::SaltLenNotMatch(need) => write!(fmt, "salt length not match, need {}", need), Error::OpenKey => write!(fmt, "crypto ring open key error"), Error::SealKey => write!(fmt, "crypto ring seal key error"), Error::SealBufferTooSmall(need) => { write!(fmt, "crypto seal inout buffer too small, need {}", need) } Error::Open => write!(fmt, "crypto decrypt 
error"), Error::Seal => write!(fmt, "crypto encrypt error"), } } } impl From<Error> for io::Error { fn from(err: Error) -> io::Error { io::Error::new(io::ErrorKind::Other, format!("{}", err)) } } #[cfg(test)] mod test { use super::{Crypto, Error, Salt}; #[test] fn test_incr_nonce() { let mut nonce = [0u8; 4]; for i in 1..1024 { super::incr_nonce(&mut nonce); let x = (nonce[0] as usize) + ((nonce[1] as usize) << 8) + ((nonce[2] as usize) << 16) + ((nonce[3] as usize) << 24); assert_eq!(x, i); } } #[test] fn test_crypto_normal() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let plain_len: usize = 24; let out_len = crypto.encrypt(&mut buf[..], plain_len).unwrap(); assert_eq!(out_len, plain_len + Crypto::tag_len()); assert!(buf[out_len..].iter().all(|&x| x == 0)); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(plain_len, len); assert!(buf[..plain_len].iter().all(|&x| x == 0)); } #[test] fn test_crypto_zerosize() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf = [0u8; 128]; let out_len = crypto.encrypt(&mut buf[..], 0).unwrap(); assert_eq!(out_len, 0 + Crypto::tag_len()); let len = crypto.decrypt(&mut buf[..out_len]).unwrap(); assert_eq!(0, len); } #[test] fn test_crypto_multi_buf() { let salt = Salt::new().unwrap(); let mut crypto = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf1 = [0u8; 128]; let plain_len1: usize = 24; let mut buf2 = [1u8; 12
}
8]; let plain_len2: usize = 37; crypto.encrypt(&mut buf1[..], plain_len1).unwrap(); let out_len2 = crypto.encrypt(&mut buf2[..], plain_len2).unwrap(); let err = crypto.decrypt(&mut buf2[..out_len2]).unwrap_err(); assert_eq!(err, Error::Open); let mut crypto1 = Crypto::new(&[0u8; 8], &salt).unwrap(); let mut buf3 = [0u8; 128]; let plain_len3: usize = 24; let mut buf4 = [2u8; 128]; let plain_len4: usize = 24; let out_len3 = crypto1.encrypt(&mut buf3[..], plain_len3).unwrap(); let out_len4 = crypto1.encrypt(&mut buf4[..], plain_len4).unwrap(); crypto1.decrypt(&mut buf3[..out_len3]).unwrap(); assert!(buf3[..plain_len3].iter().all(|&x| x == 0)); crypto1.decrypt(&mut buf4[..out_len4]).unwrap(); assert!(buf4[..plain_len4].iter().all(|&x| x == 2)); }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn io_error(desc: &str) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, desc)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::env;\n\n\n\n #[test]\n\n fn test_expand_tilde_path() {\n\n let old_home = env::var(\"HOME\").ok();\n\n env::set_var(\"HOME\", \"/home/morty\");\n\n\n\n assert_eq!(\"/home/morty\", super::expand_tilde_path(\"~\"));\n\n assert_eq!(\"/home/morty/rick\", super::expand_tilde_path(\"~/rick\"));\n\n assert_eq!(\"~rick\", super::expand_tilde_path(\"~rick\"));\n\n assert_eq!(\"/home\", super::expand_tilde_path(\"/home\"));\n\n\n\n if let Some(old) = old_home {\n\n env::set_var(\"HOME\", old);\n\n }\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 0, "score": 130740.71649091828 }, { "content": "/// expand path like ~/xxx\n\npub fn expand_tilde_path(path: &str) -> Cow<str> {\n\n if !path.starts_with('~') {\n\n return path.into();\n\n }\n\n\n\n let path_after_tilde = &path[1..];\n\n if path_after_tilde.is_empty() || path_after_tilde.starts_with('/') {\n\n if let Some(hd) = env::home_dir() {\n\n let result = format!(\"{}{}\", hd.display(), path_after_tilde);\n\n result.into()\n\n } else {\n\n // home dir is not available\n\n path.into()\n\n }\n\n } else {\n\n // we cannot handle `~otheruser/` paths yet\n\n path.into()\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 2, "score": 84882.55571433855 }, { "content": "type Result<T> = result::Result<T, Error>;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n NotFile,\n\n Skip,\n\n Exists,\n\n Io(io::Error),\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Mode {\n\n Encrypt,\n\n Decrypt,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Task {\n\n pub src: PathBuf,\n\n pub dest: PathBuf,\n", "file_path": "src/task.rs", "rank": 3, "score": 71182.79402875078 }, { "content": "pub fn init_logger() {\n\n let format = |record: &LogRecord| {\n\n let now = time::now();\n\n let ms = now.tm_nsec / 1000 / 1000;\n\n let t = time::strftime(\"%Y-%m-%d %T\", &now).unwrap();\n\n format!(\n\n \"{}.{:03} [{}] {}\",\n\n t,\n\n ms,\n\n ColorLevel(record.level()),\n\n record.args()\n\n )\n\n };\n\n\n\n let mut builder = LogBuilder::new();\n\n builder.format(format).filter(None, LogLevelFilter::Info);\n\n\n\n if env::var(\"RUST_LOG\").is_ok() {\n\n builder.parse(&env::var(\"RUST_LOG\").unwrap());\n\n }\n\n\n\n if env::var(\"EAKIO_LOG\").is_ok() {\n\n builder.parse(&env::var(\"EAKIO_LOG\").unwrap());\n\n }\n\n\n\n builder.init().unwrap();\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 4, "score": 62069.70085206961 }, { "content": "// calc crypto in_size data out size\n\nfn crypto_data_size(in_size: usize) -> usize {\n\n let nblock = if in_size == 0 {\n\n 1\n\n } else {\n\n (in_size - 1) / BLOCK_SIZE + 1\n\n };\n\n\n\n let tag_size = nblock * Crypto::tag_len();\n\n\n\n in_size + tag_size\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_crypto_data_size() {\n\n let salt = Salt::new().unwrap();\n\n let mut crypto = Crypto::new(&[0u8; 16], &salt).unwrap();\n", "file_path": "src/file.rs", "rank": 5, "score": 61895.42597536309 }, { "content": "pub fn command() -> io::Result<()> {\n\n let args: Args = Docopt::new(USAGE)\n\n .and_then(|d| d.deserialize())\n\n .unwrap_or_else(|e| e.exit());\n\n\n\n if args.flag_version {\n\n println!(\"{}\", VERSION);\n\n Ok(())\n\n } else {\n\n command_crypt(&args)\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 6, "score": 56104.73310916609 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Args {\n\n flag_skip: bool,\n\n 
flag_overwrite: bool,\n\n flag_hidden: bool,\n\n flag_dryrun: bool,\n\n flag_parallel: i32,\n\n flag_version: bool,\n\n arg_src: Vec<String>,\n\n arg_dest: String,\n\n cmd_encrypt: bool,\n\n cmd_decrypt: bool,\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 7, "score": 37943.4979650574 }, { "content": "#[derive(Debug)]\n\nstruct PathGroup {\n\n path: PathBuf,\n\n is_file: bool,\n\n subs: Vec<PathBuf>,\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 8, "score": 36685.801591738316 }, { "content": "struct ColorLevel(LogLevel);\n\n\n\nimpl fmt::Display for ColorLevel {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.0 {\n\n LogLevel::Trace => Color::Purple.paint(\"TRACE\"),\n\n LogLevel::Debug => Color::Blue.paint(\"DEBUG\"),\n\n LogLevel::Info => Color::Green.paint(\"INFO \"),\n\n LogLevel::Warn => Color::Yellow.paint(\"WARN \"),\n\n LogLevel::Error => Color::Red.paint(\"ERROR\"),\n\n }.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 9, "score": 33012.045124741766 }, { "content": "fn main() {\n\n eakio::init_logger();\n\n\n\n if let Err(e) = eakio::command() {\n\n println!(\"Error: {}\", e);\n\n process::exit(1)\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 10, "score": 31879.69485091018 }, { "content": "fn build_tasks(srcs: &[PathGroup], dest: &PathBuf, dest_is_dir: bool) -> Vec<Task> {\n\n let mut tasks = Vec::<Task>::new();\n\n\n\n // 这里目标文件的路径由以下方式决定\n\n // - src 是文件\n\n // 1. dest 是文件, 则 dest\n\n // 2. dest 是目录, 则 dest/filename(src)\n\n // - src 是目录, 将 src 到 dest/src\n\n for pg in srcs.iter() {\n\n for path in &pg.subs {\n\n let mut task_dest = PathBuf::from(&dest);\n\n\n\n if pg.is_file {\n\n if dest_is_dir {\n\n let filename = path.file_name().unwrap();\n\n task_dest.push(filename);\n\n }\n\n } else {\n\n // remove prefix\n\n let filename = path.strip_prefix(&pg.path).unwrap();\n", "file_path": "src/cli.rs", "rank": 11, "score": 29771.4215969998 }, { "content": "fn input_password() -> io::Result<String> {\n\n let pass = rpassword::prompt_password_stdout(\" Password: \")?;\n\n let pass2 = rpassword::prompt_password_stdout(\"Confirm Password: \")?;\n\n\n\n if pass != pass2 {\n\n Err(io_error(\"passwords you provided do not match\"))\n\n } else {\n\n Ok(pass)\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 12, "score": 25036.587863694585 }, { "content": "fn is_hidden(entry: &DirEntry) -> bool {\n\n entry\n\n .file_name()\n\n .to_str()\n\n .map(|s| s.starts_with('.'))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 13, "score": 25036.587863694585 }, { "content": "fn command_crypt(args: &Args) -> io::Result<()> {\n\n let mode = if args.cmd_encrypt {\n\n Mode::Encrypt\n\n } else if args.cmd_decrypt {\n\n Mode::Decrypt\n\n } else {\n\n return Err(io_error(\"only support encrypt and decrypt\"));\n\n };\n\n\n\n let dest_is_dir = args.arg_dest.ends_with(MAIN_SEPARATOR);\n\n let dest = PathBuf::from(&args.arg_dest);\n\n\n\n let files = list_src_files(&args.arg_src, args.flag_hidden)?;\n\n let count: usize = files.iter().map(|pg| pg.subs.len()).sum();\n\n\n\n info!(\"Found {} files to {}\", count, mode);\n\n if count == 0 {\n\n return Ok(());\n\n }\n\n if count > 1 && !dest_is_dir {\n", "file_path": "src/cli.rs", "rank": 14, "score": 23606.826914804784 }, { "content": "fn list_src_files(srcs: &[String], hidden: bool) -> io::Result<Vec<PathGroup>> {\n\n let mut globs = Vec::<PathBuf>::new();\n\n for src in srcs.iter() {\n\n let expand_src = expand_tilde_path(src);\n\n let paths = glob::glob(&expand_src).map_err(|e| 
io_error(&format!(\"{}\", e)))?;\n\n for entry in paths {\n\n let path = entry.map_err(|e| io_error(&format!(\"{}\", e)))?;\n\n globs.push(path);\n\n }\n\n }\n\n let mut path_groups = Vec::<PathGroup>::new();\n\n\n\n for path in globs {\n\n let mut subs = Vec::<PathBuf>::new();\n\n if path.is_file() {\n\n subs.push(path.clone());\n\n path_groups.push(PathGroup {\n\n path,\n\n is_file: true,\n\n subs,\n", "file_path": "src/cli.rs", "rank": 28, "score": 18301.53178269408 }, { "content": " let mut reader = BufReader::new(src_f);\n\n\n\n let dest_f = File::create(dest)?;\n\n let mut writer = BufWriter::new(dest_f);\n\n\n\n reader.read_exact(&mut self.buffer[..MAGIC.len()])?;\n\n if &self.buffer[..MAGIC.len()] != MAGIC {\n\n return Err(io_error(\"magic not match\"));\n\n }\n\n\n\n let mut version = [0u8];\n\n reader.read_exact(&mut version)?;\n\n if version[0] != VERSION_1 && version[0] != VERSION_2 {\n\n return Err(io_error(&format!(\"version '{}' not support\", version[0])));\n\n }\n\n\n\n reader.read_exact(&mut self.buffer[..Salt::len()])?;\n\n let salt = Salt::from_bytes(&self.buffer[..Salt::len()])?;\n\n let mut crypto = Crypto::new(self.secret, &salt)?;\n\n\n", "file_path": "src/file.rs", "rank": 29, "score": 20.771253411942716 }, { "content": "impl<'a> FileCrypt<'a> {\n\n pub fn new(secret: &'a [u8]) -> FileCrypt {\n\n let size = BLOCK_SIZE + Crypto::tag_len();\n\n\n\n FileCrypt {\n\n secret,\n\n buffer: vec![0u8; size],\n\n }\n\n }\n\n\n\n pub fn encrypt(&mut self, src: &Path, dest: &Path) -> io::Result<()> {\n\n let salt = Salt::new()?;\n\n let mut crypto = Crypto::new(self.secret, &salt)?;\n\n\n\n let src_f = File::open(src)?;\n\n let mut size = src_f.metadata()?.len() as usize;\n\n let mut reader = BufReader::new(src_f);\n\n\n\n let dest_f = File::create(dest)?;\n\n let mut writer = BufWriter::new(dest_f);\n", "file_path": "src/file.rs", "rank": 30, "score": 20.31058121793204 }, { "content": " size -= BLOCK_SIZE;\n\n }\n\n Err(e) => if e.kind() == io::ErrorKind::UnexpectedEof {\n\n if size != 0 {\n\n let len = crypto.encrypt(&mut self.buffer, size)?;\n\n writer.write_all(&self.buffer[..len])?;\n\n }\n\n break;\n\n } else {\n\n return Err(e);\n\n },\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn decrypt(&mut self, src: &Path, dest: &Path) -> io::Result<()> {\n\n let src_f = File::open(src)?;\n\n let mut size = src_f.metadata()?.len() as usize;\n", "file_path": "src/file.rs", "rank": 31, "score": 18.41837465076961 }, { "content": " match *self {\n\n Mode::Encrypt => write!(f, \"encrypt\"),\n\n Mode::Decrypt => write!(f, \"decrypt\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Task {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?} -> {:?}\", self.src, self.dest)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Error {\n\n Error::Io(err)\n\n }\n\n}\n", "file_path": "src/task.rs", "rank": 32, "score": 18.332490958844488 }, { "content": " size -= header_len;\n\n\n\n loop {\n\n match reader.read_exact(&mut self.buffer) {\n\n Ok(()) => {\n\n let len = crypto.decrypt(&mut self.buffer)?;\n\n writer.write_all(&self.buffer[..len])?;\n\n size -= self.buffer.len();\n\n }\n\n Err(e) => if e.kind() == io::ErrorKind::UnexpectedEof {\n\n if size != 0 {\n\n let len = crypto.decrypt(&mut self.buffer[..size])?;\n\n writer.write_all(&self.buffer[..len])?;\n\n }\n\n\n\n break;\n\n } else {\n\n return Err(e);\n\n },\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n// calc crypto in_size data out size\n", "file_path": "src/file.rs", "rank": 33, 
"score": 16.796936059715012 }, { "content": " if version[0] == VERSION_2 {\n\n let size_len = 8 + Crypto::tag_len();\n\n reader.read_exact(&mut self.buffer[..size_len])?;\n\n crypto.decrypt(&mut self.buffer[..size_len])?;\n\n\n\n let mut rdr = Cursor::new(&self.buffer[..8]);\n\n let len = rdr.read_u64::<BigEndian>()?;\n\n if len != size as u64 {\n\n return Err(io_error(&format!(\n\n \"file size not match, {} != {}\",\n\n size, len\n\n )));\n\n }\n\n }\n\n\n\n let header_len = match version[0] {\n\n VERSION_1 => MAGIC.len() + 1 + Salt::len(),\n\n VERSION_2 => MAGIC.len() + 1 + Salt::len() + 8 + Crypto::tag_len(),\n\n _ => unreachable!(),\n\n };\n", "file_path": "src/file.rs", "rank": 34, "score": 16.10032259880153 }, { "content": "use std::fs::File;\n\nuse std::io;\n\nuse std::io::{BufReader, BufWriter, Cursor};\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n\nuse byteorder::{BigEndian, ByteOrder, ReadBytesExt};\n\n\n\nuse super::crypto::{Crypto, Salt};\n\nuse super::util::io_error;\n\n\n\nconst MAGIC: &[u8] = b\"KELSI\";\n\nconst BLOCK_SIZE: usize = 128 * 1024;\n\n\n\n// +----+---------+\n\n// | | MAGIC |\n\n// | H +---------+\n\n// | E | VERSION |\n\n// | A +---------+\n\n// | D | SALT |\n", "file_path": "src/file.rs", "rank": 35, "score": 14.57690290696183 }, { "content": " Mode::Decrypt => self.file_crypt.decrypt(&task.src, &task.dest)?,\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Error::NotFile => write!(f, \"not file\"),\n\n Error::Skip => write!(f, \"skip exists\"),\n\n Error::Exists => write!(f, \"local file exists\"),\n\n Error::Io(ref e) => write!(f, \"{}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Mode {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/task.rs", "rank": 36, "score": 13.582587089289863 }, { "content": "\n\n // write header metadata\n\n writer.write_all(MAGIC)?;\n\n writer.write_all(&[VERSION_2])?;\n\n writer.write_all(salt.get_bytes())?;\n\n\n\n // write placeholder size data\n\n let size_start = MAGIC.len() + 1 + Salt::len();\n\n let size_len = 8 + Crypto::tag_len();\n\n let dest_size = size_start + size_len + crypto_data_size(size);\n\n\n\n BigEndian::write_u64(&mut self.buffer, dest_size as u64);\n\n let len = crypto.encrypt(&mut self.buffer, 8)?;\n\n writer.write_all(&self.buffer[..len])?;\n\n\n\n loop {\n\n match reader.read_exact(&mut self.buffer[..BLOCK_SIZE]) {\n\n Ok(()) => {\n\n let len = crypto.encrypt(&mut self.buffer, BLOCK_SIZE)?;\n\n writer.write_all(&self.buffer[..len])?;\n", "file_path": "src/file.rs", "rank": 37, "score": 12.849747823747915 }, { "content": " fn do_task(&mut self, task: &Task) -> Result<()> {\n\n if !task.src.is_file() {\n\n return Err(Error::NotFile);\n\n }\n\n\n\n if task.dest.exists() {\n\n if self.skip_exists {\n\n return Err(Error::Skip);\n\n }\n\n\n\n if !self.overwrite {\n\n return Err(Error::Exists);\n\n }\n\n }\n\n\n\n let dest_dir = task.dest.parent().unwrap();\n\n fs::create_dir_all(dest_dir)?;\n\n\n\n match self.mode {\n\n Mode::Encrypt => self.file_crypt.encrypt(&task.src, &task.dest)?,\n", "file_path": "src/task.rs", "rank": 38, "score": 10.009564600119942 }, { "content": "// +----+---------+\n\nconst VERSION_1: u8 = 0x01;\n\n\n\n// +----+---------+\n\n// | | MAGIC |\n\n// | +---------+\n\n// | H | VERSION |\n\n// | E +---------+\n\n// | A | SALT |\n\n// | D +---------+\n\n// | | SIZE |\n\n// +----+---------+\n\nconst VERSION_2: u8 = 
0x02;\n\n\n\n#[derive(Clone)]\n\npub struct FileCrypt<'a> {\n\n secret: &'a [u8],\n\n buffer: Vec<u8>,\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 39, "score": 9.236274434130896 }, { "content": "extern crate ansi_term;\n\nextern crate byteorder;\n\nextern crate crossbeam;\n\nextern crate docopt;\n\nextern crate env_logger;\n\nextern crate glob;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate num_cpus;\n\nextern crate ring;\n\nextern crate rpassword;\n\nextern crate scoped_threadpool;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate time;\n\nextern crate walkdir;\n\n\n\nmod crypto;\n\nmod file;\n\nmod task;\n\nmod util;\n\nmod cli;\n\n\n\npub use cli::command;\n\npub use util::init_logger;\n\n\n\npub const VERSION: &str = \"1.0\";\n", "file_path": "src/lib.rs", "rank": 40, "score": 8.93046816497224 }, { "content": "use std::io;\n\nuse std::path::{PathBuf, MAIN_SEPARATOR};\n\n\n\nuse docopt::Docopt;\n\nuse glob;\n\nuse rpassword;\n\nuse walkdir::{DirEntry, WalkDir};\n\n\n\nuse super::VERSION;\n\nuse super::task::{Mode, Task, TaskRuner};\n\nuse super::util::{expand_tilde_path, io_error};\n\n\n\nconst USAGE: &str = \"\n\nEakio, encrypt your file.\n\n\n\nUsage:\n\n eakio encrypt <src>... <dest> [-n] [--skip | --overwrite] [--hidden] [--parallel=<N>]\n\n eakio decrypt <src>... <dest> [-n] [--skip | --overwrite] [--hidden] [--parallel=<N>]\n\n eakio (-h | --help)\n\n eakio (-v | --version)\n", "file_path": "src/cli.rs", "rank": 41, "score": 8.708385389386038 }, { "content": " return Err(io_error(&format!(\n\n \"multiple files dest must a dir, '{}' need endswith '{}'\",\n\n dest.display(),\n\n MAIN_SEPARATOR\n\n )));\n\n }\n\n\n\n let tasks = build_tasks(&files, &dest, dest_is_dir);\n\n\n\n let secret = input_password()?.into_bytes();\n\n let mut runer = TaskRuner::new(\n\n &secret,\n\n mode,\n\n args.flag_skip,\n\n args.flag_overwrite,\n\n args.flag_dryrun,\n\n );\n\n\n\n if args.flag_parallel == 0 {\n\n runer.simple_run(&tasks);\n\n } else {\n\n runer.parallel_run(&tasks, args.flag_parallel);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 42, "score": 8.698196048588304 }, { "content": " }\n\n\n\n fn run_task(&mut self, index: usize, total: usize, task: &Task) {\n\n if self.dry_run {\n\n info!(\"({}/{}) {}: {} (dry run)\", index, total, self.mode, task);\n\n return;\n\n }\n\n match self.do_task(task) {\n\n Ok(()) => info!(\"({}/{}) {}: {} (success)\", index, total, self.mode, task),\n\n Err(e) => {\n\n if let Error::Io(_) = e {\n\n if task.dest.is_file() {\n\n fs::remove_file(&task.dest).unwrap();\n\n }\n\n }\n\n error!(\"({}/{}) {}: {} ({})\", index, total, self.mode, task, e)\n\n }\n\n };\n\n }\n\n\n", "file_path": "src/task.rs", "rank": 43, "score": 8.450845193731494 }, { "content": "\n\n let mut buf = [0u8; BLOCK_SIZE * 2];\n\n\n\n // in size = 0\n\n let in_size = 0;\n\n let out_size = crypto.encrypt(&mut buf[..], in_size).unwrap();\n\n assert_eq!(out_size, crypto_data_size(in_size));\n\n\n\n // in size < BLOCK_SIZE\n\n let in_size1 = BLOCK_SIZE - 3;\n\n let out_size1 = crypto.encrypt(&mut buf[..], in_size1).unwrap();\n\n assert_eq!(out_size1, crypto_data_size(in_size1));\n\n\n\n // in size = BLOCK_SIZE\n\n let in_size2 = BLOCK_SIZE;\n\n let out_size2 = crypto.encrypt(&mut buf[..], in_size2).unwrap();\n\n assert_eq!(out_size2, crypto_data_size(in_size2));\n\n\n\n // BLOCK_SIZE < in size < 2 * BLOCK_SIZE\n\n let in_size3 = in_size1 + in_size2;\n\n assert_eq!(out_size1 + out_size2, crypto_data_size(in_size3));\n\n\n\n // in size = n * BLOCK_SIZE\n\n 
let in_size4 = 7 * in_size2;\n\n assert_eq!(7 * out_size2, crypto_data_size(in_size4));\n\n }\n\n}\n", "file_path": "src/file.rs", "rank": 44, "score": 8.15218369663253 }, { "content": " });\n\n } else if path.is_dir() {\n\n for entry in WalkDir::new(&path)\n\n .into_iter()\n\n .filter_entry(|e| hidden || !is_hidden(e))\n\n {\n\n let de = entry.map_err(|e| io_error(&format!(\"{}\", e)))?;\n\n if de.file_type().is_file() {\n\n subs.push(de.path().to_path_buf());\n\n }\n\n }\n\n\n\n path_groups.push(PathGroup {\n\n path,\n\n is_file: false,\n\n subs,\n\n });\n\n }\n\n }\n\n\n\n Ok(path_groups)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 45, "score": 7.380998534699531 }, { "content": "}\n\n\n\n#[derive(Clone)]\n\npub struct TaskRuner<'a> {\n\n mode: Mode,\n\n skip_exists: bool,\n\n overwrite: bool,\n\n dry_run: bool,\n\n file_crypt: FileCrypt<'a>,\n\n}\n\n\n\nimpl<'a> TaskRuner<'a> {\n\n pub fn new(\n\n secret: &'a [u8],\n\n mode: Mode,\n\n skip_exists: bool,\n\n overwrite: bool,\n\n dry_run: bool,\n\n ) -> TaskRuner<'a> {\n\n TaskRuner {\n", "file_path": "src/task.rs", "rank": 46, "score": 6.93826875485734 }, { "content": " mode,\n\n skip_exists,\n\n overwrite,\n\n dry_run,\n\n file_crypt: FileCrypt::new(secret),\n\n }\n\n }\n\n\n\n pub fn simple_run(&mut self, tasks: &[Task]) {\n\n let total = tasks.len();\n\n for (index, task) in tasks.iter().enumerate() {\n\n self.run_task(index + 1, total, task);\n\n }\n\n }\n\n\n\n pub fn parallel_run(&mut self, tasks: &[Task], parallel: i32) {\n\n let num_threads = if parallel > 0 {\n\n parallel as u32\n\n } else {\n\n num_cpus::get() as u32\n", "file_path": "src/task.rs", "rank": 47, "score": 6.630357381071717 }, { "content": "use std::fmt;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path::PathBuf;\n\nuse std::result;\n\nuse std::sync::Arc;\n\n\n\nuse crossbeam::sync::MsQueue;\n\nuse num_cpus;\n\nuse scoped_threadpool;\n\n\n\nuse super::file::FileCrypt;\n\n\n", "file_path": "src/task.rs", "rank": 48, "score": 5.8151489571709885 }, { "content": " };\n\n\n\n let cache = MsQueue::<Self>::new();\n\n for _ in 0..num_threads {\n\n cache.push(self.clone());\n\n }\n\n let cache = Arc::new(cache);\n\n\n\n let mut pool = scoped_threadpool::Pool::new(num_threads);\n\n pool.scoped(|scoped| {\n\n let total = tasks.len();\n\n for (index, task) in tasks.iter().enumerate() {\n\n let cache = Arc::clone(&cache);\n\n scoped.execute(move || {\n\n let mut this = cache.pop();\n\n this.run_task(index + 1, total, task);\n\n cache.push(this);\n\n });\n\n }\n\n });\n", "file_path": "src/task.rs", "rank": 49, "score": 5.783281705767173 }, { "content": "use std::borrow::Cow;\n\nuse std::env;\n\nuse std::fmt;\n\nuse std::io;\n\n\n\nuse ansi_term::Color;\n\nuse env_logger::LogBuilder;\n\nuse log::{LogLevel, LogLevelFilter, LogRecord};\n\nuse time;\n\n\n", "file_path": "src/util.rs", "rank": 50, "score": 4.508668658446378 }, { "content": "\n\nOptions:\n\n -h --help Show this screen.\n\n -v --version Show version.\n\n -n --dryrun Only show what should be do.\n\n --skip Skip exists dest file.\n\n --overwrite Overwrite exists dest file.\n\n --hidden Include hidden files.\n\n --parallel=<N> Parallel run, -1 use cpu count.\n\n\";\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/cli.rs", "rank": 51, "score": 2.9188770132280233 }, { "content": "extern crate eakio;\n\n\n\nuse std::process;\n\n\n", "file_path": "src/main.rs", "rank": 52, "score": 2.3763691289469246 }, { "content": "## Eakio\n\n\n\n[![Build 
Status](https://travis-ci.org/SerhoLiu/eakio.svg?branch=develop)](https://travis-ci.org/SerhoLiu/eakio)\n\n[![Build status](https://ci.appveyor.com/api/projects/status/wjno5p5kj1dctwv6?svg=true)](https://ci.appveyor.com/project/SerhoLiu/eakio)\n\n\n\nEncrypt your data.\n\n\n\n## License\n\n\n\nMIT LICENSE, see MIT-LICENSE.txt\n\n\n\n## Warning\n\n\n\nPlease Remember Your Secret\n", "file_path": "README.md", "rank": 53, "score": 2.190290707942134 } ]
Rust
artichoke-backend/src/extn/core/math/mruby.rs
Talljoe/artichoke
36ed5eba078a9fbf3cb4d5c8f7407d0a773d2d6e
use crate::extn::core::math; use crate::extn::prelude::*; pub fn init(interp: &mut Artichoke) -> InitializeResult<()> { if interp.is_module_defined::<math::Math>() { return Ok(()); } let spec = module::Spec::new(interp, "Math", None)?; module::Builder::for_spec(interp, &spec) .add_module_method("acos", artichoke_math_acos, sys::mrb_args_req(1))? .add_module_method("acosh", artichoke_math_acosh, sys::mrb_args_req(1))? .add_module_method("asin", artichoke_math_asin, sys::mrb_args_req(1))? .add_module_method("asinh", artichoke_math_asinh, sys::mrb_args_req(1))? .add_module_method("atan", artichoke_math_atan, sys::mrb_args_req(1))? .add_module_method("atan2", artichoke_math_atan2, sys::mrb_args_req(2))? .add_module_method("atanh", artichoke_math_atanh, sys::mrb_args_req(1))? .add_module_method("cbrt", artichoke_math_cbrt, sys::mrb_args_req(1))? .add_module_method("cos", artichoke_math_cos, sys::mrb_args_req(1))? .add_module_method("cosh", artichoke_math_cosh, sys::mrb_args_req(1))? .add_module_method("erf", artichoke_math_erf, sys::mrb_args_req(1))? .add_module_method("erfc", artichoke_math_erfc, sys::mrb_args_req(1))? .add_module_method("exp", artichoke_math_exp, sys::mrb_args_req(1))? .add_module_method("frexp", artichoke_math_frexp, sys::mrb_args_req(1))? .add_module_method("gamma", artichoke_math_gamma, sys::mrb_args_req(1))? .add_module_method("hypot", artichoke_math_hypot, sys::mrb_args_req(2))? .add_module_method("ldexp", artichoke_math_ldexp, sys::mrb_args_req(2))? .add_module_method("lgamma", artichoke_math_lgamma, sys::mrb_args_req(1))? .add_module_method("log", artichoke_math_log, sys::mrb_args_req_and_opt(1, 1))? .add_module_method("log10", artichoke_math_log10, sys::mrb_args_req(1))? .add_module_method("log2", artichoke_math_log2, sys::mrb_args_req(1))? .add_module_method("sin", artichoke_math_sin, sys::mrb_args_req(1))? .add_module_method("sinh", artichoke_math_sinh, sys::mrb_args_req(1))? .add_module_method("sqrt", artichoke_math_sqrt, sys::mrb_args_req(1))? .add_module_method("tan", artichoke_math_tan, sys::mrb_args_req(1))? .add_module_method("tanh", artichoke_math_tanh, sys::mrb_args_req(1))? .define()?; let domainerror = class::Spec::new("DomainError", Some(EnclosingRubyScope::module(&spec)), None)?; class::Builder::for_spec(interp, &domainerror) .with_super_class::<StandardError, _>("StandardError")? 
.define()?; interp.def_class::<math::DomainError>(domainerror)?; interp.def_module::<math::Math>(spec)?; let e = interp.convert_mut(math::E); interp.define_module_constant::<math::Math>("E", e)?; let pi = interp.convert_mut(math::PI); interp.define_module_constant::<math::Math>("PI", pi)?; Ok(()) } unsafe extern "C" fn artichoke_math_acos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_acosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::atan2(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, 
exception), } } unsafe extern "C" fn artichoke_math_cbrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cbrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_cosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erf( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erf(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erfc( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erfc(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_exp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::exp(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_frexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::frexp(&mut guard, value).and_then(|(fraction, exponent)| { let fraction = guard.convert_mut(fraction); let exponent = guard.convert(exponent); guard.try_convert_mut(&[fraction, exponent][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_gamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = 
unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::gamma(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_hypot( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::hypot(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_ldexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (fraction, exponent) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let fraction = Value::from(fraction); let exponent = Value::from(exponent); let result = math::ldexp(&mut guard, fraction, exponent).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_lgamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::lgamma(&mut guard, value).and_then(|(result, sign)| { let result = guard.convert_mut(result); let sign = guard.convert(sign); guard.try_convert_mut(&[result, sign][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, base) = mrb_get_args!(mrb, required = 1, optional = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let base = base.map(Value::from); let result = math::log(&mut guard, value, base).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log10( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log10(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log2(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = 
Guard::new(&mut interp); let value = Value::from(value); let result = math::sin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sqrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sqrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
use crate::extn::core::math; use crate::extn::prelude::*; pub fn init(interp: &mut Artichoke) -> InitializeResult<()> { if interp.is_module_defined::<math::Math>() { return Ok(()); } let spec = module::Spec::new(interp, "Math", None)?; module::Builder::for_spec(interp, &spec) .add_module_method("acos", artichoke_math_acos, sys::mrb_args_req(1))? .add_module_method("acosh", artichoke_math_acosh, sys::mrb_args_req(1))? .add_module_method("asin", artichoke_math_asin, sys::mrb_args_req(1))? .add_module_method("asinh", artichoke_math_asinh, sys::mrb_args_req(1))? .add_module_method("atan", artichoke_math_atan, sys::mrb_args_req(1))? .add_module_method("atan2", artichoke_math_atan2, sys::mrb_args_req(2))? .add_module_method("atanh", artichoke_math_atanh, sys::mrb_args_req(1))? .add_module_method("cbrt", artichoke_math_cbrt, sys::mrb_args_req(1))? .add_module_method("cos", artichoke_math_cos, sys::mrb_args_req(1))? .add_module_method("cosh", artichoke_math_cosh, sys::mrb_args_req(1))? .add_module_method("erf", artichoke_math_erf, sys::mrb_args_req(1))? .add_module_method("erfc", artichoke_math_erfc, sys::mrb_args_req(1))? .add_module_method("exp", artichoke_math_exp, sys::mrb_args_req(1))? .add_module_method("frexp", artichoke_math_frexp, sys::mrb_args_req(1))? .add_module_method("gamma", artichoke_math_gamma, sys::mrb_args_req(1))? .add_module_method("hypot", artichoke_math_hypot, sys::mrb_args_req(2))? .add_module_method("ldexp", artichoke_math_ldexp, sys::mrb_args_req(2))? .add_module_method("lgamma", artichoke_math_lgamma, sys::mrb_args_req(1))? .add_module_method("log", artichoke_math_log, sys::mrb_args_req_and_opt(1, 1))? .add_module_method("log10", artichoke_math_log10, sys::mrb_args_req(1))? .add_module_method("log2", artichoke_math_log2, sys::mrb_args_req(1))? .add_module_method("sin", artichoke_math_sin, sys::mrb_args_req(1))? .add_module_method("sinh", artichoke_math_sinh, sys::mrb_args_req(1))? .add_module_method("sqrt", artichoke_math_sqrt, sys::mrb_args_req(1))? .add_module_method("tan", artichoke_math_tan, sys::mrb_args_req(1))? .add_module_method("tanh", artichoke_math_tanh, sys::mrb_args_req(1))? .define()?; let domainerror = class::Spec::new("DomainError", Some(EnclosingRubyScope::module(&spec)), None)?; class::Builder::for_spec(interp, &domainerror) .with_super_class::<StandardError, _>("StandardError")? 
.define()?; interp.def_class::<math::DomainError>(domainerror)?; interp.def_module::<math::Math>(spec)?; let e = interp.convert_mut(math::E); interp.define_module_constant::<math::Math>("E", e)?; let pi = interp.convert_mut(math::PI); interp.define_module_constant::<math::Math>("PI", pi)?; Ok(()) } unsafe extern "C" fn artichoke_math_acos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_acosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::acosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_asinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::asinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atan2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::atan2(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_atanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::atanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, 
exception), } } unsafe extern "C" fn artichoke_math_cbrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cbrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
unsafe extern "C" fn artichoke_math_cosh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cosh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erf( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erf(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_erfc( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::erfc(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_exp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::exp(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_frexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::frexp(&mut guard, value).and_then(|(fraction, exponent)| { let fraction = guard.convert_mut(fraction); let exponent = guard.convert(exponent); guard.try_convert_mut(&[fraction, exponent][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_gamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::gamma(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_hypot( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, other) = mrb_get_args!(mrb, required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let other = Value::from(other); let result = math::hypot(&mut guard, value, other).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_ldexp( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (fraction, exponent) = mrb_get_args!(mrb, 
required = 2); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let fraction = Value::from(fraction); let exponent = Value::from(exponent); let result = math::ldexp(&mut guard, fraction, exponent).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_lgamma( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::lgamma(&mut guard, value).and_then(|(result, sign)| { let result = guard.convert_mut(result); let sign = guard.convert(sign); guard.try_convert_mut(&[result, sign][..]) }); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let (value, base) = mrb_get_args!(mrb, required = 1, optional = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let base = base.map(Value::from); let result = math::log(&mut guard, value, base).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log10( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log10(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_log2( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::log2(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sin( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sin(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sinh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::sinh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_sqrt( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = 
Value::from(value); let result = math::sqrt(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tan( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tan(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } } unsafe extern "C" fn artichoke_math_tanh( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::tanh(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
unsafe extern "C" fn artichoke_math_cos( mrb: *mut sys::mrb_state, _slf: sys::mrb_value, ) -> sys::mrb_value { let value = mrb_get_args!(mrb, required = 1); let mut interp = unwrap_interpreter!(mrb); let mut guard = Guard::new(&mut interp); let value = Value::from(value); let result = math::cos(&mut guard, value).map(|result| guard.convert_mut(result)); match result { Ok(value) => value.inner(), Err(exception) => exception::raise(guard, exception), } }
function_block-full_function
[ { "content": "pub fn post_match(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let post = data.post();\n\n Ok(interp.convert_mut(post))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 0, "score": 501623.0496868548 }, { "content": "pub fn pre_match(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let pre = data.pre();\n\n Ok(interp.convert_mut(pre))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 1, "score": 501623.04968685494 }, { "content": "pub fn require(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let success = kernel::require::require(interp, path, None)?;\n\n Ok(interp.convert(success))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 2, "score": 487668.4160726309 }, { "content": "#[cfg(not(feature = \"core-math-extra\"))]\n\npub fn gamma(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let _ = interp;\n\n let _ = value;\n\n Err(Exception::from(NotImplementedError::from(\n\n \"enable 'core-math-extra' feature when building Artichoke\",\n\n )))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 3, "score": 487553.9256152389 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn erf(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = libm::erf(value);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 4, "score": 487553.92561523884 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn erfc(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = libm::erfc(value);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 5, "score": 487553.9256152389 }, { "content": "pub fn asinh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.asinh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 6, "score": 487548.66679579625 }, { "content": "pub fn tan(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.tan();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 7, "score": 487548.6667957963 }, { "content": "pub fn log10(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.log10();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log10\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 8, "score": 487548.66679579625 }, { "content": "pub fn asin(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.asin();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain 
- \"asin\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 9, "score": 487548.66679579625 }, { "content": "pub fn atanh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.atanh();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"atanh\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 10, "score": 487548.66679579625 }, { "content": "pub fn cos(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cos();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 11, "score": 487548.66679579625 }, { "content": "pub fn sqrt(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.sqrt();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"sqrt\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 12, "score": 487548.66679579625 }, { "content": "pub fn atan(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.atan();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 13, "score": 487548.66679579625 }, { "content": "pub fn exp(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.exp();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 14, "score": 487548.66679579625 }, { "content": "pub fn sin(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.sin();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 15, "score": 487548.6667957963 }, { "content": "pub fn acos(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.acos();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"acos\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 16, "score": 487548.66679579625 }, { "content": "pub fn log2(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.log2();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log2\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 17, "score": 487548.6667957963 }, { "content": "pub fn cbrt(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cbrt();\n\n Ok(result)\n\n}\n\n\n", "file_path": 
"artichoke-backend/src/extn/core/math/mod.rs", "rank": 18, "score": 487548.66679579625 }, { "content": "pub fn acosh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.acosh();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"acosh\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 19, "score": 487548.6667957963 }, { "content": "pub fn sinh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.sinh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 20, "score": 487548.66679579625 }, { "content": "pub fn cosh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cosh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 21, "score": 487548.6667957963 }, { "content": "pub fn tanh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.tanh();\n\n Ok(result)\n\n}\n\n\n\n#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct DomainError(Cow<'static, str>);\n\n\n\nimpl From<String> for DomainError {\n\n fn from(message: String) -> Self {\n\n Self(message.into())\n\n }\n\n}\n\n\n\nimpl From<&'static str> for DomainError {\n\n fn from(message: &'static str) -> Self {\n\n Self(message.into())\n\n }\n\n}\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 22, "score": 487548.6667957963 }, { "content": "/// Load ruby/spec sources into the Artichoke virtual filesystem.\n\n///\n\n/// # Errors\n\n///\n\n/// If an exception is raised on the Artichoke interpreter, it is returned.\n\npub fn init(interp: &mut Artichoke) -> Result<(), Exception> {\n\n for source in Specs::iter() {\n\n if let Some(content) = Specs::get(&source) {\n\n interp.def_rb_source_file(source.as_ref(), content)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n/// ruby/spec source code.\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, RustEmbed)]\n\n#[folder = \"vendor/spec\"]\n\npub struct Specs;\n", "file_path": "spec-runner/src/rubyspec.rs", "rank": 23, "score": 483797.67943492695 }, { "content": "/// Load `MSpec` sources into the Artichoke virtual filesystem.\n\n///\n\n/// # Errors\n\n///\n\n/// If an exception is raised on the Artichoke interpreter, it is returned.\n\npub fn init(interp: &mut Artichoke) -> Result<(), Exception> {\n\n for source in Sources::iter() {\n\n if let Some(content) = Sources::get(&source) {\n\n interp.def_rb_source_file(source.as_ref(), content)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n/// `MSpec` source code.\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, RustEmbed)]\n\n#[folder = \"vendor/mspec/lib\"]\n\npub struct Sources;\n\n\n", "file_path": "spec-runner/src/mspec.rs", "rank": 24, "score": 483797.67943492695 }, { "content": "pub fn require_relative(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let relative_base = RelativePath::try_from_interp(interp)?;\n\n let success = kernel::require::require(interp, path, Some(relative_base))?;\n\n Ok(interp.convert(success))\n\n}\n", "file_path": 
"artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 25, "score": 481961.7203105424 }, { "content": "pub fn atan2(interp: &mut Artichoke, value: Value, other: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let other = value_to_float(interp, other)?;\n\n let result = value.atan2(other);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 26, "score": 477345.04055893223 }, { "content": "pub fn hypot(interp: &mut Artichoke, value: Value, other: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let other = value_to_float(interp, other)?;\n\n let result = value.hypot(other);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 27, "score": 477345.04055893223 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn frexp(interp: &mut Artichoke, value: Value) -> Result<(Fp, Int), Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let (fraction, exponent) = libm::frexp(value);\n\n Ok((fraction, exponent.into()))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 28, "score": 476181.92091057095 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn lgamma(interp: &mut Artichoke, value: Value) -> Result<(Fp, Int), Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_infinite() && value.is_sign_negative() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"lgamma\"\"#).into())\n\n } else {\n\n let (result, sign) = libm::lgamma_r(value);\n\n Ok((result, Int::from(sign)))\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 29, "score": 476181.9209105709 }, { "content": "pub fn load(interp: &mut Artichoke, filename: Value) -> Result<bool, Exception> {\n\n let filename = filename.implicitly_convert_to_string(interp)?;\n\n if filename.find_byte(b'\\0').is_some() {\n\n return Err(ArgumentError::from(\"path name contains null byte\").into());\n\n }\n\n let file = ffi::bytes_to_os_str(filename)?;\n\n let pathbuf;\n\n let mut path = Path::new(file);\n\n if path.is_relative() {\n\n pathbuf = Path::new(RUBY_LOAD_PATH).join(file);\n\n path = pathbuf.as_path();\n\n }\n\n if !interp.source_is_file(path)? {\n\n let mut message = b\"cannot load such file -- \".to_vec();\n\n message.extend_from_slice(filename);\n\n return Err(LoadError::from(message).into());\n\n }\n\n let context = Context::new(ffi::os_str_to_bytes(path.as_os_str())?.to_vec())\n\n .ok_or_else(|| ArgumentError::from(\"path name contains null byte\"))?;\n\n interp.push_context(context)?;\n\n let result = interp.load_source(path);\n\n let _ = interp.pop_context()?;\n\n result\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/require.rs", "rank": 30, "score": 474979.98040166055 }, { "content": "pub fn to_s(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let display = data.to_s()?;\n\n Ok(interp.convert_mut(display))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 31, "score": 472552.9177341187 }, { "content": "pub fn to_a(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n if let Some(ary) = data.to_a()? 
{\n\n interp.try_convert_mut(ary)\n\n } else {\n\n Ok(Value::nil())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 32, "score": 472552.9177341187 }, { "content": "pub fn regexp(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let regexp = data.regexp();\n\n // TODO(GH-614): MatchData#regexp needs to return an identical Regexp to the\n\n // one used to create the match (same object ID).\n\n //\n\n // The `Regexp::alloc_value` here should be replaced with\n\n // `Regexp::box_into_value`.\n\n //\n\n // See: https://github.com/ruby/spec/pull/727\n\n let regexp = Regexp::alloc_value(regexp.clone(), interp)?;\n\n Ok(regexp)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 33, "score": 467711.9540405075 }, { "content": "pub fn bytes(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n // These bytes must be cloned because they are owned by the interpreter.\n\n let bytes = symbol.bytes(interp).to_vec();\n\n Ok(interp.convert_mut(bytes))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 34, "score": 467711.95404050755 }, { "content": "pub fn is_empty(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n let is_empty = symbol.is_empty(interp);\n\n Ok(interp.convert(is_empty))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 35, "score": 467711.9540405075 }, { "content": "pub fn string(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let mut string = interp.convert_mut(data.string());\n\n string.freeze(interp)?;\n\n Ok(string)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 36, "score": 467711.95404050755 }, { "content": "pub fn length(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let len = data.len()?;\n\n if let Ok(len) = Int::try_from(len) {\n\n Ok(interp.convert(len))\n\n } else {\n\n Err(ArgumentError::from(\"input string too long\").into())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 37, "score": 467711.95404050744 }, { "content": "pub fn length(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n let len = symbol.len(interp);\n\n interp.try_convert(len)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 38, "score": 467711.95404050755 }, { "content": "pub fn names(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let names = data.names();\n\n interp.try_convert_mut(names)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 39, "score": 467711.9540405075 }, { "content": "pub fn captures(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? 
};\n\n if let Some(captures) = data.captures()? {\n\n interp.try_convert_mut(captures)\n\n } else {\n\n Ok(Value::nil())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 40, "score": 467711.9540405075 }, { "content": "pub fn to_h(interp: &mut Artichoke, mut environ: Value) -> Result<Value, Exception> {\n\n let environ = unsafe { Environ::unbox_from_value(&mut environ, interp) }?;\n\n let result = environ.to_map()?;\n\n Ok(interp.convert_mut(result))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/env/trampoline.rs", "rank": 41, "score": 466296.4394446551 }, { "content": "pub fn to_s(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let s = regexp.string();\n\n Ok(interp.convert_mut(s))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 42, "score": 466296.4394446551 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn ldexp(interp: &mut Artichoke, fraction: Value, exponent: Value) -> Result<Fp, Exception> {\n\n use std::convert::TryFrom;\n\n\n\n let fraction = value_to_float(interp, fraction)?;\n\n let exponent = exponent.implicitly_convert_to_int(interp).or_else(|err| {\n\n if let Ok(exponent) = exponent.try_into::<Fp>(interp) {\n\n if exponent.is_nan() {\n\n Err(RangeError::from(\"float NaN out of range of integer\").into())\n\n } else {\n\n // TODO: use `approx_unchecked_to` once stabilized.\n\n #[allow(clippy::cast_possible_truncation)]\n\n Ok(exponent as Int)\n\n }\n\n } else {\n\n Err(Exception::from(err))\n\n }\n\n })?;\n\n if let Ok(exponent) = i32::try_from(exponent) {\n\n Ok(libm::ldexp(fraction, exponent))\n\n } else if exponent < 0 {\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 43, "score": 465474.88764255773 }, { "content": "pub fn named_captures(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let named_captures = data.named_captures()?;\n\n interp.try_convert_mut(named_captures)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 44, "score": 463015.3444205938 }, { "content": "/// Transform a `Exception` Ruby `Value` into an [`Exception`].\n\n///\n\n/// # Errors\n\n///\n\n/// This function makes funcalls on the interpreter which are fallible.\n\npub fn last_error(interp: &mut Artichoke, exception: Value) -> Result<Exception, Exception> {\n\n let mut arena = interp.create_arena_savepoint();\n\n // Clear the current exception from the mruby interpreter so subsequent\n\n // calls to the mruby VM are not tainted by an error they did not\n\n // generate.\n\n //\n\n // We must clear the pointer at the beginning of this function so we can\n\n // use the mruby VM to inspect the exception once we turn it into an\n\n // `mrb_value`. 
`Value::funcall` handles errors by calling this\n\n // function, so not clearing the exception results in a stack overflow.\n\n\n\n // Generate exception metadata in by executing the Ruby code:\n\n //\n\n // ```ruby\n\n // clazz = exception.class.name\n\n // message = exception.message\n\n // ```\n\n\n\n // Sometimes when hacking on extn/core it is possible to enter a\n\n // crash loop where an exception is captured by this handler, but\n", "file_path": "artichoke-backend/src/exception_handler.rs", "rank": 45, "score": 462736.2261375355 }, { "content": "pub fn log(interp: &mut Artichoke, value: Value, base: Option<Value>) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = if let Some(base) = base {\n\n let base = value_to_float(interp, base)?;\n\n if base.is_nan() {\n\n return Ok(base);\n\n }\n\n value.log(base)\n\n } else {\n\n value.ln()\n\n };\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 46, "score": 461890.03725721553 }, { "content": "pub fn day(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let day = time.inner().day();\n\n let result = interp.convert(day);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 47, "score": 461359.26958341117 }, { "content": "pub fn clear(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.clear();\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 48, "score": 461359.2695834112 }, { "content": "pub fn inspect(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let inspect = regexp.inspect();\n\n Ok(interp.convert_mut(inspect))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 49, "score": 461359.2695834112 }, { "content": "pub fn hash(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let hash = regexp.hash();\n\n #[allow(clippy::cast_possible_wrap)]\n\n Ok(interp.convert(hash as Int))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 50, "score": 461359.2695834112 }, { "content": "pub fn nanosecond(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let nanosecond = time.inner().nanosecond();\n\n let result = interp.convert(nanosecond);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 51, "score": 461359.2695834112 }, { "content": "pub fn minute(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? 
};\n\n let minute = time.inner().minute();\n\n let result = interp.convert(minute);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 52, "score": 461359.2695834112 }, { "content": "pub fn hour(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let hour = time.inner().hour();\n\n let result = interp.convert(hour);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 53, "score": 461359.2695834112 }, { "content": "pub fn month(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let month = time.inner().month();\n\n let result = interp.convert(month);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 54, "score": 461359.2695834113 }, { "content": "pub fn pop(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n let result = array.pop();\n\n Ok(interp.convert(result))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 55, "score": 461359.26958341117 }, { "content": "pub fn names(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let names = regexp.names();\n\n interp.try_convert_mut(names)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 56, "score": 461359.2695834112 }, { "content": "pub fn options(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let opts = regexp.options();\n\n Ok(interp.convert(opts))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 57, "score": 461359.2695834112 }, { "content": "pub fn is_casefold(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let is_casefold = regexp.is_casefold();\n\n Ok(interp.convert(is_casefold))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 58, "score": 461359.26958341117 }, { "content": "pub fn microsecond(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let microsecond = time.inner().microsecond();\n\n let result = interp.convert(microsecond);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 59, "score": 461359.2695834112 }, { "content": "pub fn source(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let source = regexp.source();\n\n Ok(interp.convert_mut(source))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 60, "score": 461359.2695834112 }, { "content": "pub fn year(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? 
};\n\n let year = time.inner().year();\n\n let result = interp.convert(year);\n\n Ok(result)\n\n}\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 61, "score": 461359.2695834112 }, { "content": "pub fn second(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let second = time.inner().second();\n\n let result = interp.convert(second);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 62, "score": 461359.2695834113 }, { "content": "pub fn weekday(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let weekday = time.inner().weekday();\n\n let result = interp.convert(weekday);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 63, "score": 461359.2695834112 }, { "content": "pub fn seed(interp: &mut Artichoke, mut rand: Value) -> Result<Value, Exception> {\n\n let rand = unsafe { Random::unbox_from_value(&mut rand, interp)? };\n\n let seed = rand.seed(interp)?;\n\n Ok(interp.convert(seed))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 64, "score": 461359.2695834112 }, { "content": "pub fn offset(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? {\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n if let Some([begin, end]) = data.offset(capture)? {\n\n if let (Ok(begin), Ok(end)) = (Int::try_from(begin), Int::try_from(end)) {\n\n let ary = Array::assoc(interp.convert(begin), interp.convert(end));\n\n Array::alloc_value(ary, interp)\n\n } else {\n\n Err(ArgumentError::from(\"input string too long\").into())\n\n }\n\n } else {\n\n let ary = Array::assoc(Value::nil(), Value::nil());\n\n Array::alloc_value(ary, interp)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 65, "score": 461145.2886748499 }, { "content": "pub fn end(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? {\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n let end = data.end(capture)?;\n\n match end.map(Int::try_from) {\n\n Some(Ok(end)) => Ok(interp.convert(end)),\n\n Some(Err(_)) => Err(ArgumentError::from(\"input string too long\").into()),\n\n None => Ok(Value::nil()),\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 66, "score": 461145.2886748499 }, { "content": "pub fn begin(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? 
{\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n let begin = data.begin(capture)?;\n\n match begin.map(Int::try_from) {\n\n Some(Ok(begin)) => Ok(interp.convert(begin)),\n\n Some(Err(_)) => Err(ArgumentError::from(\"input string too long\").into()),\n\n None => Ok(Value::nil()),\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 67, "score": 461145.28867484984 }, { "content": "pub fn all_symbols(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let all_symbols = Symbol::all_symbols(interp)?;\n\n Array::alloc_value(all_symbols, interp)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 68, "score": 458823.23871841107 }, { "content": "#[inline]\n\npub fn uuid(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let uuid = securerandom::uuid();\n\n Ok(interp.convert_mut(uuid))\n\n}\n", "file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 69, "score": 458823.2387184112 }, { "content": "pub fn now(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let now = Time::now();\n\n let result = Time::alloc_value(now, interp)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 70, "score": 458823.23871841107 }, { "content": "pub fn is_fixed_encoding(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let is_fixed_encoding = regexp.is_fixed_encoding();\n\n Ok(interp.convert(is_fixed_encoding))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 71, "score": 456571.4271348963 }, { "content": "pub fn named_captures(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let named_captures = regexp.named_captures()?;\n\n interp.try_convert_mut(named_captures)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 72, "score": 456571.4271348963 }, { "content": "pub fn reverse_bang(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.reverse();\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 73, "score": 456571.4271348963 }, { "content": "pub fn year_day(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? 
};\n\n let year_day = time.inner().year_day();\n\n let result = interp.convert(year_day);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 74, "score": 456571.4271348963 }, { "content": "pub fn ord(interp: &mut Artichoke, value: Value) -> Result<Value, Exception> {\n\n let string = value.try_into_mut::<&[u8]>(interp)?;\n\n\n\n let ord = if let Some((start, end, ch)) = string.char_indices().next() {\n\n if ch == '\\u{FFFD}' {\n\n let slice = &string[start..end];\n\n match slice {\n\n [] => 0,\n\n [a] => u32::from_le_bytes([*a, 0, 0, 0]),\n\n [a, b] => u32::from_le_bytes([*a, *b, 0, 0]),\n\n [a, b, c] => u32::from_le_bytes([*a, *b, *c, 0]),\n\n [a, b, c, d] => u32::from_le_bytes([*a, *b, *c, *d]),\n\n _ => return Err(ArgumentError::from(\"Unicode out of range\").into()),\n\n }\n\n } else {\n\n // All `char`s are valid `u32`s\n\n // https://github.com/rust-lang/rust/blob/1.41.0/src/libcore/char/convert.rs#L12-L20\n\n ch as u32\n\n }\n\n } else {\n\n return Err(ArgumentError::from(\"empty string\").into());\n\n };\n\n Ok(interp.convert(ord))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/string/trampoline.rs", "rank": 75, "score": 453938.6400786502 }, { "content": "pub fn new_seed(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let seed = Random::new_seed();\n\n Ok(interp.convert(seed))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 76, "score": 453356.04612276563 }, { "content": "pub fn initialize(interp: &mut Artichoke, into: Value) -> Result<Value, Exception> {\n\n let environ = Environ::initialize();\n\n let result = Environ::box_into_value(environ, into, interp)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/env/trampoline.rs", "rank": 77, "score": 452869.5225548884 }, { "content": "pub fn equal(interp: &mut Artichoke, mut rand: Value, other: Value) -> Result<Value, Exception> {\n\n let rand = unsafe { Random::unbox_from_value(&mut rand, interp)? };\n\n let eql = rand.eql(interp, other)?;\n\n Ok(interp.convert(eql))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 78, "score": 452854.8707034724 }, { "content": "pub fn eql(interp: &mut Artichoke, mut regexp: Value, other: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let cmp = regexp.eql(interp, other);\n\n Ok(interp.convert(cmp))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 79, "score": 452854.8707034723 }, { "content": "pub fn push(interp: &mut Artichoke, mut ary: Value, value: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.push(value);\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 80, "score": 452238.7372482858 }, { "content": "pub fn len(interp: &mut Artichoke, mut ary: Value) -> Result<usize, Exception> {\n\n let array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n Ok(array.len())\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 81, "score": 448587.8049497788 }, { "content": "pub fn bytes(interp: &mut Artichoke, mut rand: Value, size: Value) -> Result<Value, Exception> {\n\n let mut rand = unsafe { Random::unbox_from_value(&mut rand, interp)? 
};\n\n let size = size.implicitly_convert_to_int(interp)?;\n\n let buf = rand.bytes(interp, size)?;\n\n Ok(interp.convert_mut(buf))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 82, "score": 448295.9863702013 }, { "content": "pub fn escape(interp: &mut Artichoke, pattern: Value) -> Result<Value, Exception> {\n\n let pattern = pattern.implicitly_convert_to_string(interp)?;\n\n let pattern = Regexp::escape(pattern)?;\n\n Ok(interp.convert_mut(pattern))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 83, "score": 447682.16178918665 }, { "content": "pub fn load(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let success = kernel::require::load(interp, path)?;\n\n Ok(interp.convert(success))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 84, "score": 447682.1617891867 }, { "content": "pub fn urandom(interp: &mut Artichoke, size: Value) -> Result<Value, Exception> {\n\n let size = size.implicitly_convert_to_int(interp)?;\n\n let buf = random::urandom(size)?;\n\n Ok(interp.convert_mut(buf))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 85, "score": 447682.16178918665 }, { "content": "/// Load the Artichoke `MSpec` entry point end execute the specs.\n\n///\n\n/// # Errors\n\n///\n\n/// If an exception is raised on the Artichoke interpreter, it is returned.\n\npub fn run<'a, T>(interp: &mut Artichoke, specs: T) -> Result<bool, Exception>\n\nwhere\n\n T: IntoIterator<Item = &'a str>,\n\n{\n\n interp.def_rb_source_file(\"/src/lib/spec_helper.rb\", &b\"\"[..])?;\n\n interp.def_rb_source_file(\n\n \"/src/lib/test/spec_runner\",\n\n &include_bytes!(\"spec_runner.rb\")[..],\n\n )?;\n\n interp.eval_file(Path::new(\"/src/lib/test/spec_runner\"))?;\n\n let specs = interp.try_convert_mut(specs.into_iter().collect::<Vec<_>>())?;\n\n let result = interp\n\n .top_self()\n\n .funcall(interp, \"run_specs\", &[specs], None)?;\n\n interp.try_convert(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n // TODO(GH-528): fix failing tests on Windows.\n\n #[cfg_attr(target_os = \"windows\", should_panic)]\n\n fn mspec_framework_loads() {\n\n let mut interp = artichoke::interpreter().unwrap();\n\n super::init(&mut interp).unwrap();\n\n // should not panic\n\n assert!(super::run(&mut interp, vec![]).unwrap());\n\n }\n\n}\n", "file_path": "spec-runner/src/mspec.rs", "rank": 86, "score": 440570.990472061 }, { "content": "pub fn init(interp: &mut Artichoke) -> InitializeResult<()> {\n\n let exception_spec = class::Spec::new(\"Exception\", None, None)?;\n\n class::Builder::for_spec(interp, &exception_spec).define()?;\n\n interp.def_class::<Exception>(exception_spec)?;\n\n\n\n let nomemory_spec = class::Spec::new(\"NoMemoryError\", None, None)?;\n\n class::Builder::for_spec(interp, &nomemory_spec)\n\n .with_super_class::<Exception, _>(\"Exception\")?\n\n .define()?;\n\n interp.def_class::<NoMemoryError>(nomemory_spec)?;\n\n\n\n let script_spec = class::Spec::new(\"ScriptError\", None, None)?;\n\n class::Builder::for_spec(interp, &script_spec)\n\n .with_super_class::<Exception, _>(\"Exception\")?\n\n .define()?;\n\n interp.def_class::<ScriptError>(script_spec)?;\n\n\n\n let load_spec = class::Spec::new(\"LoadError\", None, None)?;\n\n class::Builder::for_spec(interp, &load_spec)\n\n .with_super_class::<ScriptError, _>(\"ScriptError\")?\n", "file_path": "artichoke-backend/src/extn/core/exception/mod.rs", "rank": 88, "score": 
439263.54148285603 }, { "content": "pub fn div(interp: &mut Artichoke, value: Value, denominator: Value) -> Result<Value, Exception> {\n\n let value = value.try_into::<Integer>(interp)?;\n\n let quotient = value.div(interp, denominator)?;\n\n Ok(interp.convert_mut(quotient))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/integer/trampoline.rs", "rank": 89, "score": 437265.54756759375 }, { "content": "pub fn srand(interp: &mut Artichoke, seed: Option<Value>) -> Result<Value, Exception> {\n\n let seed = interp.try_convert_mut(seed)?;\n\n let old_seed = random::srand(interp, seed)?;\n\n Ok(interp.convert(old_seed))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 90, "score": 436994.3403732732 }, { "content": "#[inline]\n\npub fn base64(interp: &mut Artichoke, len: Option<Value>) -> Result<Value, Exception> {\n\n let base64 = if let Some(len) = len {\n\n let len = len.implicitly_convert_to_int(interp)?;\n\n securerandom::base64(Some(len))?\n\n } else {\n\n securerandom::base64(None)?\n\n };\n\n Ok(interp.convert_mut(base64))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 91, "score": 436994.3403732732 }, { "content": "#[inline]\n\npub fn hex(interp: &mut Artichoke, len: Option<Value>) -> Result<Value, Exception> {\n\n let hex = if let Some(len) = len {\n\n let len = len.implicitly_convert_to_int(interp)?;\n\n securerandom::hex(Some(len))?\n\n } else {\n\n securerandom::hex(None)?\n\n };\n\n Ok(interp.convert_mut(hex))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 92, "score": 436994.3403732731 }, { "content": "#[inline]\n\npub fn alphanumeric(interp: &mut Artichoke, len: Option<Value>) -> Result<Value, Exception> {\n\n let alpha = if let Some(len) = len {\n\n let len = len.implicitly_convert_to_int(interp)?;\n\n securerandom::alphanumeric(Some(len))?\n\n } else {\n\n securerandom::alphanumeric(None)?\n\n };\n\n Ok(interp.convert_mut(alpha))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 93, "score": 436994.3403732732 } ]
Rust
lumol-sim/src/output/custom.rs
Luthaf/lumol
3ef0809b421a574c3604e611372ef6644c251184
use std::error;
use std::fmt;
use std::fs::File;
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};

use caldyn::{Context, Expr};
use caldyn::Error as CaldynError;

use log::error;
use log_once::{warn_once, error_once};

use super::Output;
use lumol_core::{units, System};

#[derive(Debug)]
pub enum CustomOutputError {
    Io(io::Error),
    Expr(CaldynError),
    Custom(String),
}

impl From<io::Error> for CustomOutputError {
    fn from(error: io::Error) -> CustomOutputError {
        CustomOutputError::Io(error)
    }
}

impl From<CaldynError> for CustomOutputError {
    fn from(error: CaldynError) -> CustomOutputError {
        CustomOutputError::Expr(error)
    }
}

impl From<String> for CustomOutputError {
    fn from(error: String) -> CustomOutputError {
        CustomOutputError::Custom(error)
    }
}

impl fmt::Display for CustomOutputError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        match *self {
            CustomOutputError::Io(ref err) => write!(fmt, "{}", err)?,
            CustomOutputError::Expr(ref err) => write!(fmt, "{}", err)?,
            CustomOutputError::Custom(ref err) => write!(fmt, "{}", err)?,
        }
        Ok(())
    }
}

impl error::Error for CustomOutputError {
    fn description(&self) -> &str {
        match *self {
            CustomOutputError::Io(ref err) => err.description(),
            CustomOutputError::Expr(ref err) => err.description(),
            CustomOutputError::Custom(ref err) => err,
        }
    }

    fn cause(&self) -> Option<&dyn error::Error> {
        match *self {
            CustomOutputError::Io(ref err) => Some(err),
            CustomOutputError::Expr(ref err) => Some(err),
            CustomOutputError::Custom(_) => None,
        }
    }
}

struct FormatArgs {
    args: Vec<(String, Expr)>,
    tail: String,
}

impl FormatArgs {
    fn new(format: &str) -> Result<FormatArgs, CustomOutputError> {
        let mut args = Vec::new();
        let mut expr = String::new();
        let mut tail = String::new();

        let mut in_expr = false;
        for c in format.chars() {
            match c {
                '{' if !in_expr => {
                    in_expr = true;
                }
                '}' if in_expr => {
                    in_expr = false;
                    let sub_expr = Expr::parse(&expr)?;
                    args.push((tail.clone(), sub_expr));
                    tail.clear();
                    expr.clear();
                }
                '{' if in_expr => {
                    return Err(CustomOutputError::Custom("found { in an expression".into()));
                }
                '}' if !in_expr => {
                    return Err(
                        CustomOutputError::Custom("found } outside of an expression".into()),
                    );
                }
                c => {
                    if in_expr {
                        expr.push(c);
                    } else {
                        tail.push(c);
                    }
                }
            }
        }
        if in_expr {
            return Err(CustomOutputError::Custom("mismatched braces".into()));
        }

        Ok(FormatArgs {
            args: args,
            tail: tail,
        })
    }

    fn get_context<'a>(&self, system: &'a System) -> Context<'a> {
        let mut context = Context::new();
        context.set_query(move |name| {
            units::CONVERSION_FACTORS.get(name).cloned().or_else(|| {
                macro_rules! get_particle_data {
                    ($index: ident, $data: ident) => (
                        system.particles()
                              .$data
                              .get($index)
                              .cloned()
                              .unwrap_or_else(|| {
                                  warn_once!(
                                      "index out of bound in custom output: \
                                      index is {}, but we only have {} atoms",
                                      $index, system.size()
                                  );
                                  return num_traits::Zero::zero();
                              })
                    );
                }

                if name.contains('[') {
                    let (name, index) = parse_index(name);
                    match name {
                        "x" => Some(get_particle_data!(index, position)[0]),
                        "y" => Some(get_particle_data!(index, position)[1]),
                        "z" => Some(get_particle_data!(index, position)[2]),
                        "vx" => Some(get_particle_data!(index, velocity)[0]),
                        "vy" => Some(get_particle_data!(index, velocity)[1]),
                        "vz" => Some(get_particle_data!(index, velocity)[2]),
                        "mass" => Some(get_particle_data!(index, mass)),
                        "charge" => Some(get_particle_data!(index, charge)),
                        _ => None,
                    }
                } else {
                    match name {
                        "step" => Some(system.step as f64),
                        "pressure" => Some(system.pressure()),
                        "volume" => Some(system.volume()),
                        "temperature" => Some(system.temperature()),
                        "natoms" => Some(system.size() as f64),
                        "cell.a" => Some(system.cell.a()),
                        "cell.b" => Some(system.cell.b()),
                        "cell.c" => Some(system.cell.c()),
                        "cell.alpha" => Some(system.cell.alpha()),
                        "cell.beta" => Some(system.cell.beta()),
                        "cell.gamma" => Some(system.cell.gamma()),
                        "stress.xx" => Some(system.stress()[0][0]),
                        "stress.yy" => Some(system.stress()[1][1]),
                        "stress.zz" => Some(system.stress()[2][2]),
                        "stress.xy" => Some(system.stress()[0][1]),
                        "stress.xz" => Some(system.stress()[0][2]),
                        "stress.yz" => Some(system.stress()[1][2]),
                        _ => None,
                    }
                }
            })
        });
        return context;
    }

    fn format(&self, system: &System) -> Result<String, CustomOutputError> {
        let context = self.get_context(system);
        let mut output = String::new();
        for &(ref string, ref expr) in &self.args {
            output.push_str(string);
            let value = expr.eval(&context)?;
            output.push_str(&value.to_string());
        }
        output.push_str(&self.tail);
        return Ok(output);
    }
}

fn parse_index(input: &str) -> (&str, usize) {
    let l_brackets = input.match_indices('[').collect::<Vec<_>>();
    let r_brackets = input.match_indices(']').collect::<Vec<_>>();
    if l_brackets.len() != 1 || r_brackets.len() != 1 {
        return (input, 0);
    }
    let start = l_brackets[0].0;
    let end = r_brackets[0].0;
    if start > end {
        return (input, 0);
    }
    if let Ok(index) = input[(start + 1)..end].parse() {
        return (&input[..start], index);
    } else {
        return (input, 0);
    }
}

pub struct CustomOutput {
    file: BufWriter<File>,
    path: PathBuf,
    template: String,
    args: FormatArgs,
}

impl CustomOutput {
    pub fn new<P: AsRef<Path>>(
        filename: P,
        template: &str,
    ) -> Result<CustomOutput, CustomOutputError> {
        Ok(CustomOutput {
            file: BufWriter::new(File::create(filename.as_ref())?),
            path: filename.as_ref().to_owned(),
            template: template.into(),
            args: FormatArgs::new(template)?,
        })
    }
}

impl Output for CustomOutput {
    fn setup(&mut self, _: &System) {
        writeln_or_log!(self, "# Custom output");
        writeln_or_log!(self, "# {}", self.template);
    }

    fn write(&mut self, system: &System) {
        if let Ok(formatted) = self.args.format(system) {
            writeln_or_log!(self, "{}", formatted);
        } else {
            error_once!("Could not evaluate custom output {}", self.template);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use super::super::tests::{test_output, testing_system};

    fn format(input: &str) -> String {
        FormatArgs::new(input).unwrap().format(&testing_system()).unwrap()
    }

    #[test]
    fn parsing_index() {
        assert_eq!(parse_index("a[6]"), ("a", 6));
        assert_eq!(parse_index("a"), ("a", 0));
        assert_eq!(parse_index("a][6"), ("a][6", 0));
        assert_eq!(parse_index("a[6][2]"), ("a[6][2]", 0));
        assert_eq!(parse_index("a[6]2]"), ("a[6]2]", 0));
        assert_eq!(parse_index("a[6][2"), ("a[6][2", 0));
        assert_eq!(parse_index("a[b]"), ("a[b]", 0));
    }

    #[test]
    fn format_args_parsing() {
        assert!(FormatArgs::new("one {test} two {5 } three!").is_ok());
        assert!(FormatArgs::new("{3 + 4} {").is_err());
        assert!(FormatArgs::new("{3 + 4} }").is_err());
        assert!(FormatArgs::new("{3 + { 4}").is_err());
        assert!(FormatArgs::new("{3 + {} }").is_err());
    }

    #[test]
    fn formating() {
        assert_eq!(format("{3 + 4}"), "7");
        assert_eq!(format("{pressure / bar}"), "10299.991728079816");
        assert_eq!(format("{temperature / K}"), "38083.04389172312");
        assert_eq!(format("{volume / A^3}"), "1000");
        assert_eq!(format("{cell.a / A}"), "10");
        assert_eq!(format("{cell.b / A}"), "10");
        assert_eq!(format("{cell.c / A}"), "10");
        assert_eq!(format("{cell.alpha}"), "90");
        assert_eq!(format("{cell.beta}"), "90");
        assert_eq!(format("{cell.gamma}"), "90");
        assert_eq!(format("{stress.xx / bar}"), "30899.975184239443");
        assert_eq!(format("{stress.yy / bar}"), "0");
        assert_eq!(format("{stress.zz / bar}"), "0");
        assert_eq!(format("{stress.xy / bar}"), "0");
        assert_eq!(format("{stress.xz / bar}"), "0");
        assert_eq!(format("{stress.yz / bar}"), "0");
        assert_eq!(format("{x[1]}"), "1.3");
        assert_eq!(format("{vy[1]}"), "0");
        assert_eq!(format("{vx[0]}"), "0.1");
        assert_eq!(format("{cell.a / bohr}"), "18.897261328856434");
        assert_eq!(format("{cell.a / nm}"), "1");
        assert_eq!(format("{cell.a / m}"), "0.000000001");
        assert_eq!(format("{step}"), "42");
    }

    #[test]
    fn custom() {
        let template = "p {pressure/bar} t {3 * 5} \tff";
        test_output(
            |path| Box::new(CustomOutput::new(path, template).unwrap()),
            "# Custom output
            # p {pressure/bar} t {3 * 5} \tff
            p 10299.991728079816 t 15 \tff
            ",
        );
    }
}
use std::error;
use std::fmt;
use std::fs::File;
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};

use caldyn::{Context, Expr};
use caldyn::Error as CaldynError;

use log::error;
use log_once::{warn_once, error_once};

use super::Output;
use lumol_core::{units, System};

#[derive(Debug)]
pub enum CustomOutputError {
    Io(io::Error),
    Expr(CaldynError),
    Custom(String),
}

impl From<io::Error> for CustomOutputError {
    fn from(error: io::Error) -> CustomOutputError {
        CustomOutputError::Io(error)
    }
}

impl From<CaldynError> for CustomOutputError {
    fn from(error: CaldynError) -> CustomOutputError {
        CustomOutputError::Expr(error)
    }
}

impl From<String> for CustomOutputError {
    fn from(error: String) -> CustomOutputError {
        CustomOutputError::Custom(error)
    }
}

impl fmt::Display for CustomOutputError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        match *self {
            CustomOutputError::Io(ref err) => write!(fmt, "{}", err)?,
            CustomOutputError::Expr(ref err) => write!(fmt, "{}", err)?,
            CustomOutputError::Custom(ref err) => write!(fmt, "{}", err)?,
        }
        Ok(())
    }
}

impl error::Error for CustomOutputError {
    fn description(&self) -> &str {
        match *self {
            CustomOutputError::Io(ref err) => err.description(),
            CustomOutputError::Expr(ref err) => err.description(),
            CustomOutputError::Custom(ref err) => err,
        }
    }

    fn cause(&self) -> Option<&dyn error::Error> {
        match *self {
            CustomOutputError::Io(ref err) => Some(err),
            CustomOutputError::Expr(ref err) => Some(err),
            CustomOutputError::Custom(_) => None,
        }
    }
}

struct FormatArgs {
    args: Vec<(String, Expr)>,
    tail: String,
}

impl FormatArgs {
    fn new(format: &str) -> Result<FormatArgs, CustomOutputError> {
        let mut args = Vec::new();
        let mut expr = String::new();
        let mut tail = String::new();

        let mut in_expr = false;
        for c in format.chars() {
            match c {
                '{' if !in_expr => {
                    in_expr = true;
                }
                '}' if in_expr => {
                    in_expr = false;
                    let sub_expr = Expr::parse(&expr)?;
                    args.push((tail.clone(), sub_expr));
                    tail.clear();
                    expr.clear();
                }
                '{' if in_expr => {
                    return Err(CustomOutputError::Custom("found { in an expression".into()));
                }
                '}' if !in_expr => {
                    return Err(
                        CustomOutputError::Custom("found } outside of an expression".into()),
                    );
                }
                c => {
                    if in_expr {
                        expr.push(c);
                    } else {
                        tail.push(c);
                    }
                }
            }
        }
        if in_expr {
            return Err(CustomOutputError::Custom("mismatched braces".into()));
        }

        Ok(FormatArgs {
            args: args,
            tail: tail,
        })
    }

    fn get_context<'a>(&self, system: &'a System) -> Context<'a> {
        let mut context = Context::new();
        context.set_query(move |name| {
            units::CONVERSION_FACTORS.get(name).cloned().or_else(|| {
                macro_rules! get_particle_data {
                    ($index: ident, $data: ident) => (
                        system.particles()
                              .$data
                              .get($index)
                              .cloned()
                              .unwrap_or_else(|| {
                                  warn_once!(
                                      "index out of bound in custom output: \
                                      index is {}, but we only have {} atoms",
                                      $index, system.size()
                                  );
                                  return num_traits::Zero::zero();
                              })
                    );
                }

                if name.contains('[') {
                    let (name, index) = parse_index(name);
                    match name {
                        "x" => Some(get_particle_data!(index, position)[0]),
                        "y" => Some(get_particle_data!(index, position)[1]),
                        "z" => Some(get_particle_data!(index, position)[2]),
                        "vx" => Some(get_particle_data!(index, velocity)[0]),
                        "vy" => Some(get_particle_data!(index, velocity)[1]),
                        "vz" => Some(get_particle_data!(index, velocity)[2]),
                        "mass" => Some(get_particle_data!(index, mass)),
                        "charge" => Some(get_particle_data!(index, charge)),
                        _ => None,
                    }
                } else {
                    match name {
                        "step" => Some(system.step as f64),
                        "pressure" => Some(system.pressure()),
                        "volume" => Some(system.volume()),
                        "temperature" => Some(system.temperature()),
                        "natoms" => Some(system.size() as f64),
                        "cell.a" => Some(system.cell.a()),
                        "cell.b" => Some(system.cell.b()),
                        "cell.c" => Some(system.cell.c()),
                        "cell.alpha" => Some(system.cell.alpha()),
                        "cell.beta" => Some(system.cell.beta()),
                        "cell.gamma" => Some(system.cell.gamma()),
                        "stress.xx" => Some(system.stress()[0][0]),
                        "stress.yy" => Some(system.stress()[1][1]),
                        "stress.zz" => Some(system.stress()[2][2]),
                        "stress.xy" => Some(system.stress()[0][1]),
                        "stress.xz" => Some(system.stress()[0][2]),
                        "stress.yz" => Some(system.stress()[1][2]),
                        _ => None,
                    }
                }
            })
        });
        return context;
    }

    fn format(&self, system: &System) -> Result<String, CustomOutputError> {
        let context = self.get_context(system);
        let mut output = String::new();
        for &(ref string, ref expr) in &self.args {
            output.push_str(string);
            let value = expr.eval(&context)?;
            output.push_str(&value.to_string());
        }
        output.push_str(&self.tail);
        return Ok(output);
    }
}

fn parse_index(input: &str) -> (&str, usize) {
    let l_brackets = input.match_indices('[').collect::<Vec<_>>();
    let r_brackets = input.match_indices(']').collect::<Vec<_>>();
    if l_brackets.len() != 1 || r_brackets.len() != 1 {
        return (input, 0);
    }
    let start = l_brackets[0].0;
    let end = r_brackets[0].0;
    if start > end {
        return (input, 0);
    }
    if let Ok(index) = input[(start + 1)..end].parse() {
        return (&input[..start], index);
    } else {
        return (input, 0);
    }
}

pub struct CustomOutput {
    file: BufWriter<File>,
    path: PathBuf,
    template: String,
    args: FormatArgs,
}

impl CustomOutput {
    pub fn new<P: AsRef<Path>>(
        filename: P,
        template: &str,
    ) -> Result<CustomOutput, CustomOutputError> {
    }
}

impl Output for CustomOutput {
    fn setup(&mut self, _: &System) {
        writeln_or_log!(self, "# Custom output");
        writeln_or_log!(self, "# {}", self.template);
    }

    fn write(&mut self, system: &System) {
        if let Ok(formatted) = self.args.format(system) {
            writeln_or_log!(self, "{}", formatted);
        } else {
            error_once!("Could not evaluate custom output {}", self.template);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use super::super::tests::{test_output, testing_system};

    fn format(input: &str) -> String {
        FormatArgs::new(input).unwrap().format(&testing_system()).unwrap()
    }

    #[test]
    fn parsing_index() {
        assert_eq!(parse_index("a[6]"), ("a", 6));
        assert_eq!(parse_index("a"), ("a", 0));
        assert_eq!(parse_index("a][6"), ("a][6", 0));
        assert_eq!(parse_index("a[6][2]"), ("a[6][2]", 0));
        assert_eq!(parse_index("a[6]2]"), ("a[6]2]", 0));
        assert_eq!(parse_index("a[6][2"), ("a[6][2", 0));
        assert_eq!(parse_index("a[b]"), ("a[b]", 0));
    }

    #[test]
    fn format_args_parsing() {
        assert!(FormatArgs::new("one {test} two {5 } three!").is_ok());
        assert!(FormatArgs::new("{3 + 4} {").is_err());
        assert!(FormatArgs::new("{3 + 4} }").is_err());
        assert!(FormatArgs::new("{3 + { 4}").is_err());
        assert!(FormatArgs::new("{3 + {} }").is_err());
    }

    #[test]
    fn formating() {
        assert_eq!(format("{3 + 4}"), "7");
        assert_eq!(format("{pressure / bar}"), "10299.991728079816");
        assert_eq!(format("{temperature / K}"), "38083.04389172312");
        assert_eq!(format("{volume / A^3}"), "1000");
        assert_eq!(format("{cell.a / A}"), "10");
        assert_eq!(format("{cell.b / A}"), "10");
        assert_eq!(format("{cell.c / A}"), "10");
        assert_eq!(format("{cell.alpha}"), "90");
        assert_eq!(format("{cell.beta}"), "90");
        assert_eq!(format("{cell.gamma}"), "90");
        assert_eq!(format("{stress.xx / bar}"), "30899.975184239443");
        assert_eq!(format("{stress.yy / bar}"), "0");
        assert_eq!(format("{stress.zz / bar}"), "0");
        assert_eq!(format("{stress.xy / bar}"), "0");
        assert_eq!(format("{stress.xz / bar}"), "0");
        assert_eq!(format("{stress.yz / bar}"), "0");
        assert_eq!(format("{x[1]}"), "1.3");
        assert_eq!(format("{vy[1]}"), "0");
        assert_eq!(format("{vx[0]}"), "0.1");
        assert_eq!(format("{cell.a / bohr}"), "18.897261328856434");
        assert_eq!(format("{cell.a / nm}"), "1");
        assert_eq!(format("{cell.a / m}"), "0.000000001");
        assert_eq!(format("{step}"), "42");
    }

    #[test]
    fn custom() {
        let template = "p {pressure/bar} t {3 * 5} \tff";
        test_output(
            |path| Box::new(CustomOutput::new(path, template).unwrap()),
            "# Custom output
            # p {pressure/bar} t {3 * 5} \tff
            p 10299.991728079816 t 15 \tff
            ",
        );
    }
}
        Ok(CustomOutput {
            file: BufWriter::new(File::create(filename.as_ref())?),
            path: filename.as_ref().to_owned(),
            template: template.into(),
            args: FormatArgs::new(template)?,
        })
call_expression
[ { "content": "/// Scale all velocities in the `System` such that the `system` temperature\n\n/// is `temperature`.\n\npub fn scale(system: &mut System, temperature: f64) {\n\n let instant_temperature = system.temperature();\n\n let factor = f64::sqrt(temperature / instant_temperature);\n\n for velocity in system.particles_mut().velocity {\n\n *velocity *= factor;\n\n }\n\n}\n\n\n", "file_path": "lumol-sim/src/velocities.rs", "rank": 0, "score": 429496.15212337946 }, { "content": "// For comparaison of forces, as LAMMPS does not allow to set all parameters\n\n// to the same one used by NIST\n\npub fn set_lammps_interactions(system: &mut System, cutoff: f64, kmax: usize, alpha: f64) {\n\n let lj = PairInteraction::new(\n\n Box::new(LennardJones {\n\n epsilon: 78.19743111 * K_BOLTZMANN,\n\n sigma: 3.16555789,\n\n }),\n\n cutoff,\n\n );\n\n system.set_pair_potential((\"O\", \"O\"), lj);\n\n system.set_pair_potential((\"O\", \"H\"), PairInteraction::new(Box::new(NullPotential), cutoff));\n\n system.set_pair_potential((\"H\", \"H\"), PairInteraction::new(Box::new(NullPotential), cutoff));\n\n\n\n let mut ewald = SharedEwald::new(Ewald::new(cutoff, kmax, alpha));\n\n ewald.set_restriction(PairRestriction::InterMolecular);\n\n system.set_coulomb_potential(Box::new(ewald));\n\n}\n\n\n", "file_path": "tests/nist-spce.rs", "rank": 1, "score": 408490.85215199477 }, { "content": "/// Get the mass of the element with the given atomic `name`\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use lumol_core::sys::get_atomic_mass;\n\n/// assert_eq!(get_atomic_mass(\"Ti\"), Some(47.867));\n\n/// assert_eq!(get_atomic_mass(\"Ow\"), None);\n\n/// ```\n\npub fn get_atomic_mass(name: &str) -> Option<f64> {\n\n for (symbol, mass) in ATOMIC_MASSES.iter() {\n\n if name == *symbol {\n\n return Some(*mass);\n\n }\n\n }\n\n return None;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn mass() {\n\n assert_eq!(get_atomic_mass(\"O\"), Some(15.999));\n\n assert_eq!(get_atomic_mass(\"HOH\"), None);\n\n }\n\n}\n", "file_path": "lumol-core/src/sys/config/mass.rs", "rank": 2, "score": 401948.8110842284 }, { "content": "/// Extract a number (integer or float) at the given `key`, from the `config`\n\n/// TOML table interpreted as a `context`\n\npub fn number(key: &str, config: &Table, context: &str) -> Result<f64, Error> {\n\n let number = config.get(key).ok_or(\n\n Error::from(format!(\"missing '{}' key in {}\", key, context))\n\n )?;\n\n match *number {\n\n ::toml::Value::Integer(v) => Ok(v as f64),\n\n ::toml::Value::Float(v) => Ok(v),\n\n _ => Err(Error::from(format!(\"'{}' must be a number in {}\", key, context))),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/extract.rs", "rank": 3, "score": 400711.12315535435 }, { "content": "pub fn get_system(path: &str) -> System {\n\n let path = Path::new(file!()).parent().unwrap().join(\"data\").join(\"nist-spce\").join(path);\n\n let mut system = TrajectoryBuilder::new().open(&path).and_then(|mut traj| traj.read()).unwrap();\n\n\n\n let mut file = File::open(path).unwrap();\n\n let mut buffer = String::new();\n\n file.read_to_string(&mut buffer).unwrap();\n\n let line = buffer.lines().nth(1).unwrap();\n\n let mut splited = line.split_whitespace();\n\n assert_eq!(splited.next(), Some(\"cell:\"));\n\n let a: f64 = splited.next()\n\n .expect(\"Missing 'a' cell parameter\")\n\n .parse()\n\n .expect(\"'a' cell parameter is not a float\");\n\n let b: f64 = splited.next()\n\n .expect(\"Missing 'b' cell parameter\")\n\n .parse()\n\n .expect(\"'b' cell 
parameter is not a float\");\n\n let c: f64 = splited.next()\n\n .expect(\"Missing 'c' cell parameter\")\n", "file_path": "tests/nist-spce.rs", "rank": 4, "score": 400690.4087205428 }, { "content": "pub fn get_system(name: &str) -> System {\n\n let data = Path::new(file!()).parent().unwrap().join(\"..\").join(\"data\");\n\n\n\n let system = data.join(String::from(name) + \".pdb\");\n\n let mut system = TrajectoryBuilder::new().open(system)\n\n .and_then(|mut trajectory| trajectory.read())\n\n .unwrap();\n\n\n\n let interactions = data.join(String::from(name) + \".toml\");\n\n InteractionsInput::new(interactions).and_then(|input| input.read(&mut system))\n\n .unwrap();\n\n\n\n return system;\n\n}\n\n\n", "file_path": "benches/utils/mod.rs", "rank": 5, "score": 400690.40872054273 }, { "content": "pub fn get_system(name: &str) -> System {\n\n let path = Path::new(file!()).parent().unwrap().join(\"data\").join(\"nist-lj\").join(name);\n\n return Input::new(path).unwrap().read_system().unwrap();\n\n}\n\n\n", "file_path": "tests/nist-lj.rs", "rank": 6, "score": 400690.40872054273 }, { "content": "pub fn set_nist_interactions(system: &mut System, cutoff: f64) {\n\n let mut lj = PairInteraction::new(\n\n Box::new(LennardJones {\n\n epsilon: 78.19743111 * K_BOLTZMANN,\n\n sigma: 3.16555789,\n\n }),\n\n cutoff,\n\n );\n\n lj.enable_tail_corrections();\n\n system.set_pair_potential((\"O\", \"O\"), lj);\n\n system.set_pair_potential((\"O\", \"H\"), PairInteraction::new(Box::new(NullPotential), cutoff));\n\n system.set_pair_potential((\"H\", \"H\"), PairInteraction::new(Box::new(NullPotential), cutoff));\n\n\n\n let alpha = 5.6 / f64::min(f64::min(system.cell.a(), system.cell.b()), system.cell.c());\n\n let mut ewald = SharedEwald::new(Ewald::new(cutoff, 5, alpha));\n\n ewald.set_restriction(PairRestriction::InterMolecular);\n\n system.set_coulomb_potential(Box::new(ewald));\n\n}\n\n\n\n\n", "file_path": "tests/nist-spce.rs", "rank": 7, "score": 383960.1604626195 }, { "content": "/// Extract an array at the given `key`, from the `config` TOML table\n\n/// interpreted as a `context`\n\npub fn slice<'a>(key: &str, config: &'a Table, context: &str) -> Result<&'a [Value], Error> {\n\n let array = config.get(key).ok_or(\n\n Error::from(format!(\"missing '{}' key in {}\", key, context))\n\n )?;\n\n let array = array.as_array().ok_or(\n\n Error::from(format!(\"'{}' must be an array in {}\", key, context))\n\n );\n\n return array.map(|arr| arr.as_slice());\n\n}\n\n\n", "file_path": "lumol-input/src/extract.rs", "rank": 9, "score": 370721.6194545822 }, { "content": "/// Parse the string `val` and convert it to the corresponding internal unit\n\n///\n\n/// ```\n\n/// use lumol_core::units;\n\n/// let internal = units::from_str(\"10 A\").unwrap();\n\n/// assert!(internal == 10.0);\n\n/// ```\n\npub fn from_str(value: &str) -> Result<f64, ParseError> {\n\n let splitted = value.split_whitespace().collect::<Vec<&str>>();\n\n let unit = splitted[1..].join(\" \");\n\n let unit = if unit.is_empty() {\n\n UnitExpr::Val(1.0)\n\n } else {\n\n UnitExpr::parse(&unit)?\n\n };\n\n let value = splitted[0].parse::<f64>()?;\n\n return Ok(unit.eval() * value);\n\n}\n\n\n", "file_path": "lumol-core/src/units.rs", "rank": 10, "score": 364380.6489036419 }, { "content": "/// Convert the numeric value `val` from the unit `unit` to the internal unit.\n\n///\n\n/// ```\n\n/// use lumol_core::units;\n\n/// let internal = units::from(10.0, \"A\").unwrap();\n\n/// assert!(internal == 10.0);\n\n/// ```\n\npub fn from(value: f64, unit: &str) 
-> Result<f64, ParseError> {\n\n let unit = UnitExpr::parse(unit)?;\n\n return Ok(unit.eval() * value);\n\n}\n\n\n", "file_path": "lumol-core/src/units.rs", "rank": 11, "score": 359030.94251445506 }, { "content": "/// Convert the numeric value `val` (in internal units) to the unit `unit`.\n\n///\n\n/// ```\n\n/// use lumol_core::units;\n\n/// let real = units::to(10.0, \"A\").unwrap();\n\n/// assert!(real == 10.0);\n\n/// ```\n\npub fn to(value: f64, unit: &str) -> Result<f64, ParseError> {\n\n let unit = UnitExpr::parse(unit)?;\n\n return Ok(value / unit.eval());\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::{Token, UnitExpr};\n\n use super::{shunting_yard, tokenize};\n\n use approx::assert_ulps_eq;\n\n\n\n #[test]\n\n fn tokens() {\n\n assert_eq!(tokenize(\"(\")[0], Token::LParen);\n\n assert_eq!(tokenize(\")\")[0], Token::RParen);\n\n assert_eq!(tokenize(\"*\")[0], Token::Mul);\n\n assert_eq!(tokenize(\"/\")[0], Token::Div);\n\n assert_eq!(tokenize(\"^\")[0], Token::Pow);\n\n assert_eq!(tokenize(\"foo\")[0], Token::Value(String::from(\"foo\")));\n", "file_path": "lumol-core/src/units.rs", "rank": 12, "score": 359030.94251445506 }, { "content": "/// Read a the first molecule from the file at `path`. If no bond information\n\n/// exists in the file, bonds are guessed.\n\npub fn read_molecule<P: AsRef<Path>>(path: P) -> Result<Molecule, chemfiles::Error> {\n\n let mut trajectory = chemfiles::Trajectory::open(&path, 'r')?;\n\n let mut frame = chemfiles::Frame::new();\n\n trajectory.read(&mut frame)?;\n\n\n\n // Only guess the topology when we have no bond information\n\n if frame.topology().bonds_count() == 0 {\n\n frame.guess_bonds()?;\n\n }\n\n\n\n let system: System = frame.into();\n\n assert!(!system.is_empty(), \"No molecule in the file at {}\", path.as_ref().display());\n\n\n\n return Ok(system.molecule(0).to_owned());\n\n}\n\n\n\nstatic REDIRECT_CHEMFILES_WARNING: Once = Once::new();\n\n\n", "file_path": "lumol-core/src/sys/chfl.rs", "rank": 13, "score": 356919.08441067685 }, { "content": "fn get_cell_number(value: &Value) -> Result<f64, Error> {\n\n if let Some(value) = value.as_integer() {\n\n Ok(value as f64)\n\n } else if let Some(value) = value.as_float() {\n\n Ok(value)\n\n } else {\n\n Err(Error::from(\"values must be numbers in 'cell' array\"))\n\n }\n\n}\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 14, "score": 356828.87847085315 }, { "content": "/// Extract the string at the given `key`, from the `config` TOML table\n\n/// interpreted as a `context`\n\npub fn str<'a>(key: &str, config: &'a Table, context: &str) -> Result<&'a str, Error> {\n\n let string = config.get(key).ok_or(\n\n Error::from(format!(\"missing '{}' key in {}\", key, context))\n\n )?;\n\n return string.as_str().ok_or(\n\n Error::from(format!(\"'{}' must be a string in {}\", key, context))\n\n );\n\n}\n\n\n", "file_path": "lumol-input/src/extract.rs", "rank": 15, "score": 348942.9248299028 }, { "content": "/// Extract the string 'type' key in a TOML table\n\npub fn typ<'a>(config: &'a Table, context: &str) -> Result<&'a str, Error> {\n\n let typ = config.get(\"type\").ok_or(\n\n Error::from(format!(\"missing 'type' key in {}\", context))\n\n )?;\n\n return typ.as_str().ok_or(Error::from(format!(\"'type' key must be a string in {}\", context)));\n\n}\n", "file_path": "lumol-input/src/extract.rs", "rank": 16, "score": 346718.71929396526 }, { "content": "/// Extract a unsigned integer at the given `key`, from the `config`\n\n/// TOML table interpreted as a 
`context`\n\npub fn uint(key: &str, config: &Table, context: &str) -> Result<u64, Error> {\n\n let number = config.get(key).ok_or(\n\n Error::from(format!(\"missing '{}' key in {}\", key, context))\n\n )?;\n\n match *number {\n\n ::toml::Value::Integer(v) => {\n\n if v < 0 {\n\n Err(Error::from(format!(\"'{}' must be a positive integer in {}\", key, context)))\n\n } else {\n\n Ok(v as u64)\n\n }\n\n }\n\n _ => Err(Error::from(format!(\"'{}' must be a positive integer in {}\", key, context))),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/extract.rs", "rank": 17, "score": 342440.29004708584 }, { "content": "pub fn move_all_rigid_molecule(mut system: System) -> System {\n\n let mut rng = get_rng();\n\n\n\n for mut molecule in system.molecules_mut() {\n\n let delta = Vector3D::new(rng.gen(), rng.gen(), rng.gen());\n\n for position in molecule.particles_mut().position {\n\n *position += delta;\n\n }\n\n }\n\n\n\n return system;\n\n}\n", "file_path": "benches/utils/mod.rs", "rank": 18, "score": 341660.77146686293 }, { "content": "/// Extract the table at the given `key`, from the `config` TOML table\n\n/// interpreted as a `context`.\n\npub fn table<'a>(key: &str, config: &'a Table, context: &str) -> Result<&'a Table, Error> {\n\n let table = config.get(key).ok_or(\n\n Error::from(format!(\"missing '{}' key in {}\", key, context))\n\n )?;\n\n return table.as_table().ok_or(\n\n Error::from(format!(\"'{}' must be a table in {}\", key, context))\n\n );\n\n}\n\n\n", "file_path": "lumol-input/src/extract.rs", "rank": 19, "score": 332243.66542524204 }, { "content": "/// Read the `content` string, assuming XYZ format, and create the corresponding\n\n/// system. This function is intended for testing purposes only, and will\n\n/// panic if the string is not well-formatted.\n\n///\n\n/// If the comment line contains `cell: <a>`, the system will have a cubic unit\n\n/// cell of size a.\n\npub fn system_from_xyz(content: &str) -> System {\n\n let mut system = System::new();\n\n\n\n let lines = content.lines().collect::<Vec<_>>();\n\n let natoms = lines[0].trim().parse::<usize>().expect(\"Could not parse integer\");\n\n for i in 0..natoms {\n\n let splitted = lines[i + 2].split_whitespace().collect::<Vec<_>>();\n\n let name = splitted[0];\n\n let x = splitted[1].parse::<f64>().expect(\"Could not parse float\");\n\n let y = splitted[2].parse::<f64>().expect(\"Could not parse float\");\n\n let z = splitted[3].parse::<f64>().expect(\"Could not parse float\");\n\n let mut particle = Particle::with_position(name, [x, y, z].into());\n\n if splitted.len() == 7 {\n\n let vx = splitted[4].parse::<f64>().expect(\"Could not parse float\");\n\n let vy = splitted[5].parse::<f64>().expect(\"Could not parse float\");\n\n let vz = splitted[6].parse::<f64>().expect(\"Could not parse float\");\n\n particle.velocity = [vx, vy, vz].into();\n\n }\n\n system.add_molecule(Molecule::new(particle));\n\n }\n", "file_path": "lumol-core/src/utils/xyz.rs", "rank": 20, "score": 324060.3896942893 }, { "content": "fn check_file_content(mut file: File, content: &str) {\n\n let mut buffer = String::new();\n\n let _ = file.read_to_string(&mut buffer).unwrap();\n\n\n\n for (l1, l2) in buffer.lines().zip(content.lines()) {\n\n assert_eq!(l1, l2.trim_start());\n\n }\n\n}\n", "file_path": "lumol-sim/src/output/tests.rs", "rank": 21, "score": 316993.9577642748 }, { "content": "pub fn testing_system() -> System {\n\n let mut system = System::with_cell(UnitCell::cubic(10.0));\n\n system.add_molecule(Molecule::new(Particle::with_position(\"F\", 
[0.0, 0.0, 0.0].into())));\n\n system.add_molecule(Molecule::new(Particle::with_position(\"F\", [1.3, 0.0, 0.0].into())));\n\n\n\n system.particles_mut().velocity[0] = [0.1, 0.0, 0.0].into();\n\n system.particles_mut().velocity[1] = [0.0, 0.0, 0.0].into();\n\n\n\n let harmonic = Box::new(Harmonic {\n\n k: units::from(300.0, \"kJ/mol/A^2\").unwrap(),\n\n x0: units::from(1.2, \"A\").unwrap(),\n\n });\n\n system.set_pair_potential((\"F\", \"F\"), PairInteraction::new(harmonic, 5.0));\n\n system.step = 42;\n\n return system;\n\n}\n\n\n", "file_path": "lumol-sim/src/output/tests.rs", "rank": 22, "score": 313702.7561470653 }, { "content": "fn get_file(config: &Table) -> Result<&str, Error> {\n\n let file = config.get(\"file\").ok_or(\n\n Error::from(\"missing 'file' key in output\")\n\n )?;\n\n\n\n file.as_str().ok_or(Error::from(\"'file' must be a string in output\"))\n\n}\n\n\n\nimpl FromToml for TrajectoryOutput {\n\n fn from_toml(config: &Table) -> Result<TrajectoryOutput, Error> {\n\n let path = get_file(config)?;\n\n let output = TrajectoryOutput::new(path)?;\n\n Ok(output)\n\n }\n\n}\n\n\n\nimpl FromToml for CellOutput {\n\n fn from_toml(config: &Table) -> Result<CellOutput, Error> {\n\n let path = get_file(config)?;\n\n let output = try_io!(CellOutput::new(path), PathBuf::from(path));\n", "file_path": "lumol-input/src/simulations/outputs.rs", "rank": 23, "score": 310089.7572260164 }, { "content": "pub fn move_rigid_molecule(system: &System) -> (usize, Vec<Vector3D>) {\n\n let mut rng = get_rng();\n\n\n\n let molid = rng.gen_range(0, system.molecules().count());\n\n let molecule = system.molecule(molid);\n\n let delta = Vector3D::new(rng.gen(), rng.gen(), rng.gen());\n\n let mut positions = Vec::new();\n\n for position in molecule.particles().position {\n\n positions.push(position + delta);\n\n }\n\n\n\n return (molid, positions);\n\n}\n\n\n", "file_path": "benches/utils/mod.rs", "rank": 24, "score": 293133.44602684135 }, { "content": "fn read_appender(config: &Table, name: &str) -> Result<Appender, Error> {\n\n let allowed_keys = [\"target\", \"targets\", \"level\", \"append\"];\n\n for key in config.keys() {\n\n if !allowed_keys.contains(&&**key) {\n\n return Err(Error::from(format!(\"unknown '{}' key in log section\", key)));\n\n }\n\n }\n\n\n\n let level = config.get(\"level\")\n\n .map_or(Some(\"info\"), |level| level.as_str())\n\n .ok_or(Error::from(\"'level' must be a string in log target\"))?;\n\n\n\n let level = match level {\n\n \"trace\" => log::LevelFilter::Trace,\n\n \"debug\" => log::LevelFilter::Debug,\n\n \"info\" => log::LevelFilter::Info,\n\n \"warning\" => log::LevelFilter::Warn,\n\n \"error\" => log::LevelFilter::Error,\n\n other => return Err(Error::from(format!(\"unknown logging level '{}'\", other))),\n\n };\n", "file_path": "lumol-input/src/simulations/logging.rs", "rank": 25, "score": 279318.06293813954 }, { "content": "pub fn mean(data: SharedVec) -> f64 {\n\n let data = data.read().unwrap();\n\n data.iter().sum::<f64>() / data.len() as f64\n\n}\n", "file_path": "tests/utils/mod.rs", "rank": 26, "score": 268706.9864152888 }, { "content": "#[inline(always)]\n\npub fn erfc(value: f64) -> f64 {\n\n f64::compl_error(value)\n\n}\n", "file_path": "lumol-core/src/math.rs", "rank": 27, "score": 260697.06077800476 }, { "content": "#[inline(always)]\n\npub fn erf(value: f64) -> f64 {\n\n f64::error(value)\n\n}\n\n\n", "file_path": "lumol-core/src/math.rs", "rank": 28, "score": 260697.06077800476 }, { "content": "/// Select a random molecule in the system using `rng` as 
random number\n\n/// generator. If `hash` is `None`, any molecule can be chosen. If `hash` is\n\n/// `Some(hash)`, then a molecule with matching hash is selected.\n\n///\n\n/// This function returns `None` if no matching molecule was found, and\n\n/// `Some(molid)` with `molid` the index of the molecule if a molecule was\n\n/// selected.\n\nfn select_molecule(system: &System, hash: Option<MoleculeHash>, rng: &mut dyn RngCore) -> Option<usize> {\n\n if let Some(hash) = hash {\n\n // Pick a random molecule with matching moltype\n\n let mols = system.molecules()\n\n .enumerate()\n\n .filter(|(_, m)| m.hash() == hash)\n\n .map(|(i, _)| i)\n\n .collect::<Vec<_>>();\n\n return mols.choose(rng).cloned();\n\n } else {\n\n let nmols = system.molecules().count();\n\n if nmols == 0 {\n\n return None;\n\n } else {\n\n return Some(rng.gen_range(0, nmols));\n\n }\n\n }\n\n}\n\n\n\nmod translate;\n\npub use self::translate::Translate;\n\n\n\nmod rotate;\n\npub use self::rotate::Rotate;\n\n\n\nmod resize;\n\npub use self::resize::Resize;\n", "file_path": "lumol-sim/src/mc/moves/mod.rs", "rank": 29, "score": 258446.18236764314 }, { "content": "pub fn get_forces(path: &str) -> Vec<Vector3D> {\n\n let path = Path::new(file!()).parent().unwrap().join(\"data\").join(\"nist-spce\").join(path);\n\n\n\n let mut file = File::open(path).unwrap();\n\n let mut buffer = String::new();\n\n file.read_to_string(&mut buffer).unwrap();\n\n let mut lines = buffer.lines();\n\n\n\n let natoms: usize = lines.next().unwrap().parse().unwrap();\n\n lines.next();\n\n\n\n let mut forces = vec![Vector3D::new(0.0, 0.0, 0.0); natoms];\n\n for (i, line) in lines.enumerate() {\n\n let mut splitted = line.split_whitespace();\n\n let num: usize = splitted.next().unwrap().parse().unwrap();\n\n assert_eq!(num, i + 1);\n\n\n\n forces[i][0] = splitted.next().unwrap().parse().unwrap();\n\n forces[i][1] = splitted.next().unwrap().parse().unwrap();\n\n forces[i][2] = splitted.next().unwrap().parse().unwrap();\n", "file_path": "tests/nist-spce.rs", "rank": 30, "score": 257034.96580395213 }, { "content": "fn get_input_path<P1: AsRef<Path>, P2: AsRef<Path>>(root: P1, path: P2) -> PathBuf {\n\n let path = PathBuf::from(path.as_ref());\n\n if path.is_absolute() {\n\n path\n\n } else {\n\n let parent = root.as_ref().parent().expect(\"Could not get parent path\");\n\n parent.join(path)\n\n }\n\n}\n", "file_path": "lumol-input/src/simulations/mod.rs", "rank": 31, "score": 254757.71376200806 }, { "content": "fn get_error_message(content: &str) -> String {\n\n for line in content.lines() {\n\n let line = line.trim();\n\n if line.starts_with(\"#^ \") {\n\n return String::from(&line[3..]);\n\n }\n\n }\n\n\n\n panic!(\"No error message found. 
Please add one with the '#^ <message>' syntax.\");\n\n}\n", "file_path": "lumol-input/tests/input.rs", "rank": 32, "score": 254537.30710113738 }, { "content": "pub fn test_output<F>(function: F, expected: &str)\n\nwhere\n\n F: Fn(&Path) -> Box<dyn Output>,\n\n{\n\n let tempfile = NamedTempFile::new().unwrap();\n\n let system = testing_system();\n\n {\n\n let mut output = function(tempfile.path());\n\n output.setup(&system);\n\n output.write(&system);\n\n output.finish(&system);\n\n }\n\n\n\n let file = tempfile.reopen().unwrap();\n\n check_file_content(file, expected);\n\n}\n\n\n", "file_path": "lumol-sim/src/output/tests.rs", "rank": 33, "score": 233546.8206599724 }, { "content": "/// Read and pop (recursively) a single expression from the `stream`.\n\n/// The `stream` must be in reverse polish notation.\n\nfn read_expr(stream: &mut Vec<Token>) -> Result<UnitExpr, ParseError> {\n\n if let Some(token) = stream.pop() {\n\n match token {\n\n Token::Value(unit) => {\n\n match CONVERSION_FACTORS.get(&*unit) {\n\n Some(&value) => Ok(UnitExpr::Val(value)),\n\n None => Err(ParseError::NotFound { unit: unit }),\n\n }\n\n }\n\n Token::Mul => {\n\n let rhs = read_expr(stream).map_err(|err| {\n\n ParseError::MalformedExpr(format!(\"Error in unit at the right of '*': {}\", err))\n\n })?;\n\n let lhs = read_expr(stream).map_err(|err| {\n\n ParseError::MalformedExpr(format!(\"Error in unit at the left of '*': {}\", err))\n\n })?;\n\n Ok(UnitExpr::Mul(Box::new(lhs), Box::new(rhs)))\n\n }\n\n Token::Div => {\n\n let rhs = read_expr(stream).map_err(|err| {\n", "file_path": "lumol-core/src/units.rs", "rank": 34, "score": 226616.37591193125 }, { "content": "/// Callback for updating a cache. It also take an `&mut System` argument for\n\n/// updating the cache inside the global potentials.\n\ntype UpdateCallback = Box<dyn Fn(&mut EnergyCache, &mut System) + Send + Sync>;\n\n\n\n/// This is a cache for energy computation.\n\n///\n\n/// Cache integrity is left up to the user of this structure: any function with\n\n/// `update_` prefix must be called as needed to ensure cache consistency.\n\npub struct EnergyCache {\n\n /// 2-D array containing the pairs interactions between particles `i` and\n\n /// `j` at index `i, j`\n\n pairs_cache: Array2<f64>,\n\n /// Energy of all the pairs in the system\n\n pairs: f64,\n\n /// Contribution of long range corrections\n\n pairs_tail: f64,\n\n /// Energy of all the bonds in the system\n\n bonds: f64,\n\n /// Energy of all the angles in the system\n\n angles: f64,\n\n /// Energy of all the dihedrals angles in the system\n\n dihedrals: f64,\n", "file_path": "lumol-core/src/sys/cache.rs", "rank": 35, "score": 225121.20441591306 }, { "content": "/// Generate the tests by calling `callback` for every TOML files at the given\n\n/// `root`.\n\nfn generate_tests<F>(root: &str, callback: F) -> Result<Vec<TestDescAndFn>, io::Error>\n\nwhere\n\n F: Fn(PathBuf, String) -> Box<dyn FnMut() + Send>,\n\n{\n\n let mut tests = Vec::new();\n\n\n\n let dir = PathBuf::new().join(env!(\"CARGO_MANIFEST_DIR\")).join(\"tests\").join(root);\n\n for entry in WalkDir::new(dir) {\n\n let entry = entry?;\n\n let file_type = entry.file_type();\n\n if file_type.is_file() {\n\n if let Some(extension) = entry.path().extension() {\n\n if extension == \"toml\" {\n\n let path = entry.path();\n\n let name = String::from(root) + \"/\";\n\n let name = name + path.file_name()\n\n .expect(\"Missing file name\")\n\n .to_str()\n\n .expect(\"File name is invalid UTF-8\");\n\n\n", "file_path": 
"lumol-input/tests/input.rs", "rank": 36, "score": 222525.28859756014 }, { "content": "// An ideal gas system\n\nfn testing_system() -> System {\n\n let mut system = System::with_cell(UnitCell::cubic(20.0));\n\n\n\n for i in 0..10 {\n\n for j in 0..10 {\n\n for k in 0..10 {\n\n let mut particle = Particle::new(\"He\");\n\n particle.position = Vector3D::new(i as f64 * 2.0, j as f64 * 2.0, k as f64 * 2.0);\n\n system.add_molecule(Molecule::new(particle));\n\n }\n\n }\n\n }\n\n\n\n let mut velocities = BoltzmannVelocities::new(300.0);\n\n velocities.init(&mut system);\n\n\n\n assert_ulps_eq!(system.temperature(), 300.0, epsilon = 1e-9);\n\n return system;\n\n}\n\n\n", "file_path": "lumol-sim/tests/thermostats.rs", "rank": 38, "score": 208267.18583600345 }, { "content": "/// Convert a TOML table to a Rust type using information from an additional reference.\n\npub trait FromTomlWithRefData: Sized {\n\n /// The type of the additional data needed.\n\n type Data;\n\n /// Do the conversion from `table` and `data` to Self.\n\n fn from_toml(table: &Table, data: &Self::Data) -> Result<Self, Error>;\n\n}\n\n\n", "file_path": "lumol-input/src/lib.rs", "rank": 39, "score": 198444.70094258577 }, { "content": "/// Setup a default logger to be able to print error messages\n\npub fn setup_default_logger() {\n\n // We just log everything to stdout\n\n let stdout = ConsoleAppender::builder()\n\n .target(console::Target::Stdout)\n\n .encoder(Box::new(LogEncoder))\n\n .build();\n\n\n\n let appender = Appender::builder()\n\n .filter(Box::new(ThresholdFilter::new(log::LevelFilter::Info)))\n\n .build(\"main\", Box::new(stdout));\n\n\n\n let config = Config::builder()\n\n .appender(appender)\n\n .build(Root::builder().appender(\"main\").build(log::LevelFilter::Info))\n\n .expect(\"Error in logging initialization\");\n\n\n\n // We ignore the result of this call, because it can only fail if a\n\n // logger has already been initialized.\n\n let _ = log4rs::init_config(config);\n\n}\n\n\n", "file_path": "lumol-input/src/simulations/logging.rs", "rank": 40, "score": 180918.78835091117 }, { "content": "fn validate(config: &Table) -> Result<(), Error> {\n\n let input = config.get(\"input\").ok_or(\n\n Error::from(\"missing 'input' table\")\n\n )?;\n\n\n\n let version = input.get(\"version\").ok_or(\n\n Error::from(\"missing 'version' key in 'input' table\")\n\n )?;\n\n\n\n let version = version.as_integer().ok_or(\n\n Error::from(\"'input.version' must be an integer\")\n\n )?;\n\n\n\n if version != 1 {\n\n return Err(Error::from(\n\n format!(\"can only read version 1 of input, got version {}\", version),\n\n ));\n\n }\n\n Ok(())\n\n}\n", "file_path": "lumol-input/src/lib.rs", "rank": 41, "score": 173347.47154717284 }, { "content": "fn parse_args<'a>() -> ArgMatches<'a> {\n\n App::new(\"lumol\").version(lumol::VERSION)\n\n .about(\"An extensible molecular simulation engine\")\n\n .args_from_usage(\"<input.toml> 'Simulation input file'\")\n\n .get_matches()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 167368.86347041946 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut system = System::new();\n\n system.add_molecule(Molecule::new(Particle::with_position(\"F\", Vector3D::new(0.0, 0.0, 0.0))));\n\n system.add_molecule(Molecule::new(Particle::with_position(\"F\", Vector3D::new(1.5, 0.0, 0.0))));\n\n\n\n // We can now use our new potential in the system\n\n let lj = Box::new(LJ {\n\n a: units::from(675.5, \"kJ/mol/A^12\")?,\n\n b: units::from(40.26, \"kJ/mol/A^6\")?,\n\n });\n\n 
system.set_pair_potential((\"F\", \"F\"), PairInteraction::new(lj, 10.0));\n\n\n\n let md = MolecularDynamics::new(units::from(1.0, \"fs\")?);\n\n let mut simulation = Simulation::new(Box::new(md));\n\n simulation.run(&mut system, 1000);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/custom-potential.rs", "rank": 43, "score": 167105.83622701687 }, { "content": "fn energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"propane\");\n\n c.bench_function(\"propane::energy\", move |b| b.iter(|| {\n\n let _ = PotentialEnergy.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"propane\");\n\n c.bench_function(\"propane::force\", move |b| b.iter(|| {\n\n let _ = Forces.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"propane\");\n\n c.bench_function(\"propane::atomic_virial\", move |b| b.iter(|| {\n\n let _ = AtomicVirial.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"propane\");\n\n c.bench_function(\"propane::molecular_virial\", move |b| b.iter(|| {\n\n let _ = MolecularVirial.compute(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/propane.rs", "rank": 44, "score": 166964.41201176977 }, { "content": "fn energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"argon\");\n\n c.bench_function(\"argon::energy\", move |b| b.iter(|| {\n\n let _ = PotentialEnergy.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"argon\");\n\n c.bench_function(\"argon::force\", move |b| b.iter(|| {\n\n let _ = Forces.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"argon\");\n\n c.bench_function(\"argon::atomic_virial\", move |b| b.iter(|| {\n\n let _ = AtomicVirial.compute(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"argon\");\n\n c.bench_function(\"argon::molecular_virial\", move |b| b.iter(|| {\n\n let _ = MolecularVirial.compute(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/argon.rs", "rank": 45, "score": 166964.41201176977 }, { "content": "fn ewald_energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"water\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"water::ewald::energy\", move |b| b.iter(|| {\n\n let _ = ewald.energy(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"water\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"water::ewald::force\", move |b| b.iter_batched_ref(\n\n || vec![Vector3D::zero(); system.size()],\n\n |forces| ewald.forces(&system, forces),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let system = utils::get_system(\"water\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"water::ewald::atomic_virial\", move |b| b.iter(|| {\n\n let _ = ewald.atomic_virial(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"water\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"water::ewald::molecular_virial\", move |b| b.iter(|| {\n\n let _ = ewald.molecular_virial(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/water.rs", "rank": 46, "score": 165165.33768945246 }, { "content": "fn wolf_energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"nacl\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"nacl::wolf::energy\", move |b| b.iter(|| {\n\n let _ = wolf.energy(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"nacl::wolf::force\", move |b| b.iter_batched_ref(\n\n || vec![Vector3D::zero(); system.size()],\n\n |forces| wolf.forces(&system, forces),\n\n BatchSize::SmallInput\n\n 
));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"nacl::wolf::atomic_virial\", move |b| b.iter(|| {\n\n let _ = wolf.atomic_virial(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"nacl::wolf::molecular_virial\", move |b| b.iter(|| {\n\n let _ = wolf.molecular_virial(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/nacl.rs", "rank": 47, "score": 165165.33768945246 }, { "content": "fn ewald_energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"nacl\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"nacl::ewald::energy\", move |b| b.iter(|| {\n\n let _ = ewald.energy(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"nacl::ewald::force\", move |b| b.iter_batched_ref(\n\n || vec![Vector3D::zero(); system.size()],\n\n |forces| ewald.forces(&system, forces),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"nacl::ewald::atomic_virial\", move |b| b.iter(|| {\n\n let _ = ewald.atomic_virial(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"nacl\");\n\n let ewald = get_ewald();\n\n c.bench_function(\"nacl::ewald::molecular_virial\", move |b| b.iter(|| {\n\n let _ = ewald.molecular_virial(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/nacl.rs", "rank": 48, "score": 165165.33768945246 }, { "content": "fn monte_carlo_cache(c: &mut Criterion) {\n\n let system = utils::get_system(\"argon\");\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"argon::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let system = utils::get_system(\"argon\");\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"argon::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n\ncriterion_group!(argon, energy_computation, monte_carlo_cache);\n\ncriterion_main!(argon);\n", "file_path": "benches/argon.rs", "rank": 49, "score": 165165.33768945246 }, { "content": "fn monte_carlo_cache(c: &mut Criterion) {\n\n let system = utils::get_system(\"propane\");\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"propane::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let system = utils::get_system(\"propane\");\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"propane::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n\ncriterion_group!(propane, energy_computation, monte_carlo_cache);\n\ncriterion_main!(propane);\n", "file_path": "benches/propane.rs", "rank": 50, "score": 165165.33768945246 }, { "content": "fn wolf_energy_computation(c: &mut Criterion) {\n\n let system = utils::get_system(\"water\");\n\n let wolf = get_wolf();\n\n 
c.bench_function(\"water::wolf::energy\", move |b| b.iter(|| {\n\n let _ = wolf.energy(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"water\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"water::wolf::force\", move |b| b.iter_batched_ref(\n\n || vec![Vector3D::zero(); system.size()],\n\n |forces| wolf.forces(&system, forces),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let system = utils::get_system(\"water\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"water::wolf::atomic_virial\", move |b| b.iter(|| {\n\n let _ = wolf.atomic_virial(&system);\n\n }));\n\n\n\n let system = utils::get_system(\"water\");\n\n let wolf = get_wolf();\n\n c.bench_function(\"water::wolf::molecular_virial\", move |b| b.iter(|| {\n\n let _ = wolf.molecular_virial(&system);\n\n }));\n\n}\n\n\n", "file_path": "benches/water.rs", "rank": 51, "score": 165165.33768945246 }, { "content": "fn ewald_monte_carlo_cache(c: &mut Criterion) {\n\n let mut system = utils::get_system(\"water\");\n\n system.set_coulomb_potential(Box::new(get_ewald()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"water::ewald::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let mut system = utils::get_system(\"water\");\n\n system.set_coulomb_potential(Box::new(get_ewald()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"water::ewald::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n", "file_path": "benches/water.rs", "rank": 52, "score": 163436.5273485984 }, { "content": "fn ewald_monte_carlo_cache(c: &mut Criterion) {\n\n let mut system = utils::get_system(\"nacl\");\n\n system.set_coulomb_potential(Box::new(get_ewald()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"nacl::ewald::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let mut system = utils::get_system(\"nacl\");\n\n system.set_coulomb_potential(Box::new(get_ewald()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"nacl::ewald::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n", "file_path": "benches/nacl.rs", "rank": 53, "score": 163436.5273485984 }, { "content": "fn wolf_monte_carlo_cache(c: &mut Criterion) {\n\n let mut system = utils::get_system(\"nacl\");\n\n system.set_coulomb_potential(Box::new(get_wolf()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"nacl::wolf::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let mut system = utils::get_system(\"nacl\");\n\n system.set_coulomb_potential(Box::new(get_wolf()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n 
c.bench_function(\"nacl::wolf::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n\ncriterion_group!(ewald, ewald_energy_computation, ewald_monte_carlo_cache);\n\ncriterion_group!(wolf, wolf_energy_computation, wolf_monte_carlo_cache);\n\n\n\ncriterion_main!(ewald, wolf);\n", "file_path": "benches/nacl.rs", "rank": 54, "score": 163436.5273485984 }, { "content": "fn wolf_monte_carlo_cache(c: &mut Criterion) {\n\n let mut system = utils::get_system(\"water\");\n\n system.set_coulomb_potential(Box::new(get_wolf()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"water::wolf::move_molecule_cost\", move |b| b.iter_batched(\n\n || utils::move_rigid_molecule(&system),\n\n |(molid, positions)| cache.move_molecule_cost(&system, molid, &positions),\n\n BatchSize::SmallInput\n\n ));\n\n\n\n let mut system = utils::get_system(\"water\");\n\n system.set_coulomb_potential(Box::new(get_wolf()));\n\n let mut cache = EnergyCache::new();\n\n cache.init(&system);\n\n\n\n c.bench_function(\"water::wolf::move_all_molecules_cost\", move |b| b.iter_batched_ref(\n\n || utils::move_all_rigid_molecule(system.clone()),\n\n |system| cache.move_all_molecules_cost(system),\n\n BatchSize::SmallInput\n\n ));\n\n}\n\n\n\ncriterion_group!(ewald, ewald_energy_computation, ewald_monte_carlo_cache);\n\ncriterion_group!(wolf, wolf_energy_computation, wolf_monte_carlo_cache);\n\n\n\ncriterion_main!(ewald, wolf);\n", "file_path": "benches/water.rs", "rank": 55, "score": 163436.5273485984 }, { "content": "/// Rotate the particles at `positions` with the center-of-mass position\n\n/// `com` around the `axis` axis by `angle`. The `positions` array is\n\n/// overwritten with the new positions.\n\nfn rotate_around_axis(positions: &mut [Vector3D], com: Vector3D, axis: Vector3D, angle: f64) {\n\n let rotation = Matrix3::rotation(&axis, angle);\n\n for position in positions {\n\n let oldpos = *position - com;\n\n *position = com + rotation * oldpos;\n\n }\n\n}\n", "file_path": "lumol-sim/src/mc/moves/rotate.rs", "rank": 56, "score": 161301.1145756914 }, { "content": "/// The `Output` trait defines the interface for all the quantities outputted by\n\n/// the simulation during the run. An Output can be a text or a binary data\n\n/// file, an image, a text log, …\n\npub trait Output {\n\n /// Function called once at the beginning of the simulation, which allows\n\n /// for some setup of the output if needed.\n\n fn setup(&mut self, _: &System) {}\n\n\n\n /// Write the output from the system.\n\n fn write(&mut self, system: &System);\n\n\n\n /// Function called once at the end of the simulation.\n\n fn finish(&mut self, _: &System) {}\n\n}\n\n\n\nmod tests;\n\n\n\nmacro_rules! 
writeln_or_log {\n\n ($this: expr, $($args: expr),* $(,)*) => (\n\n if let Err(err) = writeln!(&mut $this.file, $($args,)*) {\n\n error!(\"could not write to file '{}': {}\", $this.path.display(), err);\n\n return;\n\n }\n", "file_path": "lumol-sim/src/output/mod.rs", "rank": 57, "score": 159931.4875899987 }, { "content": "/// Convert a TOML table and some additional owned data to a Rust type.\n\npub trait FromTomlWithData: Sized {\n\n /// The type of the additional data needed.\n\n type Data;\n\n /// Do the conversion from `table` and `data` to Self.\n\n fn from_toml(table: &Table, data: Self::Data) -> Result<Self, Error>;\n\n}\n\n\n", "file_path": "lumol-input/src/lib.rs", "rank": 58, "score": 158019.9684673162 }, { "content": "fn read_restriction(config: &Table) -> Result<Option<PairRestriction>, Error> {\n\n let restriction = config.get(\"restriction\");\n\n if restriction.is_none() {\n\n // No restriction found\n\n return Ok(None);\n\n };\n\n\n\n match restriction.expect(\"Unreachable\").clone() {\n\n Value::String(name) => {\n\n match &*name {\n\n \"none\" => Ok(Some(PairRestriction::None)),\n\n \"intramolecular\" | \"IntraMolecular\" | \"intra-molecular\" => {\n\n Ok(Some(PairRestriction::IntraMolecular))\n\n }\n\n \"intermolecular\" | \"InterMolecular\" | \"inter-molecular\" => {\n\n Ok(Some(PairRestriction::InterMolecular))\n\n }\n\n \"exclude12\" => Ok(Some(PairRestriction::Exclude12)),\n\n \"exclude13\" => Ok(Some(PairRestriction::Exclude13)),\n\n \"exclude14\" => Ok(Some(PairRestriction::Exclude14)),\n", "file_path": "lumol-input/src/interactions/mod.rs", "rank": 59, "score": 157791.2276209832 }, { "content": "fn read_angle_potential(table: &Table) -> Result<Box<dyn AnglePotential>, Error> {\n\n match extract::typ(table, \"angle potential\")? {\n\n \"null\" => Ok(Box::new(NullPotential::from_toml(table)?)),\n\n \"harmonic\" => Ok(Box::new(Harmonic::from_toml(table)?)),\n\n \"cosine-harmonic\" => Ok(Box::new(CosineHarmonic::from_toml(table)?)),\n\n \"morse\" => Ok(Box::new(Morse::from_toml(table)?)),\n\n other => Err(Error::from(format!(\"unknown potential type '{}'\", other))),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/interactions/angles.rs", "rank": 60, "score": 151957.0675139388 }, { "content": "fn read_pair_potential(table: &Table) -> Result<Box<dyn PairPotential>, Error> {\n\n match extract::typ(table, \"pair potential\")? {\n\n \"null\" => Ok(Box::new(NullPotential::from_toml(table)?)),\n\n \"harmonic\" => Ok(Box::new(Harmonic::from_toml(table)?)),\n\n \"lj\" => Ok(Box::new(LennardJones::from_toml(table)?)),\n\n \"buckingham\" => Ok(Box::new(Buckingham::from_toml(table)?)),\n\n \"born\" => Ok(Box::new(BornMayerHuggins::from_toml(table)?)),\n\n \"morse\" => Ok(Box::new(Morse::from_toml(table)?)),\n\n \"gaussian\" => Ok(Box::new(Gaussian::from_toml(table)?)),\n\n \"mie\" => Ok(Box::new(Mie::from_toml(table)?)),\n\n other => Err(Error::from(format!(\"unknown potential type '{}'\", other))),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/interactions/pairs.rs", "rank": 61, "score": 151957.0675139388 }, { "content": "fn read_bond_potential(table: &Table) -> Result<Box<dyn BondPotential>, Error> {\n\n match extract::typ(table, \"bond potential\")? 
{\n\n \"null\" => Ok(Box::new(NullPotential::from_toml(table)?)),\n\n \"harmonic\" => Ok(Box::new(Harmonic::from_toml(table)?)),\n\n \"morse\" => Ok(Box::new(Morse::from_toml(table)?)),\n\n other => Err(Error::from(format!(\"unknown potential type '{}'\", other))),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/interactions/pairs.rs", "rank": 62, "score": 151957.0675139388 }, { "content": "fn read_dihedral_potential(table: &Table) -> Result<Box<dyn DihedralPotential>, Error> {\n\n match extract::typ(table, \"dihedral potential\")? {\n\n \"null\" => Ok(Box::new(NullPotential::from_toml(table)?)),\n\n \"harmonic\" => Ok(Box::new(Harmonic::from_toml(table)?)),\n\n \"cosine-harmonic\" => Ok(Box::new(CosineHarmonic::from_toml(table)?)),\n\n \"torsion\" => Ok(Box::new(Torsion::from_toml(table)?)),\n\n \"morse\" => Ok(Box::new(Morse::from_toml(table)?)),\n\n other => Err(Error::from(format!(\"unknown potential type '{}'\", other))),\n\n }\n\n}\n", "file_path": "lumol-input/src/interactions/angles.rs", "rank": 63, "score": 151957.0675139388 }, { "content": "fn format_elapsed(elapsed: Duration) -> String {\n\n if elapsed.num_weeks() > 0 {\n\n let h = elapsed.num_hours() % 24;\n\n let d = elapsed.num_days() % 7;\n\n let w = elapsed.num_weeks();\n\n format!(\"{} weeks {} days {}h\", w, d, h)\n\n } else if elapsed.num_days() > 0 {\n\n let m = elapsed.num_minutes() % 60;\n\n let h = elapsed.num_hours() % 24;\n\n let d = elapsed.num_days();\n\n format!(\"{} days {}h {}min\", d, h, m)\n\n } else if elapsed.num_hours() > 0 {\n\n let s = elapsed.num_seconds() % 60;\n\n let m = elapsed.num_minutes() % 60;\n\n let h = elapsed.num_hours();\n\n format!(\"{}h {}min {}s\", h, m, s)\n\n } else {\n\n let s = elapsed.num_seconds() % 60;\n\n let m = elapsed.num_minutes();\n\n format!(\"{}min {}s\", m, s)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 64, "score": 138267.8265144517 }, { "content": " fn read_potentials(&self, system: &mut System) -> Result<(), Error> {\n\n let config = self.system_table()?;\n\n if let Some(potentials) = config.get(\"potentials\") {\n\n if let Some(potentials) = potentials.as_str() {\n\n let path = get_input_path(&self.path, potentials);\n\n let input = InteractionsInput::new(path)?;\n\n input.read(system)?;\n\n } else if let Some(potentials) = potentials.as_table() {\n\n let input = InteractionsInput::from_toml(potentials.clone());\n\n input.read(system)?;\n\n } else {\n\n return Err(Error::from(\"'potentials' must be a string or a table in system\"));\n\n }\n\n } else {\n\n warn!(\"No potentials found in input file\");\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 65, "score": 138190.6218599362 }, { "content": "// Lumol, an extensible molecular simulation engine\n\n// Copyright (C) Lumol's contributors — BSD license\n\nuse toml::value::{Table, Value};\n\n\n\nuse lumol_core::{System, UnitCell, TrajectoryBuilder};\n\nuse lumol_sim::{BoltzmannVelocities, InitVelocities};\n\nuse lumol_core::units;\n\n\n\nuse log::warn;\n\n\n\nuse crate::{Input, InteractionsInput, Error};\n\nuse crate::extract;\n\nuse crate::simulations::get_input_path;\n\n\n\nimpl Input {\n\n /// Get the the simulated system.\n\n pub fn read_system(&self) -> Result<System, Error> {\n\n let config = self.system_table()?;\n\n\n\n let file = extract::str(\"file\", config, \"system\")?;\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 66, "score": 138188.48043165417 }, { "content": " let gamma = get_cell_number(&cell[5])?;\n\n\n\n 
Ok(Some(UnitCell::triclinic(a, b, c, alpha, beta, gamma)))\n\n } else {\n\n Err(Error::from(\"'cell' array must have a size of 3 or 6\"))\n\n }\n\n }\n\n Value::Integer(lenght) => {\n\n let lenght = lenght as f64;\n\n Ok(Some(UnitCell::cubic(lenght)))\n\n }\n\n Value::Float(lenght) => Ok(Some(UnitCell::cubic(lenght))),\n\n _ => Err(Error::from(\"'cell' must be a number or an array in system\")),\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n\n\n fn init_velocities(&self, system: &mut System) -> Result<(), Error> {\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 67, "score": 138182.31525409388 }, { "content": " let file = get_input_path(&self.path, file);\n\n let mut trajectory = TrajectoryBuilder::new().open(file)?;\n\n\n\n let with_cell = if let Some(cell) = self.read_cell()? {\n\n trajectory.set_cell(&cell);\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n if config.get(\"topology\").is_some() {\n\n let topology = extract::str(\"topology\", config, \"system\")?;\n\n trajectory.set_topology_file(topology)?;\n\n }\n\n\n\n let guess_bonds = if let Some(guess_bonds) = config.get(\"guess_bonds\") {\n\n guess_bonds.as_bool().ok_or(\n\n Error::from(\"'guess_bonds' should be a boolean value in system\")\n\n )?\n\n } else {\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 68, "score": 138181.24108474844 }, { "content": " let config = self.system_table()?;\n\n\n\n if let Some(velocities) = config.get(\"velocities\") {\n\n let velocities = velocities.as_table().ok_or(\n\n Error::from(\"'velocities' must be a table in system\")\n\n )?;\n\n\n\n if velocities.get(\"init\").is_some() {\n\n let temperature = extract::str(\"init\", velocities, \"velocities initializer\")?;\n\n let temperature = units::from_str(temperature)?;\n\n let mut velocities = BoltzmannVelocities::new(temperature);\n\n velocities.init(system);\n\n } else {\n\n warn!(\"'velocities' key does nothing in this input file\");\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 69, "score": 138176.2842576547 }, { "content": " Ok(system)\n\n }\n\n\n\n fn system_table(&self) -> Result<&Table, Error> {\n\n let systems = extract::slice(\"systems\", &self.config, \"input file\")?;\n\n\n\n if systems.is_empty() {\n\n return Err(Error::from(\"'systems' array should contain a system\"));\n\n }\n\n\n\n if systems.len() > 1 {\n\n return Err(Error::from(\"only one system is supported in input file\"));\n\n }\n\n\n\n let system = systems[0].as_table().ok_or(\n\n Error::from(\"'systems' should be an array of tables in input file\")\n\n )?;\n\n\n\n return Ok(system);\n\n }\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 70, "score": 138175.69712625077 }, { "content": " false\n\n };\n\n\n\n let mut system = if guess_bonds {\n\n trajectory.read_guess_bonds()?\n\n } else {\n\n trajectory.read()?\n\n };\n\n\n\n self.read_potentials(&mut system)?;\n\n self.init_velocities(&mut system)?;\n\n\n\n if !with_cell && system.cell.is_infinite() {\n\n warn!(\n\n \"No unit cell in the system, using an infinite unit cell.\\n\\\n\n You can get rid of this warning by using `cell = []` in the \\\n\n input file if this is what you want.\"\n\n );\n\n }\n\n\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 71, "score": 138168.86612624896 }, { "content": "\n\n fn read_cell(&self) -> Result<Option<UnitCell>, Error> {\n\n let config = self.system_table()?;\n\n if let Some(cell) = config.get(\"cell\") {\n\n match *cell {\n\n Value::Array(ref cell) => {\n\n if 
cell.is_empty() {\n\n Ok(Some(UnitCell::infinite()))\n\n } else if cell.len() == 3 {\n\n let a = get_cell_number(&cell[0])?;\n\n let b = get_cell_number(&cell[1])?;\n\n let c = get_cell_number(&cell[2])?;\n\n\n\n Ok(Some(UnitCell::ortho(a, b, c)))\n\n } else if cell.len() == 6 {\n\n let a = get_cell_number(&cell[0])?;\n\n let b = get_cell_number(&cell[1])?;\n\n let c = get_cell_number(&cell[2])?;\n\n let alpha = get_cell_number(&cell[3])?;\n\n let beta = get_cell_number(&cell[4])?;\n", "file_path": "lumol-input/src/simulations/system.rs", "rank": 72, "score": 138167.1054008185 }, { "content": "fn read_pair_computation(computation: &Table, potential: Box<dyn PairPotential>) -> Result<Box<dyn PairPotential>, Error> {\n\n if computation.keys().len() != 1 {\n\n return Err(Error::from(\"Missing computation type in computation table\"));\n\n }\n\n\n\n match computation.keys().map(|s| s.as_ref()).next() {\n\n Some(\"table\") => Ok(Box::new(TableComputation::from_toml(computation, potential)?)),\n\n Some(other) => Err(Error::from(format!(\"Unknown computation type '{}'\", other))),\n\n None => unreachable!(),\n\n }\n\n}\n", "file_path": "lumol-input/src/interactions/pairs.rs", "rank": 73, "score": 137058.52129852228 }, { "content": "pub fn get_rng() -> XorShiftRng {\n\n XorShiftRng::from_seed([145, 59, 58, 50, 238, 182, 97, 28, 107, 149, 227, 40, 90, 109, 196, 129])\n\n}\n\n\n", "file_path": "benches/utils/mod.rs", "rank": 74, "score": 135493.71071461603 }, { "content": "/// Cleanup temporary files after the tests\n\nstruct TestsCleanup;\n\nimpl Drop for TestsCleanup {\n\n fn drop(&mut self) {\n\n const REMOVE: &[&str] = &[\n\n \"energy.dat\",\n\n \"filename.xyz\",\n\n \"cell.dat\",\n\n \"properties.dat\",\n\n \"file.log\",\n\n \"custom.dat\",\n\n \"stress.dat\",\n\n \"forces.xyz\",\n\n ];\n\n\n\n for file in REMOVE {\n\n if let Err(err) = fs::remove_file(file) {\n\n match err.kind() {\n\n io::ErrorKind::NotFound => {}\n\n _ => panic!(\"io error in cleanup code: {}\", err),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "lumol-input/tests/input.rs", "rank": 75, "score": 133364.1099708001 }, { "content": "fn any<F: Fn(f64) -> bool>(vector: &Vector3D, function: F) -> bool {\n\n function(vector[0]) || function(vector[1]) || function(vector[2])\n\n}\n", "file_path": "lumol-sim/src/simulations.rs", "rank": 76, "score": 132246.83976480458 }, { "content": "/// Transform a string to a stream of tokens\n\nfn tokenize(unit: &str) -> Vec<Token> {\n\n let mut tokens = Vec::new();\n\n let mut token = String::new();\n\n for c in unit.chars() {\n\n match c {\n\n '*' | '/' | '^' | '(' | ')' => {\n\n if !token.is_empty() {\n\n tokens.push(Token::Value(token.clone()));\n\n token.clear();\n\n }\n\n match c {\n\n '*' => tokens.push(Token::Mul),\n\n '/' => tokens.push(Token::Div),\n\n '^' => tokens.push(Token::Pow),\n\n '(' => tokens.push(Token::LParen),\n\n ')' => tokens.push(Token::RParen),\n\n _ => unreachable!(\"invalid unit operator\"),\n\n }\n\n }\n\n other if !other.is_whitespace() => {\n", "file_path": "lumol-core/src/units.rs", "rank": 77, "score": 131387.90920715555 }, { "content": "#[derive(Clone)]\n\nstruct LJ {\n\n a: f64,\n\n b: f64,\n\n}\n\n\n\n// All we need to do is to implement the Potential trait\n\nimpl Potential for LJ {\n\n // The energy function give the energy at distance `r`\n\n fn energy(&self, r: f64) -> f64 {\n\n self.a / r.powi(12) - self.b / r.powi(6)\n\n }\n\n\n\n // The force function give the norm of the force at distance `r`\n\n fn force(&self, r: f64) -> f64 {\n\n 12.0 * self.a / 
r.powi(13) - 6.0 * self.b / r.powi(7)\n\n }\n\n}\n\n\n\n// We want to use our LJ potential as a pair potential.\n\nimpl PairPotential for LJ {\n", "file_path": "examples/custom-potential.rs", "rank": 78, "score": 128146.63396903816 }, { "content": "fn all_tests() -> Vec<TestDescAndFn> {\n\n let mut tests = Vec::new();\n\n\n\n tests.extend(\n\n generate_tests(\"simulation/good\", |path, content| {\n\n Box::new(move || {\n\n let input = Input::from_str(path.clone(), &content).unwrap();\n\n input.read().unwrap();\n\n })\n\n }).expect(\"Could not generate the tests\"),\n\n );\n\n\n\n tests.extend(\n\n generate_tests(\"simulation/bad\", |path, content| {\n\n Box::new(move || {\n\n let message = get_error_message(&content);\n\n let result = Input::from_str(path.clone(), &content).and_then(|input| input.read());\n\n\n\n match result {\n\n Err(Error::Config(reason)) => assert_eq!(reason, message),\n", "file_path": "lumol-input/tests/input.rs", "rank": 79, "score": 128012.52347057809 }, { "content": "fn main() {\n\n env_logger::init();\n\n let _cleanup = TestsCleanup;\n\n\n\n let args: Vec<_> = env::args()\n\n .filter(|arg| !arg.contains(\"test-threads\"))\n\n .collect();\n\n\n\n let mut opts = match rustc_test::parse_opts(&args).expect(\"no options\") {\n\n Ok(opts) => opts,\n\n Err(msg) => panic!(\"{:?}\", msg),\n\n };\n\n opts.verbose = true;\n\n\n\n let tests = all_tests();\n\n let result = rustc_test::run_tests_console(&opts, tests);\n\n match result {\n\n Ok(true) => {}\n\n Ok(false) => std::process::exit(-1),\n\n Err(err) => panic!(\"io error when running tests: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "lumol-input/tests/input.rs", "rank": 80, "score": 126894.81583136931 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut system = TrajectoryBuilder::new().open(\"data/xenon.xyz\")?\n\n .read()?;\n\n system.cell = UnitCell::cubic(units::from(21.65, \"A\")?);\n\n\n\n let lj = Box::new(LennardJones {\n\n sigma: units::from(4.57, \"A\")?,\n\n epsilon: units::from(1.87, \"kJ/mol\")?,\n\n });\n\n system.set_pair_potential((\"Xe\", \"Xe\"), PairInteraction::new(lj, 12.0));\n\n\n\n // Create a Monte Carlo builder\n\n let mut builder = MonteCarloBuilder::new(units::from(500.0, \"K\")?);\n\n // Add the `Translate` move with 0.5 A amplitude and 1.0 frequency\n\n builder.add(Box::new(Translate::new(units::from(0.5, \"A\")?, None)), 1.0, None);\n\n\n\n // Extract the Monte Carlo propagator\n\n let mc = builder.finish();\n\n let mut simulation = Simulation::new(Box::new(mc));\n\n\n\n let trajectory_out = Box::new(TrajectoryOutput::new(\"trajectory.xyz\")?);\n\n simulation.add_output_with_frequency(trajectory_out, 50);\n\n\n\n simulation.run(&mut system, 20_000);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/xenon.rs", "rank": 81, "score": 126693.73397220351 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut system = TrajectoryBuilder::new().open(\"data/binary.pdb\")?\n\n .read()?;\n\n let input = InteractionsInput::new(\"data/binary.toml\")?;\n\n input.read(&mut system)?;\n\n\n\n // We can read files to get molecule hash\n\n let co2 = read_molecule(\"data/CO2.pdb\")?.hash();\n\n\n\n // Or define a new molecule by hand\n\n let mut molecule = Molecule::new(Particle::new(\"H\"));\n\n molecule.add_particle_bonded_to(0, Particle::new(\"O\"));\n\n molecule.add_particle_bonded_to(1, Particle::new(\"H\"));\n\n let h2o = molecule.hash();\n\n\n\n let mut builder = MonteCarloBuilder::new(units::from(500.0, \"K\")?);\n\n // Use the molecular types of 
CO2 and H2O to specify different probabilities\n\n builder.add(Box::new(Translate::new(units::from(0.5, \"A\")?, co2)), 1.0, None);\n\n builder.add(Box::new(Rotate::new(units::from(10.0, \"deg\")?, co2)), 1.0, None);\n\n\n\n builder.add(Box::new(Translate::new(units::from(10.0, \"A\")?, h2o)), 2.0, None);\n\n builder.add(Box::new(Rotate::new(units::from(20.0, \"deg\")?, h2o)), 2.0, None);\n\n\n\n let mut simulation = Simulation::new(Box::new(builder.finish()));\n\n simulation.run(&mut system, 200_000_000);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/binary.rs", "rank": 82, "score": 126693.73397220351 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // Read the system fromt the `data/nacl.xyz` file\n\n let mut system = TrajectoryBuilder::new().open(\"data/nacl.xyz\")?\n\n .read()?;\n\n // Set the unit cell, as there is no unit cell data in XYZ files\n\n system.cell = UnitCell::cubic(units::from(22.5608, \"A\")?);\n\n // Read the interactions from the `data/nacl.toml` TOML file\n\n let input = InteractionsInput::new(\"data/nacl.toml\")?;\n\n input.read(&mut system)?;\n\n\n\n let mut velocities = BoltzmannVelocities::new(units::from(300.0, \"K\")?);\n\n velocities.init(&mut system);\n\n\n\n let mut md = MolecularDynamics::new(units::from(1.0, \"fs\")?);\n\n // Use a velocity rescaling thermostat\n\n md.set_thermostat(Box::new(RescaleThermostat::new(units::from(300.0, \"K\")?)));\n\n\n\n let mut simulation = Simulation::new(Box::new(md));\n\n simulation.run(&mut system, 1000);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/nacl.rs", "rank": 83, "score": 126693.73397220351 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut system = System::new();\n\n\n\n let alpha = units::from(50.0, \"deg\")?;\n\n let a_cos = 1.2 * f64::cos(alpha);\n\n let a_sin = 1.2 * f64::sin(alpha);\n\n let mut molecule = Molecule::new(Particle::with_position(\"O\", Vector3D::new(0.0, 0.0, 0.0)));\n\n molecule.add_particle_bonded_to(0, Particle::with_position(\"H\", Vector3D::new(a_cos, a_sin, 0.0)));\n\n molecule.add_particle_bonded_to(0, Particle::with_position(\"H\", Vector3D::new(a_cos, -a_sin, 0.0)));\n\n system.add_molecule(molecule);\n\n\n\n system.set_bond_potential(\n\n (\"O\", \"H\"),\n\n Box::new(Harmonic {\n\n x0: units::from(1.1, \"A\")?,\n\n k: units::from(100.0, \"kJ/mol/A^2\")?,\n\n }),\n\n );\n\n system.set_angle_potential(\n\n (\"H\", \"O\", \"H\"),\n", "file_path": "examples/minimization.rs", "rank": 84, "score": 126693.73397220351 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut system = System::with_cell(UnitCell::cubic(17.0));\n\n\n\n // Create a cubic crystal of Argon by hand.\n\n for i in 0..5 {\n\n for j in 0..5 {\n\n for k in 0..5 {\n\n let position = Vector3D::new(i as f64 * 3.4, j as f64 * 3.4, k as f64 * 3.4);\n\n let particle = Particle::with_position(\"Ar\", position);\n\n system.add_molecule(Molecule::new(particle));\n\n }\n\n }\n\n }\n\n\n\n let lj = Box::new(LennardJones {\n\n sigma: units::from(3.4, \"A\")?,\n\n epsilon: units::from(1.0, \"kJ/mol\")?,\n\n });\n\n system.set_pair_potential(\n\n (\"Ar\", \"Ar\"),\n", "file_path": "examples/argon.rs", "rank": 85, "score": 126693.73397220351 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum UnitExpr {\n\n /// A single value\n\n Val(f64),\n\n /// Multiplication of left-hand side by right-hand side\n\n Mul(Box<UnitExpr>, Box<UnitExpr>),\n\n /// Division of left-hand side by right-hand side\n\n Div(Box<UnitExpr>, Box<UnitExpr>),\n\n /// Take the 
power of the expr by the `i32` value\n\n Pow(Box<UnitExpr>, i32),\n\n}\n\n\n\nimpl UnitExpr {\n\n /// Recursively evaluate an unit expression\n\n fn eval(&self) -> f64 {\n\n match *self {\n\n UnitExpr::Val(v) => v,\n\n UnitExpr::Mul(ref lhs, ref rhs) => lhs.eval() * rhs.eval(),\n\n UnitExpr::Div(ref lhs, ref rhs) => lhs.eval() / rhs.eval(),\n\n UnitExpr::Pow(ref expr, pow) => expr.eval().powi(pow),\n\n }\n", "file_path": "lumol-core/src/units.rs", "rank": 86, "score": 124846.28502836419 }, { "content": "/// Get the angles between the vectors `u` and `v`.\n\nfn angle(u: Vector3D, v: Vector3D) -> f64 {\n\n let un = u.normalized();\n\n let vn = v.normalized();\n\n acos(un * vn)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::f64;\n\n use std::f64::consts::PI;\n\n use crate::Matrix3;\n\n\n\n use approx::{assert_ulps_eq, assert_relative_eq};\n\n\n\n #[test]\n\n #[should_panic]\n\n fn negative_cubic() {\n\n let _ = UnitCell::cubic(-4.0);\n\n }\n", "file_path": "lumol-core/src/sys/config/cells.rs", "rank": 87, "score": 124735.45479803598 }, { "content": "/// Writing an output at a given frequency\n\nstruct OutputFrequency {\n\n /// The output to use\n\n output: Box<dyn Output>,\n\n /// The frequency. `output` will be used every time the system step matches\n\n /// this frequency.\n\n frequency: u64,\n\n}\n\n\n\nimpl OutputFrequency {\n\n pub fn new(output: Box<dyn Output>) -> OutputFrequency {\n\n OutputFrequency {\n\n frequency: 1,\n\n output: output,\n\n }\n\n }\n\n\n\n pub fn with_frequency(output: Box<dyn Output>, frequency: u64) -> OutputFrequency {\n\n OutputFrequency {\n\n frequency: frequency,\n\n output: output,\n", "file_path": "lumol-sim/src/simulations.rs", "rank": 88, "score": 124125.3939244345 }, { "content": "/// Ensure that a logger will be initialized, even in case of error in the log\n\n/// section of the input file\n\nstruct EnsureLogger;\n\nimpl Drop for EnsureLogger {\n\n fn drop(&mut self) {\n\n // Setup default logger unconditionally, it won't do anything if another\n\n // logger is already initialized.\n\n setup_default_logger();\n\n }\n\n}\n\n\n\nimpl Input {\n\n /// Setup the logger from the input file or to stdout as a default.\n\n pub(crate) fn setup_logging(&self) -> Result<(), Error> {\n\n let _guard = EnsureLogger;\n\n if let Some(loggers) = self.config.get(\"log\") {\n\n let loggers = loggers.as_table().ok_or(\n\n Error::from(\"'log' section must be a table\")\n\n )?;\n\n\n\n if loggers.get(\"target\").is_some() {\n\n if loggers.get(\"targets\").is_some() {\n", "file_path": "lumol-input/src/simulations/logging.rs", "rank": 89, "score": 121874.94963867782 }, { "content": "#[derive(Debug)]\n\nstruct LogEncoder;\n\n\n\nimpl Encode for LogEncoder {\n\n fn encode(&self, out: &mut dyn Write, record: &Record<'_>) -> Result<(), LogError> {\n\n match record.level() {\n\n log::Level::Trace => write!(out, \"[trace] \")?,\n\n log::Level::Debug => write!(out, \"[debug] \")?,\n\n log::Level::Info => {}\n\n log::Level::Warn => {\n\n out.set_style(Style::new().text(Color::Red))?;\n\n write!(out, \"[warning] \")?;\n\n out.set_style(&Style::new())?;\n\n }\n\n log::Level::Error => {\n\n out.set_style(Style::new().text(Color::Red).intense(true))?;\n\n write!(out, \"[error] \")?;\n\n }\n\n }\n\n\n\n write!(out, \"{}\", record.args())?;\n", "file_path": "lumol-input/src/simulations/logging.rs", "rank": 90, "score": 121865.29016131094 }, { "content": "//! Example of a run using input files for the simulation and the system\n\n//! 
This is the exact same simulation as the one in `binary.rs`\n\nfn main() {\n\n let input = lumol::input::Input::new(\"data/simulation.toml\").unwrap();\n\n match input.read() {\n\n Err(error) => println!(\"Error in input: {}\", error),\n\n Ok(mut config) => {\n\n config.simulation.run(&mut config.system, config.nsteps);\n\n }\n\n }\n\n}\n", "file_path": "examples/input.rs", "rank": 91, "score": 121432.64705491642 }, { "content": "/// Global settings for the pair interactions\n\nstruct GlobalInformation<'a> {\n\n cutoff: Option<&'a Value>,\n\n tail: Option<bool>,\n\n}\n\n\n\nimpl GlobalInformation<'_> {\n\n fn read(config: &Table) -> Result<GlobalInformation<'_>, Error> {\n\n match config.get(\"global\") {\n\n Some(global) => {\n\n let global = global.as_table().ok_or(\n\n Error::from(\"'global' section must be a table\")\n\n )?;\n\n\n\n let cutoff = global.get(\"cutoff\");\n\n let tail = global.get(\"tail_correction\")\n\n .map(|tail| {\n\n tail.as_bool().ok_or(\n\n Error::from(\"the 'tail_correction' section must be a boolean value\")\n\n )\n\n })\n", "file_path": "lumol-input/src/interactions/pairs.rs", "rank": 92, "score": 119751.02424219874 }, { "content": "#[test]\n\nfn constant_pressure() {\n\n START.call_once(::env_logger::init);\n\n let path = Path::new(file!()).parent()\n\n .unwrap()\n\n .join(\"data\")\n\n .join(\"mc-ethane\")\n\n .join(\"npt.toml\");\n\n let mut config = Input::new(path).unwrap().read().unwrap();\n\n\n\n let collecter = utils::Collecter::starting_at((config.nsteps - 50_000) as u64);\n\n let pressures = collecter.pressures();\n\n\n\n config.simulation.add_output(Box::new(collecter));\n\n config.simulation.run(&mut config.system, config.nsteps);\n\n\n\n let pressure = utils::mean(pressures.clone());\n\n let expected = units::from(200.0, \"bar\").unwrap();\n\n let tolerance = units::from(200.0, \"bar\").unwrap();\n\n assert!(f64::abs(pressure - expected) < tolerance);\n\n}\n", "file_path": "tests/mc-ethane.rs", "rank": 93, "score": 118211.84901839911 }, { "content": "fn apply_particle_permutation(bonds: &mut Vec<[u64; 2]>, permutations: &[Permutation]) {\n\n for bond in bonds {\n\n // Search for a permutation applying to the first atom of the bond. We\n\n // need to stop just after the first permutations is found, because we\n\n // can have a permutation looking like this: [1 -> 2, 2 -> 3, 3 -> 4].\n\n // If we do not stop after the first match, then all indexes in 1-3\n\n // range will become 4.\n\n for permutation in permutations {\n\n if bond[0] == permutation.old as u64 {\n\n bond[0] = permutation.new as u64;\n\n break;\n\n }\n\n }\n\n\n\n // Now we look for permutations applying to the second atom of the bond\n\n for permutation in permutations {\n\n if bond[1] == permutation.old as u64 {\n\n bond[1] = permutation.new as u64;\n\n break;\n\n }\n", "file_path": "lumol-core/src/sys/chfl.rs", "rank": 94, "score": 117610.50180388731 }, { "content": "#[allow(trivial_casts)]\n\nfn shunting_yard(tokens: Vec<Token>) -> Result<Vec<Token>, ParseError> {\n\n let mut operators = Vec::<Token>::new();\n\n let mut output = Vec::new();\n\n for token in tokens {\n\n match token {\n\n Token::Value(..) 
=> output.push(token),\n\n Token::Mul | Token::Div | Token::Pow => {\n\n while !operators.is_empty() {\n\n let top_operator = operators.last().expect(MISSING_OPERATOR).clone();\n\n // All the operators are left-associative\n\n if token.precedence() <= top_operator.precedence() {\n\n output.push(operators.pop().expect(MISSING_OPERATOR));\n\n } else {\n\n break;\n\n }\n\n }\n\n operators.push(token);\n\n }\n\n Token::LParen => operators.push(token),\n\n Token::RParen => {\n", "file_path": "lumol-core/src/units.rs", "rank": 95, "score": 117346.801056519 }, { "content": "/// Convert a TOML table to a Rust type.\n\npub trait FromToml: Sized {\n\n /// Do the conversion from `table` to Self.\n\n fn from_toml(table: &Table) -> Result<Self, Error>;\n\n}\n\n\n", "file_path": "lumol-input/src/lib.rs", "rank": 96, "score": 115979.72511309036 }, { "content": "/// Marker trait for potentials that can be used for non-bonded two body\n\n/// interactions.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use lumol_core::energy::{Potential, PairPotential};\n\n///\n\n/// // A no-op potential\n\n/// #[derive(Clone)]\n\n/// struct Null;\n\n///\n\n/// impl Potential for Null {\n\n/// fn energy(&self, x: f64) -> f64 {0.0}\n\n/// fn force(&self, x: f64) -> f64 {0.0}\n\n/// }\n\n///\n\n/// // By implementing this trait, we can use the Null potential for pair\n\n/// // interactions\n\n/// impl PairPotential for Null {\n\n/// fn tail_energy(&self, cutoff: f64) -> f64 {\n\n/// return 0.0;\n\n/// }\n\n///\n\n/// fn tail_virial(&self, cutoff: f64) -> f64 {\n\n/// return 0.0;\n\n/// }\n\n/// }\n\n/// ```\n\npub trait PairPotential: Potential + BoxClonePair {\n\n /// Compute the virial contribution corresponding to the distance `r`\n\n /// between the particles.\n\n fn virial(&self, r: &Vector3D) -> Matrix3 {\n\n let fact = self.force(r.norm());\n\n let rn = r.normalized();\n\n let force = fact * rn;\n\n force.tensorial(r)\n\n }\n\n\n\n /// Compute the tail correction to the energy for the given cutoff.\n\n ///\n\n /// Calling `V(r)` the `Potential::energy(r)` function corresponding to this\n\n /// potential, this function should return the integral from `cutoff` to\n\n /// infinity of `r^2 V(r)`: `\\int_{cutoff}^\\infty r^2 V(r) dr`.\n\n ///\n\n /// If this integral does not converge for the current potential, this\n\n /// function should then return 0 to disable tail corrections.\n\n fn tail_energy(&self, cutoff: f64) -> f64;\n\n\n", "file_path": "lumol-core/src/energy/mod.rs", "rank": 97, "score": 107110.92860379783 }, { "content": "/// Marker trait for potentials that can be used for molecular bonds.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use lumol_core::energy::{Potential, BondPotential};\n\n///\n\n/// // A no-op potential\n\n/// #[derive(Clone)]\n\n/// struct Null;\n\n///\n\n/// impl Potential for Null {\n\n/// fn energy(&self, x: f64) -> f64 {0.0}\n\n/// fn force(&self, x: f64) -> f64 {0.0}\n\n/// }\n\n///\n\n/// // Now we can use the Null potential for bonds\n\n/// impl BondPotential for Null {}\n\n/// ```\n\npub trait BondPotential: Potential + BoxCloneBond {\n\n /// Compute the virial contribution corresponding to the distance `r`\n\n /// between the particles.\n\n fn virial(&self, r: &Vector3D) -> Matrix3 {\n\n let fact = self.force(r.norm());\n\n let rn = r.normalized();\n\n let force = fact * rn;\n\n force.tensorial(r)\n\n }\n\n}\n\nimpl_box_clone!(BondPotential, BoxCloneBond, box_clone_bond);\n\n\n", "file_path": "lumol-core/src/energy/mod.rs", "rank": 98, "score": 107102.24670859803 }, { 
"content": "/// Marker trait for potentials that can be used for molecular angles.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use lumol_core::energy::{Potential, AnglePotential};\n\n///\n\n/// // A no-op potential\n\n/// #[derive(Clone)]\n\n/// struct Null;\n\n///\n\n/// impl Potential for Null {\n\n/// fn energy(&self, x: f64) -> f64 {0.0}\n\n/// fn force(&self, x: f64) -> f64 {0.0}\n\n/// }\n\n///\n\n/// // Now we can use the Null potential for angles\n\n/// impl AnglePotential for Null {}\n\n/// ```\n\npub trait AnglePotential: Potential + BoxCloneAngle {}\n\nimpl_box_clone!(AnglePotential, BoxCloneAngle, box_clone_angle);\n\n\n", "file_path": "lumol-core/src/energy/mod.rs", "rank": 99, "score": 107102.24670859803 } ]
Rust
import/src/location/record.rs
pixunil/tiny-transport
7aee05ba0303e005768e44cabd995b6413c4ce85
use std::collections::HashMap;
use std::rc::Rc;

use serde_derive::Deserialize;

use super::{Location, LocationId, LocationImportError, LocationKind};
use crate::coord::project;

#[derive(Debug, PartialEq, Deserialize)]
pub(super) struct LocationRecord {
    stop_id: LocationId,
    #[serde(rename = "location_type")]
    location_kind: LocationKind,
    parent_station: Option<LocationId>,
    stop_name: String,
    stop_lat: f64,
    stop_lon: f64,
}

impl LocationRecord {
    pub(super) fn stop_id(&self) -> &LocationId {
        &self.stop_id
    }

    pub(super) fn parent_station(&self) -> Option<&LocationId> {
        self.parent_station.as_ref()
    }

    pub(super) fn try_import(
        self,
        locations: &mut HashMap<LocationId, Rc<Location>>,
    ) -> Result<(), Self> {
        match self.parent_station {
            Some(ref parent_id) => match locations.get(parent_id).cloned() {
                Some(parent) => {
                    locations.insert(self.stop_id, parent);
                    Ok(())
                }
                None => Err(self),
            },
            None => {
                let id = self.stop_id.clone();
                locations.insert(id, Rc::new(self.into()));
                Ok(())
            }
        }
    }

    pub(super) fn import_or_enqueue(
        self,
        locations: &mut HashMap<LocationId, Rc<Location>>,
        queues: &mut (Vec<Self>, Vec<Self>),
    ) -> Result<(), LocationImportError> {
        if let Err(record) = self.try_import(locations) {
            match record.location_kind {
                LocationKind::Station => {
                    return Err(LocationImportError::StationHasParent(record));
                }
                LocationKind::Stop | LocationKind::Entrance | LocationKind::GenericNode => {
                    queues.0.push(record);
                }
                LocationKind::BoardingArea => {
                    queues.1.push(record);
                }
            }
        }
        Ok(())
    }
}

impl Into<Location> for LocationRecord {
    fn into(self) -> Location {
        let position = project(self.stop_lat, self.stop_lon);
        Location::new(self.stop_id, self.stop_name, position)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::fixtures::locations;
    use test_utils::map;

    fn main_station_record() -> LocationRecord {
        LocationRecord {
            stop_id: "hauptbahnhof".into(),
            location_kind: LocationKind::Station,
            parent_station: None,
            stop_name: "Hauptbahnhof".to_string(),
            stop_lat: 52.526,
            stop_lon: 13.369,
        }
    }

    fn main_station_platform_record() -> LocationRecord {
        LocationRecord {
            stop_id: "hauptbahnhof_1".into(),
            location_kind: LocationKind::Stop,
            parent_station: Some("hauptbahnhof".into()),
            stop_name: "Hauptbahnhof Gleis 1".to_string(),
            stop_lat: 52.526,
            stop_lon: 13.369,
        }
    }

    #[test]
    fn test_into_location() {
        let location: Location = main_station_record().into();
        assert_eq!(location, locations::hauptbahnhof());
    }

    #[test]
    fn test_import_parent() {
        let mut locations = HashMap::new();
        main_station_record().try_import(&mut locations).unwrap();
        assert_eq!(
            locations,
            map! {
                "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
            }
        );
    }

    #[test]
    fn test_import_child_without_parent() {
        let mut locations = HashMap::new();
        let record = main_station_platform_record()
            .try_import(&mut locations)
            .unwrap_err();
        assert_eq!(record, main_station_platform_record());
        assert!(locations.is_empty());
    }

    #[test]
    fn test_import_child_with_parent() {
        let mut locations = map! {
            "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
        };
        main_station_platform_record()
            .try_import(&mut locations)
            .unwrap();
        assert_eq!(
            locations,
            map! {
                "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
                "hauptbahnhof_1" => Rc::new(locations::hauptbahnhof()),
            }
        );
    }
}
use std::collections::HashMap;
use std::rc::Rc;

use serde_derive::Deserialize;

use super::{Location, LocationId, LocationImportError, LocationKind};
use crate::coord::project;

#[derive(Debug, PartialEq, Deserialize)]
pub(super) struct LocationRecord {
    stop_id: LocationId,
    #[serde(rename = "location_type")]
    location_kind: LocationKind,
    parent_station: Option<LocationId>,
    stop_name: String,
    stop_lat: f64,
    stop_lon: f64,
}

impl LocationRecord {
    pub(super) fn stop_id(&self) -> &LocationId {
        &self.stop_id
    }

    pub(super) fn parent_station(&self) -> Option<&LocationId> {
        self.parent_station.as_ref()
    }

    pub(super) fn try_import(
        self,
        locations: &mut HashMap<LocationId, Rc<Location>>,
    ) -> Result<(), Self> {
        match self.parent_station {
            Some(ref parent_id) => match locations.get(parent_id).cloned() {
                Some(parent) => {
                    locations.insert(self.stop_id, parent);
                    Ok(())
                }
                None => Err(self),
            },
            None => {
                let id = self.stop_id.clone();
                locations.insert(id, Rc::new(self.into()));
                Ok(())
            }
        }
    }

    pub(super) fn import_or_enqueue(
        self,
        locations: &mut HashMap<LocationId, Rc<Location>>,
        queues: &mut (Vec<Self>, Vec<Self>),
    ) -> Result<(), LocationImportError> {
        Ok(())
    }
}

impl Into<Location> for LocationRecord {
    fn into(self) -> Location {
        let position = project(self.stop_lat, self.stop_lon);
        Location::new(self.stop_id, self.stop_name, position)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::fixtures::locations;
    use test_utils::map;

    fn main_station_record() -> LocationRecord {
        LocationRecord {
            stop_id: "hauptbahnhof".into(),
            location_kind: LocationKind::Station,
            parent_station: None,
            stop_name: "Hauptbahnhof".to_string(),
            stop_lat: 52.526,
            stop_lon: 13.369,
        }
    }

    fn main_station_platform_record() -> LocationRecord {
        LocationRecord {
            stop_id: "hauptbahnhof_1".into(),
            location_kind: LocationKind::Stop,
            parent_station: Some("hauptbahnhof".into()),
            stop_name: "Hauptbahnhof Gleis 1".to_string(),
            stop_lat: 52.526,
            stop_lon: 13.369,
        }
    }

    #[test]
    fn test_into_location() {
        let location: Location = main_station_record().into();
        assert_eq!(location, locations::hauptbahnhof());
    }

    #[test]
    fn test_import_parent() {
        let mut locations = HashMap::new();
        main_station_record().try_import(&mut locations).unwrap();
        assert_eq!(
            locations,
            map! {
                "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
            }
        );
    }

    #[test]
    fn test_import_child_without_parent() {
        let mut locations = HashMap::new();
        let record = main_station_platform_record()
            .try_import(&mut locations)
            .unwrap_err();
        assert_eq!(record, main_station_platform_record());
        assert!(locations.is_empty());
    }

    #[test]
    fn test_import_child_with_parent() {
        let mut locations = map! {
            "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
        };
        main_station_platform_record()
            .try_import(&mut locations)
            .unwrap();
        assert_eq!(
            locations,
            map! {
                "hauptbahnhof" => Rc::new(locations::hauptbahnhof()),
                "hauptbahnhof_1" => Rc::new(locations::hauptbahnhof()),
            }
        );
    }
}
        if let Err(record) = self.try_import(locations) {
            match record.location_kind {
                LocationKind::Station => {
                    return Err(LocationImportError::StationHasParent(record));
                }
                LocationKind::Stop | LocationKind::Entrance | LocationKind::GenericNode => {
                    queues.0.push(record);
                }
                LocationKind::BoardingArea => {
                    queues.1.push(record);
                }
            }
        }
if_condition
[ { "content": "pub fn project_back(position: Point) -> (f64, f64) {\n\n let utm = Utm::new(position.x, position.y, true, 33, 'U', false);\n\n let coord = Coord::from(utm);\n\n (coord.lat, coord.lon)\n\n}\n\n\n", "file_path": "import/src/coord.rs", "rank": 0, "score": 91804.71011538121 }, { "content": "pub fn project(lat: f64, lon: f64) -> Point {\n\n let coord = Coord::new(lat, lon);\n\n let utm = Utm::from(coord);\n\n Point::new(utm.easting, utm.northing)\n\n}\n\n\n", "file_path": "import/src/coord.rs", "rank": 1, "score": 89775.48835260955 }, { "content": "struct ColorVisitor;\n\n\n\nimpl<'de> Visitor<'de> for ColorVisitor {\n\n type Value = Color;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"color hex string\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Color, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n if value.len() != 7 {\n\n return Err(E::custom(format_args!(\n\n \"invalid hex string: {}, expected 7 instead of {} characters\",\n\n value,\n\n value.len()\n\n )));\n\n }\n", "file_path": "import/src/deserialize/color.rs", "rank": 2, "score": 84864.83782568612 }, { "content": "struct DurationVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DurationVisitor {\n\n type Value = Duration;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"time string\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Duration, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n let seconds = value\n\n .splitn(3, ':')\n\n .map(|chunk| {\n\n str::parse::<i64>(chunk).map_err(|_| {\n\n E::custom(format_args!(\n\n \"invalid time string: {}, invalid digit in {}\",\n\n value, chunk\n", "file_path": "import/src/deserialize/duration.rs", "rank": 3, "score": 84864.83782568612 }, { "content": "struct DirectionVisitor;\n\n\n\nimpl<'de> Visitor<'de> for DirectionVisitor {\n\n type Value = Direction;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"either 0 or 1\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<Direction, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n match value {\n\n 0 => Ok(Direction::Upstream),\n\n 1 => Ok(Direction::Downstream),\n\n _ => Err(E::invalid_value(Unexpected::Unsigned(value), &self)),\n\n }\n\n }\n\n}\n", "file_path": "import/src/deserialize/direction.rs", "rank": 4, "score": 84864.83782568612 }, { "content": "struct NumericBoolVisitor;\n\n\n\nimpl<'de> Visitor<'de> for NumericBoolVisitor {\n\n type Value = bool;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"either 0 or 1\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<bool, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n match value {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n _ => Err(E::invalid_value(Unexpected::Unsigned(value), &self)),\n\n }\n\n }\n\n}\n", "file_path": "import/src/deserialize/numeric_bool.rs", "rank": 5, "score": 80259.94015385876 }, { "content": "struct LineKindVisitor;\n\n\n\nimpl<'de> Visitor<'de> for LineKindVisitor {\n\n type Value = LineKind;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"positive integer\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<LineKind, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n match value {\n\n 100 => Ok(LineKind::Railway),\n\n 109 => Ok(LineKind::SuburbanRailway),\n\n 400 => Ok(LineKind::UrbanRailway),\n\n 3 | 700 => Ok(LineKind::Bus),\n\n 900 => 
Ok(LineKind::Tram),\n\n 1000 => Ok(LineKind::WaterTransport),\n", "file_path": "import/src/deserialize/line_kind.rs", "rank": 6, "score": 80259.94015385876 }, { "content": "struct NaiveDateVisitor;\n\n\n\nimpl<'de> Visitor<'de> for NaiveDateVisitor {\n\n type Value = NaiveDate;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"date string\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<NaiveDate, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n NaiveDate::parse_from_str(value, \"%Y%m%d\").map_err(E::custom)\n\n }\n\n}\n\n\n\npub(crate) fn naive_date<'de, D>(deserializer: D) -> Result<NaiveDate, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n", "file_path": "import/src/deserialize/naive_date.rs", "rank": 7, "score": 80259.94015385876 }, { "content": "#[derive(Debug)]\n\nstruct Smoother {\n\n mode: Mode,\n\n points: Vec<Point>,\n\n last_sharp_turn: Option<usize>,\n\n}\n\n\n\nimpl Smoother {\n\n fn new(mode: Mode) -> Self {\n\n Self {\n\n mode,\n\n points: Vec::new(),\n\n last_sharp_turn: None,\n\n }\n\n }\n\n\n\n fn dedup(&mut self) -> bool {\n\n let len = self.points.len();\n\n if len >= 2 && self.points[len - 2] == self.points[len - 1] {\n\n self.points.pop();\n\n return true;\n", "file_path": "import/src/shape/smoother.rs", "rank": 8, "score": 52929.73342807907 }, { "content": "struct StopCandidate {\n\n pos: usize,\n\n distance: NotNan<f64>,\n\n location: Rc<Location>,\n\n}\n\n\n\nimpl StopCandidate {\n\n fn find_nearest(nodes: &[Node], lower: usize, upper: usize, location: Rc<Location>) -> Self {\n\n let (pos, node) = nodes[lower..upper]\n\n .iter()\n\n .enumerate()\n\n .min_by_key(|(_, node)| node.distance_to(&location))\n\n .unwrap();\n\n Self {\n\n pos: pos + lower,\n\n distance: node.distance_to(&location),\n\n location,\n\n }\n\n }\n\n\n", "file_path": "import/src/trip/route_variant.rs", "rank": 9, "score": 50444.969477564846 }, { "content": "struct State<'a> {\n\n offset: i32,\n\n start_time_offset: i32,\n\n driving_before: Option<DrivingSegment<'a>>,\n\n driving_after: Option<DrivingSegment<'a>>,\n\n}\n\n\n\nimpl<'a> State<'a> {\n\n const MAXIMUM_OFFSET_VALUE: i32 = 25;\n\n\n\n fn new() -> Self {\n\n Self {\n\n offset: 0,\n\n start_time_offset: 0,\n\n driving_before: None,\n\n driving_after: None,\n\n }\n\n }\n\n\n\n fn step(&mut self, driving_segment: Option<DrivingSegment<'a>>) {\n", "file_path": "import/src/trip/schedule.rs", "rank": 10, "score": 49473.133058093175 }, { "content": "#[derive(Debug)]\n\nstruct DrivingSegment<'a> {\n\n duration: &'a mut u32,\n\n weight: f64,\n\n}\n\n\n\nimpl<'a> DrivingSegment<'a> {\n\n fn add_duration(&mut self, delta: i32) {\n\n *self.duration = (*self.duration as i32 + delta).max(1) as u32;\n\n }\n\n}\n\n\n", "file_path": "import/src/trip/schedule.rs", "rank": 11, "score": 48184.657675828836 }, { "content": "use std::fmt;\n\n\n\nuse crate::coord::{transform, Point, PointDebug};\n\nuse crate::create_id_type;\n\n\n\ncreate_id_type!(LocationId);\n\n\n\n#[derive(PartialEq)]\n\npub struct Location {\n\n id: LocationId,\n\n name: String,\n\n position: Point,\n\n}\n\n\n\nimpl Location {\n\n pub(crate) fn new(id: LocationId, name: String, position: Point) -> Location {\n\n Location { id, name, position }\n\n }\n\n\n\n pub(crate) fn id(&self) -> LocationId {\n", "file_path": "import/src/location/location.rs", "rank": 12, "score": 45258.99725598685 }, { "content": " let position = PointDebug::new(self.position, if formatter.alternate() { 6 } else { 3 });\n\n formatter\n\n 
.debug_struct(\"Location\")\n\n .field(\"id\", &self.id)\n\n .field(\"name\", &self.name)\n\n .field(\"position\", &position)\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod fixtures {\n\n use std::collections::HashMap;\n\n use std::rc::Rc;\n\n\n\n use super::*;\n\n use crate::coord::project;\n\n use test_utils::map;\n\n\n\n macro_rules! locations {\n", "file_path": "import/src/location/location.rs", "rank": 13, "score": 45253.44158595551 }, { "content": " self.id.clone()\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n pub fn position(&self) -> Point {\n\n self.position\n\n }\n\n\n\n pub(crate) fn store(&self) -> storage::Station {\n\n let position = transform(self.position());\n\n storage::Station::new(position, self.name.clone())\n\n }\n\n}\n\n\n\n#[cfg(not(tarpaulin_include))]\n\nimpl fmt::Debug for Location {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "import/src/location/location.rs", "rank": 14, "score": 45253.29140819352 }, { "content": " ($($location:ident: $lat:expr, $lon:expr, $name:expr);* $(;)?) => (\n\n $(\n\n pub(crate) fn $location() -> Location {\n\n Location::new(stringify!($location).into(), $name.to_string(), project($lat, $lon))\n\n }\n\n )*\n\n\n\n pub(crate) fn by_id() -> HashMap<LocationId, Rc<Location>> {\n\n map! {\n\n $( stringify!($location) => Rc::new($location()) ),*\n\n }\n\n }\n\n );\n\n }\n\n\n\n locations! {\n\n hauptbahnhof: 52.526, 13.369, \"Hauptbahnhof\";\n\n friedrichstr: 52.520, 13.387, \"Friedrichstr.\";\n\n hackescher_markt: 52.523, 13.402, \"Hackescher Markt\";\n\n bellevue: 52.520, 13.347, \"Bellevue\";\n", "file_path": "import/src/location/location.rs", "rank": 15, "score": 45250.76451768912 }, { "content": "\n\n #[test]\n\n fn test_getters() {\n\n let location = locations::hauptbahnhof();\n\n assert_eq!(location.id(), \"hauptbahnhof\".into());\n\n assert_eq!(location.position(), project(52.526, 13.369));\n\n }\n\n\n\n #[test]\n\n fn test_store() {\n\n let location = locations::hauptbahnhof();\n\n assert_eq!(\n\n location.store(),\n\n storage::fixtures::stations::hauptbahnhof()\n\n );\n\n }\n\n}\n", "file_path": "import/src/location/location.rs", "rank": 16, "score": 45247.650753875816 }, { "content": " potsdamer_platz_bus_stresemannstr: 52.509, 13.377, \"Potsdamer Platz [Bus Stresemannstr.]\";\n\n potsdamer_platz_vossstr: 52.510, 13.377, \"Potsdamer Platz/Voßstr.\";\n\n wannsee: 52.421, 13.179, \"Wannsee\";\n\n wannseebruecke: 52.420, 13.175, \"Wannseebrücke\";\n\n am_kleinen_wannsee: 52.420, 13.167, \"Am Kleinen Wannsee\";\n\n seglerweg: 52.424, 13.161, \"Seglerweg\";\n\n koblanckstr: 52.427, 13.162, \"Koblanckstr.\";\n\n liebermann_villa: 52.429, 13.164, \"Liebermann-Villa\";\n\n am_grossen_wannsee: 52.432, 13.165, \"Am Großen Wannsee\";\n\n haus_der_wannsee_konferenz: 52.433, 13.164, \"Haus der Wannsee-Konferenz\";\n\n zum_heckeshorn: 52.430, 13.161, \"Zum Heckeshorn\";\n\n strasse_zum_loewen: 52.427, 13.160, \"Straße zum Löwen\";\n\n conradstr: 52.420, 13.162, \"Conradstr.\";\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::coord::project;\n\n use crate::fixtures::locations;\n", "file_path": "import/src/location/location.rs", "rank": 17, "score": 45244.75629747342 }, { "content": " gesundbrunnen: 52.549, 13.388, \"Gesundbrunnen\";\n\n ostkreuz: 52.503, 13.469, \"Ostkreuz\";\n\n suedkreuz: 52.475, 13.366, \"Südkreuz\";\n\n westkreuz: 52.501, 13.283, \"Westkreuz\";\n\n nollendorfplatz: 52.500, 13.354, \"Nollendorfplatz\";\n\n viktoria_luise_platz: 
52.496, 13.343, \"Viktoria-Luise-Platz\";\n\n bayerischer_platz: 52.489, 13.340, \"Bayerischer Platz\";\n\n rathaus_schoeneberg: 52.483, 13.342, \"Rathaus Schöneberg\";\n\n innsbrucker_platz: 52.478, 13.343, \"Innsbrucker Platz\";\n\n oranienburger_tor: 52.525, 13.388, \"Oranienburger Tor\";\n\n clara_jaschke_str: 52.525, 13.366, \"Clara-Jaschke-Str.\";\n\n lueneburger_str: 52.523, 13.362, \"Lüneburger Str.\";\n\n lesser_ury_weg: 52.524, 13.362, \"Lesser-Ury-Weg\";\n\n invalidenpark: 52.529, 13.377, \"Invalidenpark\";\n\n naturkundemuseum: 52.530, 13.382, \"Naturkundemuseum\";\n\n nordbahnhof: 52.532, 13.389, \"Nordbahnhof\";\n\n gedenkstaette_berliner_mauer: 52.536, 13.390, \"Gedenkstätte Berliner Mauer\";\n\n bernauer_str: 52.538, 13.396, \"Bernauer Str.\";\n\n wolliner_str: 52.540, 13.402, \"Wolliner Str.\";\n\n friedrich_ludwig_jahn_sportpark: 52.541, 13.406, \"Friedrich-Ludwig-Jahn-Sportpark\";\n", "file_path": "import/src/location/location.rs", "rank": 18, "score": 45239.59589031355 }, { "content": " eberswalder_str: 52.541, 13.412, \"Eberswalder Str.\";\n\n husemannstr: 52.540, 13.419, \"Husemannstr.\";\n\n prenzlauer_allee_danziger_str: 52.539, 13.424, \"Prenzlauer Allee/Danziger Str.\";\n\n winsstr: 52.538, 13.429, \"Winsstr.\";\n\n greifswalder_str_danziger_str: 52.536, 13.433, \"Greifswalder Str./Danziger Str.\";\n\n arnswalder_platz: 52.534, 13.437, \"Arnswalder Platz\";\n\n landsberger_allee_petersburger_str: 52.526, 13.447, \"Landsberger Allee/Petersburger Str.\";\n\n strassmannstr: 52.523, 13.450, \"Straßmannstr.\";\n\n bersarinplatz: 52.519, 13.453, \"Bersarinplatz\";\n\n warschauer_str: 52.506, 13.449, \"Warschauer Str.\";\n\n revaler_str: 52.509, 13.451, \"Revaler Str.\";\n\n gruenberger_str_warschauer_str: 52.512, 13.452, \"Grünberger Str./Warschauer Str.\";\n\n frankfurter_tor: 52.516, 13.454, \"Frankfurter Tor\";\n\n kniprodestr_danziger_str: 52.532, 13.442, \"Kniprodestr./Danziger Str.\";\n\n paul_heyse_str: 52.529, 13.445, \"Paul-Heyse-Str.\";\n\n universitaetsstr: 52.519, 13.392, \"Universitätsstr.\";\n\n am_kupfergraben: 52.519, 13.395, \"Am Kupfergraben\";\n\n georgenstr_am_kupfergraben: 52.520, 13.394, \"Georgenstr./Am Kupfergraben\";\n\n anhalter_bahnhof: 52.505, 13.382, \"Anhalter Bahnhof\";\n\n abgeordnetenhaus: 52.507, 13.380, \"Abgeordnetenhaus\";\n", "file_path": "import/src/location/location.rs", "rank": 19, "score": 45239.59589031355 }, { "content": "pub fn transform(point: Point) -> Point2<f32> {\n\n let translated = point.coords - project(52.51, 13.39).coords;\n\n Point2::new(translated.x.round() as f32, -translated.y.round() as f32)\n\n}\n\n\n\npub(crate) struct PointDebug {\n\n position: Point,\n\n precision: usize,\n\n}\n\n\n\n#[cfg(not(tarpaulin_include))]\n\nimpl PointDebug {\n\n pub(crate) fn new(position: Point, precision: usize) -> Self {\n\n PointDebug {\n\n position,\n\n precision,\n\n }\n\n }\n\n}\n\n\n", "file_path": "import/src/coord.rs", "rank": 20, "score": 38690.10435945145 }, { "content": " ))\n\n })\n\n })\n\n .try_fold(0, |acc, time| Ok(60 * acc + time?));\n\n\n\n Ok(Duration::seconds(seconds?))\n\n }\n\n}\n\n\n\npub(crate) fn duration<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Duration, D::Error> {\n\n deserializer.deserialize_str(DurationVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n", "file_path": "import/src/deserialize/duration.rs", "rank": 21, "score": 35901.266256128394 
}, { "content": " use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_six_digit_hex() {\n\n let deserializer: StrDeserializer<ValueError> = \"#ff0420\".into_deserializer();\n\n assert_eq!(color(deserializer), Ok(Color::new(255, 4, 32)));\n\n }\n\n\n\n #[test]\n\n fn test_invalid_digit() {\n\n let deserializer: StrDeserializer<ValueError> = \"#12345g\".into_deserializer();\n\n assert_eq!(\n\n color(deserializer).unwrap_err().to_string(),\n\n \"invalid hex string: #12345g, invalid digit in 5g\"\n\n );\n\n }\n\n\n", "file_path": "import/src/deserialize/color.rs", "rank": 22, "score": 35900.930483533746 }, { "content": "\n\npub(crate) fn direction<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Direction, D::Error> {\n\n deserializer.deserialize_u64(DirectionVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_upstream() {\n\n let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(direction(deserializer), Ok(Direction::Upstream));\n\n }\n\n\n\n #[test]\n\n fn test_downstream() {\n", "file_path": "import/src/deserialize/direction.rs", "rank": 23, "score": 35899.87634899471 }, { "content": "\n\n let component = |number: usize| {\n\n let slice = &value[2 * number + 1..=2 * number + 2];\n\n u8::from_str_radix(slice, 16).map_err(|_| {\n\n E::custom(format_args!(\n\n \"invalid hex string: {}, invalid digit in {}\",\n\n value, slice\n\n ))\n\n })\n\n };\n\n Ok(Color::new(component(0)?, component(1)?, component(2)?))\n\n }\n\n}\n\n\n\npub(crate) fn color<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Color, D::Error> {\n\n deserializer.deserialize_str(ColorVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "import/src/deserialize/color.rs", "rank": 24, "score": 35898.20683089742 }, { "content": " let deserializer: U64Deserializer<ValueError> = 1u64.into_deserializer();\n\n assert_eq!(direction(deserializer), Ok(Direction::Downstream));\n\n }\n\n\n\n #[test]\n\n fn test_invalid() {\n\n let deserializer: U64Deserializer<ValueError> = 2u64.into_deserializer();\n\n assert_eq!(\n\n direction(deserializer).unwrap_err().to_string(),\n\n \"invalid value: integer `2`, expected either 0 or 1\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_empty() {\n\n let deserializer: StrDeserializer<ValueError> = \"\".into_deserializer();\n\n assert_eq!(\n\n direction(deserializer).unwrap_err().to_string(),\n\n \"invalid type: string \\\"\\\", expected either 0 or 1\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/direction.rs", "rank": 25, "score": 35897.9768294128 }, { "content": " let deserializer: StrDeserializer<ValueError> = \"\".into_deserializer();\n\n assert_eq!(\n\n color(deserializer).unwrap_err().to_string(),\n\n \"invalid hex string: , expected 7 instead of 0 characters\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_invalid_type() {\n\n let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(\n\n color(deserializer).unwrap_err().to_string(),\n\n \"invalid type: integer `0`, expected color hex string\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/color.rs", "rank": 26, "score": 35896.02104515461 }, { "content": " #[test]\n\n fn test_too_few_digits() {\n\n let deserializer: StrDeserializer<ValueError> = 
\"#12\".into_deserializer();\n\n assert_eq!(\n\n color(deserializer).unwrap_err().to_string(),\n\n \"invalid hex string: #12, expected 7 instead of 3 characters\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_too_many_digits() {\n\n let deserializer: StrDeserializer<ValueError> = \"#1234567\".into_deserializer();\n\n assert_eq!(\n\n color(deserializer).unwrap_err().to_string(),\n\n \"invalid hex string: #1234567, expected 7 instead of 8 characters\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_empty() {\n", "file_path": "import/src/deserialize/color.rs", "rank": 27, "score": 35895.87653807716 }, { "content": " }\n\n\n\n #[test]\n\n fn test_invalid_digit() {\n\n let deserializer: StrDeserializer<ValueError> = \"12:IV:56\".into_deserializer();\n\n assert_eq!(\n\n duration(deserializer).unwrap_err().to_string(),\n\n \"invalid time string: 12:IV:56, invalid digit in IV\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_invalid_type() {\n\n let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(\n\n duration(deserializer).unwrap_err().to_string(),\n\n \"invalid type: integer `0`, expected time string\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/duration.rs", "rank": 28, "score": 35895.87653807716 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Visitor};\n\nuse serde::Deserializer;\n\n\n\nuse chrono::Duration;\n\n\n", "file_path": "import/src/deserialize/duration.rs", "rank": 29, "score": 35895.59132666475 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Visitor};\n\nuse serde::Deserializer;\n\n\n\nuse simulation::Color;\n\n\n", "file_path": "import/src/deserialize/color.rs", "rank": 30, "score": 35895.59132666475 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Unexpected, Visitor};\n\nuse serde::Deserializer;\n\n\n\nuse simulation::Direction;\n\n\n", "file_path": "import/src/deserialize/direction.rs", "rank": 31, "score": 35895.53575711894 }, { "content": " fn from_hms(hours: i64, minutes: i64, seconds: i64) -> Duration {\n\n Duration::seconds((hours * 60 + minutes) * 60 + seconds)\n\n }\n\n\n\n #[test]\n\n fn test_one_hour_digit() {\n\n let deserializer: StrDeserializer<ValueError> = \"1:34:56\".into_deserializer();\n\n assert_eq!(duration(deserializer), Ok(from_hms(1, 34, 56)));\n\n }\n\n\n\n #[test]\n\n fn test_two_hour_digit() {\n\n let deserializer: StrDeserializer<ValueError> = \"12:34:56\".into_deserializer();\n\n assert_eq!(duration(deserializer), Ok(from_hms(12, 34, 56)));\n\n }\n\n\n\n #[test]\n\n fn test_after_midnight() {\n\n let deserializer: StrDeserializer<ValueError> = \"24:34:56\".into_deserializer();\n\n assert_eq!(duration(deserializer), Ok(from_hms(24, 34, 56)));\n", "file_path": "import/src/deserialize/duration.rs", "rank": 32, "score": 35895.44894569906 }, { "content": "mod color;\n\nmod direction;\n\nmod duration;\n\nmod line_kind;\n\nmod naive_date;\n\nmod numeric_bool;\n\n\n\npub(crate) use color::color;\n\npub(crate) use direction::direction;\n\npub(crate) use duration::duration;\n\npub(crate) use line_kind::line_kind;\n\npub(crate) use naive_date::naive_date;\n\npub(crate) use numeric_bool::numeric_bool;\n", "file_path": "import/src/deserialize/mod.rs", "rank": 33, "score": 35890.881158819604 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::rc::Rc;\n\n\n\nuse super::{Location, LocationId, LocationImportError, LocationRecord};\n\nuse crate::utils::{Action, Dataset};\n\n\n\npub(crate) struct 
Importer;\n\n\n\nimpl Importer {\n\n pub(crate) fn import(\n\n dataset: &mut impl Dataset,\n\n ) -> Result<HashMap<LocationId, Rc<Location>>, Box<dyn Error>> {\n\n let mut queues = (Vec::new(), Vec::new());\n\n let mut locations = HashMap::new();\n\n\n\n let action = Action::start(\"Importing locations\");\n\n for result in action.read_csv(dataset, \"stops.txt\")? {\n\n let record: LocationRecord = result?;\n\n record.import_or_enqueue(&mut locations, &mut queues)?;\n", "file_path": "import/src/location/importer.rs", "rank": 37, "score": 35860.628194794284 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\n\n\nuse super::LocationRecord;\n\n\n\n#[derive(Debug)]\n\npub(super) enum LocationImportError {\n\n StationHasParent(LocationRecord),\n\n ParentNotFound(LocationRecord),\n\n}\n\n\n\nimpl fmt::Display for LocationImportError {\n\n fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n LocationImportError::StationHasParent(record) => write!(\n\n formatter,\n\n \"forbidden parent {} for station {}\",\n\n record.parent_station().unwrap(),\n\n record.stop_id()\n\n ),\n", "file_path": "import/src/location/errors.rs", "rank": 38, "score": 35860.268820555735 }, { "content": " impl<'de> Visitor<'de> for LineKindVisitor {\n\n type Value = LocationKind;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"integer\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<LocationKind, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n match value {\n\n 0 => Ok(LocationKind::Stop),\n\n 1 => Ok(LocationKind::Station),\n\n 2 => Ok(LocationKind::Entrance),\n\n 3 => Ok(LocationKind::GenericNode),\n\n 4 => Ok(LocationKind::BoardingArea),\n\n _ => Err(E::custom(format!(\n\n \"unknown location type of value: {}\",\n\n value\n", "file_path": "import/src/location/kind.rs", "rank": 39, "score": 35857.07724050973 }, { "content": "use std::collections::hash_map::{Entry, HashMap};\n\nuse std::rc::Rc;\n\n\n\nuse crate::location::{Location, LocationId};\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Linearizer {\n\n ids: HashMap<LocationId, usize>,\n\n locations: Vec<Rc<Location>>,\n\n}\n\n\n\nimpl Linearizer {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n ids: HashMap::new(),\n\n locations: Vec::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn retrieve(&mut self, location: &Rc<Location>) -> usize {\n", "file_path": "import/src/location/linearizer.rs", "rank": 40, "score": 35856.7829100283 }, { "content": " }\n\n\n\n for record in queues.0.into_iter().chain(queues.1) {\n\n if let Err(record) = record.try_import(&mut locations) {\n\n return Err(LocationImportError::ParentNotFound(record).into());\n\n }\n\n }\n\n\n\n action.complete(&format!(\"Imported {} locations\", locations.len()));\n\n Ok(locations)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::dataset;\n\n use crate::fixtures::locations;\n\n use test_utils::map;\n\n\n", "file_path": "import/src/location/importer.rs", "rank": 41, "score": 35856.51250440385 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Deserialize, Deserializer, Error as DeserializeError, Visitor};\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub(super) enum LocationKind {\n\n Stop,\n\n Station,\n\n Entrance,\n\n GenericNode,\n\n BoardingArea,\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for LocationKind {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct LineKindVisitor;\n\n\n", "file_path": 
"import/src/location/kind.rs", "rank": 44, "score": 35854.96084211394 }, { "content": " match self.ids.entry(location.id()) {\n\n Entry::Occupied(entry) => *entry.get(),\n\n Entry::Vacant(entry) => {\n\n self.locations.push(location.clone());\n\n *entry.insert(self.locations.len() - 1)\n\n }\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n pub(crate) fn location_ids(&self) -> HashMap<String, usize> {\n\n self.ids\n\n .iter()\n\n .map(|(identifier, id)| (format!(\"{}\", identifier), *id))\n\n .collect()\n\n }\n\n}\n\n\n\nimpl IntoIterator for Linearizer {\n\n type Item = Rc<Location>;\n", "file_path": "import/src/location/linearizer.rs", "rank": 45, "score": 35853.16377308089 }, { "content": " #[test]\n\n fn test_station_with_parent() {\n\n let mut dataset = dataset!(\n\n stops:\n\n stop_id, stop_name, stop_lat, stop_lon, location_type, parent_station;\n\n \"hauptbahnhof\", \"Hauptbahnhof\", 52.526, 13.369, 1, \"bahnhof\"\n\n );\n\n\n\n assert_eq!(\n\n Importer::import(&mut dataset).unwrap_err().to_string(),\n\n \"forbidden parent bahnhof for station hauptbahnhof\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_child_missing_parent() {\n\n let mut dataset = dataset!(\n\n stops:\n\n stop_id, stop_name, stop_lat, stop_lon, location_type, parent_station;\n\n \"hauptbahnhof_1\", \"Hauptbahnhof Gleis 1\", 52.526, 13.369, 0, \"hauptbahnhof\"\n", "file_path": "import/src/location/importer.rs", "rank": 46, "score": 35852.697127847496 }, { "content": " );\n\n\n\n assert_eq!(\n\n Importer::import(&mut dataset).unwrap_err().to_string(),\n\n \"parent hauptbahnhof for location hauptbahnhof_1 not found\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_from_csv() {\n\n let mut dataset = dataset!(\n\n stops:\n\n stop_id, stop_name, stop_lat, stop_lon, location_type, parent_station;\n\n \"hauptbahnhof\", \"Hauptbahnhof\", 52.526, 13.369, 1, \"\";\n\n \"friedrichstr\", \"Friedrichstr.\", 52.520, 13.387, 1, \"\"\n\n );\n\n\n\n assert_eq!(\n\n Importer::import(&mut dataset).unwrap(),\n\n map! 
{\n\n \"hauptbahnhof\" => Rc::new(locations::hauptbahnhof()),\n\n \"friedrichstr\" => Rc::new(locations::friedrichstr()),\n\n }\n\n );\n\n }\n\n}\n", "file_path": "import/src/location/importer.rs", "rank": 47, "score": 35852.0700103785 }, { "content": " LocationImportError::ParentNotFound(record) => write!(\n\n formatter,\n\n \"parent {} for location {} not found\",\n\n record.parent_station().unwrap(),\n\n record.stop_id()\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for LocationImportError {}\n", "file_path": "import/src/location/errors.rs", "rank": 48, "score": 35851.671414667515 }, { "content": " type IntoIter = <Vec<Rc<Location>> as IntoIterator>::IntoIter;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.locations.into_iter()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use itertools::assert_equal;\n\n\n\n use super::*;\n\n use crate::fixtures::locations;\n\n\n\n #[test]\n\n fn test_retrieve() {\n\n let mut linearizer = Linearizer::new();\n\n assert_eq!(linearizer.retrieve(&Rc::new(locations::hauptbahnhof())), 0);\n\n assert_eq!(linearizer.retrieve(&Rc::new(locations::friedrichstr())), 1);\n\n assert_eq!(linearizer.retrieve(&Rc::new(locations::hauptbahnhof())), 0);\n", "file_path": "import/src/location/linearizer.rs", "rank": 49, "score": 35848.857405901996 }, { "content": "mod errors;\n\nmod importer;\n\nmod kind;\n\nmod linearizer;\n\nmod location;\n\nmod record;\n\n\n\n#[cfg(test)]\n\npub(crate) mod fixtures {\n\n pub(crate) use super::location::fixtures as locations;\n\n}\n\n\n\nuse errors::LocationImportError;\n\nuse kind::LocationKind;\n\nuse record::LocationRecord;\n\n\n\npub(crate) use importer::Importer;\n\npub(crate) use linearizer::Linearizer;\n\npub(crate) use location::{Location, LocationId};\n", "file_path": "import/src/location/mod.rs", "rank": 51, "score": 35847.76107998087 }, { "content": " ))),\n\n }\n\n }\n\n }\n\n\n\n deserializer.deserialize_u64(LineKindVisitor)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_deserialize_location_kind() {\n\n assert_de_tokens(&LocationKind::Stop, &[Token::U16(0)]);\n\n assert_de_tokens(&LocationKind::Station, &[Token::U16(1)]);\n\n assert_de_tokens(&LocationKind::Entrance, &[Token::U16(2)]);\n", "file_path": "import/src/location/kind.rs", "rank": 52, "score": 35847.46755140759 }, { "content": " }\n\n\n\n #[test]\n\n fn test_into_vec() {\n\n let mut linearizer = Linearizer::new();\n\n linearizer.retrieve(&Rc::new(locations::hauptbahnhof()));\n\n linearizer.retrieve(&Rc::new(locations::friedrichstr()));\n\n assert_equal(\n\n linearizer,\n\n vec![\n\n Rc::new(locations::hauptbahnhof()),\n\n Rc::new(locations::friedrichstr()),\n\n ],\n\n );\n\n }\n\n}\n", "file_path": "import/src/location/linearizer.rs", "rank": 54, "score": 35843.40187094952 }, { "content": " assert_de_tokens(&LocationKind::GenericNode, &[Token::U16(3)]);\n\n assert_de_tokens(&LocationKind::BoardingArea, &[Token::U16(4)]);\n\n assert_de_tokens_error::<LocationKind>(\n\n &[Token::U16(5)],\n\n \"unknown location type of value: 5\",\n\n );\n\n assert_de_tokens_error::<LocationKind>(\n\n &[Token::Str(\"\")],\n\n \"invalid type: string \\\"\\\", expected integer\",\n\n );\n\n }\n\n}\n", "file_path": "import/src/location/kind.rs", "rank": 55, "score": 35843.33306551274 }, { "content": "\n\npub(crate) fn numeric_bool<'de, D: Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {\n\n 
deserializer.deserialize_u64(NumericBoolVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_true() {\n\n let deserializer: U64Deserializer<ValueError> = 1u64.into_deserializer();\n\n assert_eq!(numeric_bool(deserializer), Ok(true));\n\n }\n\n\n\n #[test]\n\n fn test_false() {\n", "file_path": "import/src/deserialize/numeric_bool.rs", "rank": 57, "score": 34517.03533605064 }, { "content": " _ => Err(E::custom(format!(\"unknown line kind of value: {}\", value))),\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn line_kind<'de, D: Deserializer<'de>>(deserializer: D) -> Result<LineKind, D::Error> {\n\n deserializer.deserialize_u64(LineKindVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_deserialize() {\n\n let deserializer: U64Deserializer<ValueError> = 100u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::Railway));\n", "file_path": "import/src/deserialize/line_kind.rs", "rank": 58, "score": 34516.56609587705 }, { "content": "{\n\n deserializer.deserialize_str(NaiveDateVisitor)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::de::value::{Error as ValueError, StrDeserializer, U64Deserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_date() {\n\n let deserializer: StrDeserializer<ValueError> = \"20190711\".into_deserializer();\n\n assert_eq!(\n\n naive_date(deserializer),\n\n Ok(NaiveDate::from_ymd(2019, 7, 11))\n\n );\n\n }\n\n\n", "file_path": "import/src/deserialize/naive_date.rs", "rank": 59, "score": 34515.378971095524 }, { "content": " let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(numeric_bool(deserializer), Ok(false));\n\n }\n\n\n\n #[test]\n\n fn test_invalid_number() {\n\n let deserializer: U64Deserializer<ValueError> = 2u64.into_deserializer();\n\n assert_eq!(\n\n numeric_bool(deserializer).unwrap_err().to_string(),\n\n \"invalid value: integer `2`, expected either 0 or 1\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_empty() {\n\n let deserializer: StrDeserializer<ValueError> = \"\".into_deserializer();\n\n assert_eq!(\n\n numeric_bool(deserializer).unwrap_err().to_string(),\n\n \"invalid type: string \\\"\\\", expected either 0 or 1\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/numeric_bool.rs", "rank": 60, "score": 34515.12417458509 }, { "content": " let deserializer: U64Deserializer<ValueError> = 100u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::Railway));\n\n let deserializer: U64Deserializer<ValueError> = 109u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::SuburbanRailway));\n\n let deserializer: U64Deserializer<ValueError> = 400u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::UrbanRailway));\n\n let deserializer: U64Deserializer<ValueError> = 3u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::Bus));\n\n let deserializer: U64Deserializer<ValueError> = 700u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::Bus));\n\n let deserializer: U64Deserializer<ValueError> = 900u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::Tram));\n\n let deserializer: 
U64Deserializer<ValueError> = 1000u64.into_deserializer();\n\n assert_eq!(line_kind(deserializer), Ok(LineKind::WaterTransport));\n\n }\n\n\n\n #[test]\n\n fn test_unknown_line_kind() {\n\n let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(\n", "file_path": "import/src/deserialize/line_kind.rs", "rank": 61, "score": 34513.18415555262 }, { "content": " line_kind(deserializer).unwrap_err().to_string(),\n\n \"unknown line kind of value: 0\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_empty() {\n\n let deserializer: StrDeserializer<ValueError> = \"\".into_deserializer();\n\n assert_eq!(\n\n line_kind(deserializer).unwrap_err().to_string(),\n\n \"invalid type: string \\\"\\\", expected positive integer\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/line_kind.rs", "rank": 62, "score": 34513.039250638154 }, { "content": " #[test]\n\n fn test_empty() {\n\n let deserializer: StrDeserializer<ValueError> = \"\".into_deserializer();\n\n assert_eq!(\n\n naive_date(deserializer).unwrap_err().to_string(),\n\n \"premature end of input\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_invalid_type() {\n\n let deserializer: U64Deserializer<ValueError> = 0u64.into_deserializer();\n\n assert_eq!(\n\n naive_date(deserializer).unwrap_err().to_string(),\n\n \"invalid type: integer `0`, expected date string\"\n\n );\n\n }\n\n}\n", "file_path": "import/src/deserialize/naive_date.rs", "rank": 63, "score": 34512.98543552391 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Visitor};\n\nuse serde::Deserializer;\n\n\n\nuse chrono::NaiveDate;\n\n\n", "file_path": "import/src/deserialize/naive_date.rs", "rank": 64, "score": 34512.82007496631 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Unexpected, Visitor};\n\nuse serde::Deserializer;\n\n\n", "file_path": "import/src/deserialize/numeric_bool.rs", "rank": 65, "score": 34512.79324948697 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Error as DeserializeError, Visitor};\n\nuse serde::Deserializer;\n\n\n\nuse simulation::line::Kind as LineKind;\n\n\n", "file_path": "import/src/deserialize/line_kind.rs", "rank": 66, "score": 34512.71107843631 }, { "content": " get uniformLocations() {\n\n return this.programInfo.uniformLocations;\n", "file_path": "wasm/www/js/base/renderer.js", "rank": 67, "score": 27136.17712110252 }, { "content": " fetchLocations() {\n\n this.uniformLocations = {};\n\n const uniformCount = this.gl.getProgramParameter(this.program, this.gl.ACTIVE_UNIFORMS);\n\n for (let uniformIndex = 0; uniformIndex < uniformCount; uniformIndex++) {\n\n const uniform = this.gl.getActiveUniform(this.program, uniformIndex);\n\n const name = uniform.name.substr(2);\n\n this.uniformLocations[name] = this.gl.getUniformLocation(this.program, uniform.name);\n\n }\n\n\n\n this.attributeLocations = {};\n\n const attributeCount = this.gl.getProgramParameter(this.program, this.gl.ACTIVE_ATTRIBUTES);\n\n for (let attributeIndex = 0; attributeIndex < attributeCount; attributeIndex++) {\n\n const attribute = this.gl.getActiveAttrib(this.program, attributeIndex);\n\n const name = attribute.name.substr(2);\n\n this.attributeLocations[name] = this.gl.getAttribLocation(this.program, attribute.name);\n\n }\n", "file_path": "wasm/www/js/base/program-info.js", "rank": 68, "score": 25583.47876227201 }, { "content": "mod action;\n\nmod dataset;\n\n\n\npub(crate) use action::Action;\n\npub(crate) use dataset::Dataset;\n\n\n\n#[macro_export]\n\nmacro_rules! 
create_id_type {\n\n ($name:ident) => {\n\n use serde_derive::Deserialize;\n\n\n\n #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize)]\n\n pub(crate) struct $name(String);\n\n\n\n impl From<&str> for $name {\n\n fn from(value: &str) -> Self {\n\n Self(value.to_string())\n\n }\n\n }\n\n\n\n impl std::fmt::Display for $name {\n\n fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(formatter)\n\n }\n\n }\n\n };\n\n}\n", "file_path": "import/src/utils/mod.rs", "rank": 69, "score": 22.520883284287404 }, { "content": "\n\n fn open_csv(&mut self, name: &str) -> Result<Table, Self::Error> {\n\n let file = self.by_name(name)?;\n\n Ok(Table::new(file.size(), file))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n use std::io;\n\n\n\n use super::*;\n\n\n\n impl Dataset for HashMap<String, String> {\n\n type Error = io::Error;\n\n\n\n fn open_csv(&mut self, name: &str) -> Result<Table, Self::Error> {\n\n let data = self\n\n .get(name)\n", "file_path": "import/src/utils/dataset.rs", "rank": 70, "score": 22.20758988590502 }, { "content": "impl fmt::Display for InvalidProfileError {\n\n fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {\n\n write!(formatter, \"profile '{}' not found\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for InvalidProfileError {}\n\n\n\nimpl TryFrom<&str> for Profile {\n\n type Error = InvalidProfileError;\n\n\n\n fn try_from(value: &str) -> Result<Self, Self::Error> {\n\n match value {\n\n \"berlin-s\" => Ok(Self::BerlinSuburbanRailway),\n\n \"berlin-u\" => Ok(Self::BerlinUrbanRailway),\n\n \"berlin-s+u\" => Ok(Self::BerlinRapidTransit),\n\n \"berlin-s+u+metro\" => Ok(Self::BerlinMetro),\n\n \"berlin-no-r\" => Ok(Self::BerlinWithoutRailway),\n\n \"berlin\" => Ok(Self::Berlin),\n\n \"berlin-brandenburg-no-r\" => Ok(Self::BerlinBrandenburgWithoutRailway),\n\n \"berlin-brandenburg\" => Ok(Self::BerlinBrandenburg),\n\n _ => Err(InvalidProfileError(value.to_string())),\n\n }\n\n }\n\n}\n", "file_path": "import/src/profile.rs", "rank": 71, "score": 21.026307699895465 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\n\n\nuse super::{IncompleteLine, Line, LineColorRecord, LineId, LineRecord};\n\nuse crate::agency::AgencyId;\n\nuse crate::trip::Route;\n\nuse crate::utils::{Action, Dataset};\n\n\n\npub(crate) struct Importer {\n\n id_mapping: HashMap<LineId, usize>,\n\n incomplete_lines: Vec<IncompleteLine>,\n\n}\n\n\n\nimpl Importer {\n\n pub(crate) fn import(dataset: &mut impl Dataset) -> Result<Importer, Box<dyn Error>> {\n\n let mut importer = Self::import_lines(dataset)?;\n\n importer.import_colors(dataset)?;\n\n Ok(importer)\n\n }\n\n\n", "file_path": "import/src/line/importer.rs", "rank": 72, "score": 19.996505879207483 }, { "content": " fn import_lines(dataset: &mut impl Dataset) -> Result<Self, Box<dyn Error>> {\n\n let mut id_mapping = HashMap::new();\n\n let mut incomplete_lines = Vec::new();\n\n\n\n let action = Action::start(\"Importing lines\");\n\n for result in action.read_csv(dataset, \"routes.txt\")? 
{\n\n let record: LineRecord = result?;\n\n record.deduplicate(&mut id_mapping, &mut incomplete_lines);\n\n }\n\n action.complete(&format!(\"Imported {} lines\", incomplete_lines.len()));\n\n Ok(Self {\n\n id_mapping,\n\n incomplete_lines,\n\n })\n\n }\n\n\n\n fn import_colors(&mut self, dataset: &mut impl Dataset) -> Result<(), Box<dyn Error>> {\n\n let mut colors = HashMap::new();\n\n\n\n let action = Action::start(\"Importing colors\");\n", "file_path": "import/src/line/importer.rs", "rank": 73, "score": 19.587066232275046 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\nuse std::rc::Rc;\n\n\n\nuse super::{Service, ServiceExceptionRecord, ServiceId, ServiceRecord};\n\nuse crate::utils::{Action, Dataset};\n\n\n\npub(crate) struct Importer;\n\n\n\nimpl Importer {\n\n pub(crate) fn import(\n\n dataset: &mut impl Dataset,\n\n ) -> Result<HashMap<ServiceId, Rc<Service>>, Box<dyn Error>> {\n\n let mut services = Self::import_services(dataset)?;\n\n Self::add_service_exceptions(dataset, &mut services)?;\n\n\n\n let services = services\n\n .into_iter()\n\n .map(|(id, service)| (id, Rc::new(service)))\n\n .collect();\n", "file_path": "import/src/service/importer.rs", "rank": 74, "score": 19.452409064737438 }, { "content": " mut dataset: impl Dataset,\n\n shape_smoothing: SmoothMode,\n\n ) -> Result<Self, Box<dyn Error>> {\n\n let services = service::Importer::import(&mut dataset)?;\n\n let locations = location::Importer::import(&mut dataset)?;\n\n let shapes = shape::Importer::import(&mut dataset, shape_smoothing)?;\n\n let line_importer = line::Importer::import(&mut dataset)?;\n\n let trip_importer = trip::Importer::new(\n\n &services,\n\n &locations,\n\n &shapes,\n\n line_importer.id_mapping(),\n\n line_importer.line_count(),\n\n );\n\n let routes = trip_importer.import(&mut dataset)?;\n\n let lines = line_importer.finish(routes)?;\n\n let agencies = agency::Importer::import(&mut dataset, lines)?;\n\n Ok(Self { agencies })\n\n }\n\n\n", "file_path": "import/src/lib.rs", "rank": 75, "score": 18.89803322570797 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{Shape, ShapeId};\n\nuse crate::coord::project;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(super) struct ShapeRecord {\n\n shape_id: ShapeId,\n\n shape_pt_lat: f64,\n\n shape_pt_lon: f64,\n\n}\n\n\n\nimpl ShapeRecord {\n\n pub(super) fn import(self, shapes: &mut HashMap<ShapeId, Shape>) {\n\n let position = project(self.shape_pt_lat, self.shape_pt_lon);\n\n shapes\n\n .entry(self.shape_id)\n\n .or_insert_with(Shape::new)\n", "file_path": "import/src/shape/record.rs", "rank": 76, "score": 18.889106571733564 }, { "content": " id_mapping.insert(self.line_id, position);\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(super) struct LineColorRecord {\n\n line: String,\n\n #[serde(deserialize_with = \"deserialize::color\")]\n\n color: Color,\n\n}\n\n\n\nimpl LineColorRecord {\n\n pub(super) fn import(self, colors: &mut HashMap<String, Color>) {\n\n colors.insert(self.line, self.color);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "import/src/line/record.rs", "rank": 77, "score": 18.780834582499626 }, { "content": " type Err = InvalidModeError;\n\n\n\n fn from_str(value: &str) -> Result<Self, Self::Err> {\n\n match value {\n\n \"off\" => Ok(Self::Off),\n\n \"deduplicate\" => Ok(Self::Deduplicate),\n\n \"full\" => Ok(Self::Full),\n\n _ => Err(InvalidModeError(value.to_string())),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", 
"file_path": "import/src/shape/smoother.rs", "rank": 78, "score": 18.53483626403924 }, { "content": " pub(super) fn new(locations: Vec<Rc<Location>>, shape: Shape) -> Self {\n\n Self {\n\n locations,\n\n shape,\n\n trips: Vec::new(),\n\n }\n\n }\n\n\n\n pub(super) fn matches(&self, locations: &[Rc<Location>], shape: &Shape) -> bool {\n\n self.locations == locations && &self.shape == shape\n\n }\n\n\n\n pub(super) fn difference(&self, downstream: &Self) -> impl Ord {\n\n let mut sub_results = iter::repeat_with(|| {\n\n iter::repeat(0)\n\n .take(downstream.locations.len())\n\n .collect::<Vec<_>>()\n\n })\n\n .take(self.locations.len() + 1)\n\n .collect::<Vec<_>>();\n", "file_path": "import/src/trip/route_variant.rs", "rank": 79, "score": 18.50281418782201 }, { "content": "pub(super) struct StopRecord {\n\n trip_id: TripId,\n\n stop_id: LocationId,\n\n #[serde(deserialize_with = \"deserialize::duration\")]\n\n arrival_time: Duration,\n\n #[serde(deserialize_with = \"deserialize::duration\")]\n\n departure_time: Duration,\n\n}\n\n\n\nimpl StopRecord {\n\n pub(super) fn import(\n\n self,\n\n locations: &HashMap<LocationId, Rc<Location>>,\n\n buffers: &mut HashMap<TripId, TripBuffer>,\n\n ) {\n\n buffers.get_mut(&self.trip_id).unwrap().add_stop(\n\n Rc::clone(&locations[&self.stop_id]),\n\n self.arrival_time,\n\n self.departure_time,\n\n );\n", "file_path": "import/src/trip/record.rs", "rank": 80, "score": 18.44837962216313 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{Agency, AgencyId};\n\nuse crate::Line;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(super) struct AgencyRecord {\n\n agency_id: AgencyId,\n\n agency_name: String,\n\n}\n\n\n\nimpl AgencyRecord {\n\n pub(super) fn import(self, lines: &mut HashMap<AgencyId, Vec<Line>>) -> Agency {\n\n let lines = lines.remove(&self.agency_id).unwrap_or_else(Vec::new);\n\n Agency::new(self.agency_name, lines)\n\n }\n\n}\n\n\n", "file_path": "import/src/agency/record.rs", "rank": 81, "score": 18.326034669903684 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::Line;\n\nuse crate::agency::AgencyId;\n\nuse crate::trip::Route;\n\nuse simulation::line::Kind;\n\nuse simulation::Color;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash)]\n\npub(super) struct IncompleteLine {\n\n agency_id: AgencyId,\n\n name: String,\n\n color: Option<Color>,\n\n kind: Kind,\n\n}\n\n\n\nimpl IncompleteLine {\n\n pub(super) fn new(agency_id: AgencyId, name: String, kind: Kind) -> Self {\n\n Self {\n\n agency_id,\n", "file_path": "import/src/line/incomplete_line.rs", "rank": 82, "score": 17.520249148046126 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\n\n\nuse super::{Agency, AgencyId, AgencyRecord};\n\nuse crate::utils::{Action, Dataset};\n\nuse crate::Line;\n\n\n\npub(crate) struct Importer;\n\n\n\nimpl Importer {\n\n pub(crate) fn import(\n\n dataset: &mut impl Dataset,\n\n mut lines: HashMap<AgencyId, Vec<Line>>,\n\n ) -> Result<Vec<Agency>, Box<dyn Error>> {\n\n let mut agencies = Vec::new();\n\n\n\n let action = Action::start(\"Importing agencies\");\n\n for result in action.read_csv(dataset, \"agency.txt\")? 
{\n\n let record: AgencyRecord = result?;\n\n let agency = record.import(&mut lines);\n", "file_path": "import/src/agency/importer.rs", "rank": 83, "score": 17.31849518096228 }, { "content": "use std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse chrono::Duration;\n\n\n\nuse super::{TripBuffer, TripId};\n\nuse crate::deserialize;\n\nuse crate::line::LineId;\n\nuse crate::location::{Location, LocationId};\n\nuse crate::service::{Service, ServiceId};\n\nuse crate::shape::ShapeId;\n\nuse simulation::Direction;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(super) struct TripRecord {\n\n trip_id: TripId,\n\n route_id: LineId,\n\n service_id: ServiceId,\n", "file_path": "import/src/trip/record.rs", "rank": 84, "score": 17.01794571213947 }, { "content": "\n\n Ok(services)\n\n }\n\n\n\n fn import_services(\n\n dataset: &mut impl Dataset,\n\n ) -> Result<HashMap<ServiceId, Service>, Box<dyn Error>> {\n\n let mut services = HashMap::new();\n\n\n\n let action = Action::start(\"Importing services\");\n\n for result in action.read_csv(dataset, \"calendar.txt\")? {\n\n let record: ServiceRecord = result?;\n\n record.import(&mut services);\n\n }\n\n action.complete(&format!(\"Imported {} services\", services.len()));\n\n Ok(services)\n\n }\n\n\n\n fn add_service_exceptions(\n\n dataset: &mut impl Dataset,\n", "file_path": "import/src/service/importer.rs", "rank": 85, "score": 16.841275151956474 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\n\n\nuse super::smoother::Mode;\n\nuse super::{Shape, ShapeId, ShapeRecord};\n\nuse crate::utils::Action;\n\nuse crate::utils::Dataset;\n\n\n\npub(crate) struct Importer;\n\n\n\nimpl Importer {\n\n pub(crate) fn import(\n\n dataset: &mut impl Dataset,\n\n mode: Mode,\n\n ) -> Result<HashMap<ShapeId, Shape>, Box<dyn Error>> {\n\n let mut shapes = HashMap::new();\n\n\n\n let action = Action::start(\"Importing shapes\");\n\n for result in action.read_csv(dataset, \"shapes.txt\")? 
{\n\n let record: ShapeRecord = result?;\n", "file_path": "import/src/shape/importer.rs", "rank": 86, "score": 16.8326030443491 }, { "content": "use serde_derive::{Deserialize, Serialize};\n\n\n\nuse crate::Node;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Direction {\n\n Upstream,\n\n Downstream,\n\n}\n\n\n\nimpl Direction {\n\n pub(crate) fn start(self, len: usize) -> usize {\n\n match self {\n\n Self::Upstream => 0,\n\n Self::Downstream => len - 1,\n\n }\n\n }\n\n\n\n pub(crate) fn find_next(self, current: usize, nodes: &[Node]) -> Option<usize> {\n\n match self {\n", "file_path": "simulation/src/direction.rs", "rank": 87, "score": 16.773346590929147 }, { "content": " dataset: &'s mut S,\n\n name: &str,\n\n ) -> Result<impl Iterator<Item = csv::Result<D>> + 's, S::Error>\n\n where\n\n D: DeserializeOwned + 's,\n\n S: Dataset,\n\n {\n\n let table = dataset.open_csv(name)?;\n\n let reader = Self::percent_bar(table.size, self.message).wrap_read(table.reader);\n\n Ok(csv::Reader::from_reader(reader).into_deserialize())\n\n }\n\n\n\n pub(crate) fn wrap_iter<T>(&mut self, collection: T) -> impl Iterator<Item = T::Item>\n\n where\n\n T: IntoIterator,\n\n {\n\n self.needs_last_line_cleared = true;\n\n let iterator = collection.into_iter();\n\n let progress_bar = Self::percent_bar(iterator.size_hint().0 as u64, self.message);\n\n iterator.progress_with(progress_bar)\n", "file_path": "import/src/utils/action.rs", "rank": 88, "score": 16.77153215465916 }, { "content": " at: linerarizer.retrieve(location),\n\n },\n\n };\n\n let position = transform(self.position);\n\n storage::Node::new(position, kind, self.in_directions)\n\n }\n\n}\n\n\n\n#[cfg(not(tarpaulin_include))]\n\nimpl fmt::Debug for Node {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n let position = PointDebug::new(self.position, if formatter.alternate() { 6 } else { 3 });\n\n match &self.kind {\n\n Kind::Waypoint => formatter\n\n .debug_struct(\"Waypoint\")\n\n .field(\"position\", &position)\n\n .field(\"in_directions\", &self.in_directions)\n\n .finish(),\n\n Kind::Stop { location } => formatter\n\n .debug_struct(\"Stop\")\n", "file_path": "import/src/trip/node.rs", "rank": 89, "score": 16.30372728118317 }, { "content": "use std::fmt;\n\nuse std::iter;\n\n\n\nuse crate::coord::{Point, PointDebug};\n\nuse crate::create_id_type;\n\n\n\ncreate_id_type!(ShapeId);\n\n\n\n#[derive(PartialEq, Clone)]\n\npub(crate) struct Shape {\n\n points: Vec<Point>,\n\n}\n\n\n\nimpl Shape {\n\n pub(super) fn new() -> Self {\n\n Self { points: Vec::new() }\n\n }\n\n\n\n pub(super) fn add(&mut self, position: Point) {\n\n self.points.push(position);\n", "file_path": "import/src/shape/shape.rs", "rank": 90, "score": 16.265779567347128 }, { "content": " fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"either 1 or 2\")\n\n }\n\n\n\n fn visit_u64<E>(self, value: u64) -> Result<ExceptionType, E>\n\n where\n\n E: DeserializeError,\n\n {\n\n match value {\n\n 1 => Ok(ExceptionType::Added),\n\n 2 => Ok(ExceptionType::Removed),\n\n _ => Err(E::custom(format!(\n\n \"unknown exception type of value: {}\",\n\n value\n\n ))),\n\n }\n\n }\n\n }\n\n\n\n deserializer.deserialize_u64(ServiceExceptionTypeVisitor)\n", "file_path": "import/src/service/exception_type.rs", "rank": 91, "score": 16.23059001359243 }, { "content": "use std::fmt;\n\n\n\nuse serde::de::{Deserialize, Deserializer, Error as DeserializeError, Visitor};\n\n\n\n#[derive(Debug, PartialEq, 
Eq)]\n\npub(super) enum ExceptionType {\n\n Added,\n\n Removed,\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ExceptionType {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct ServiceExceptionTypeVisitor;\n\n\n\n impl<'de> Visitor<'de> for ServiceExceptionTypeVisitor {\n\n type Value = ExceptionType;\n\n\n", "file_path": "import/src/service/exception_type.rs", "rank": 92, "score": 16.21401686007004 }, { "content": "\n\n for (a, location_a) in self.locations.iter().enumerate() {\n\n for (b, location_b) in downstream.locations.iter().rev().enumerate() {\n\n if a == 0 || b == 0 {\n\n sub_results[a][b] = a.max(b);\n\n continue;\n\n }\n\n\n\n let mut option_match_or_replace = sub_results[a - 1][b - 1];\n\n if location_a != location_b {\n\n option_match_or_replace += 1;\n\n }\n\n let option_add = sub_results[a - 1][b] + 1;\n\n let option_remove = sub_results[a][b - 1] + 1;\n\n sub_results[a][b] = option_match_or_replace.min(option_add).min(option_remove);\n\n }\n\n }\n\n\n\n sub_results[self.locations.len() - 1][downstream.locations.len() - 1]\n\n }\n", "file_path": "import/src/trip/route_variant.rs", "rank": 93, "score": 16.142098536977645 }, { "content": "use chrono::NaiveDate;\n\n\n\nuse crate::create_id_type;\n\nuse crate::location::Linearizer;\n\nuse crate::trip::{Route, Scheduler};\n\nuse simulation::line::Kind;\n\nuse simulation::Color;\n\n\n\ncreate_id_type!(LineId);\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Line {\n\n name: String,\n\n color: Color,\n\n kind: Kind,\n\n routes: Vec<Route>,\n\n}\n\n\n\nimpl Line {\n\n pub(crate) fn new(name: String, color: Color, kind: Kind, routes: Vec<Route>) -> Line {\n", "file_path": "import/src/line/line.rs", "rank": 94, "score": 15.955304206632974 }, { "content": "}\n\n\n\npub(crate) trait Dataset {\n\n type Error: Error + 'static;\n\n\n\n fn open_csv(&mut self, name: &str) -> Result<Table, Self::Error>;\n\n}\n\n\n\nimpl Dataset for PathBuf {\n\n type Error = io::Error;\n\n\n\n fn open_csv(&mut self, name: &str) -> Result<Table, Self::Error> {\n\n self.set_file_name(name);\n\n let file = File::open(&self)?;\n\n Ok(Table::new(file.metadata().unwrap().len(), file))\n\n }\n\n}\n\n\n\nimpl<R: Read + Seek> Dataset for ZipArchive<R> {\n\n type Error = zip::result::ZipError;\n", "file_path": "import/src/utils/dataset.rs", "rank": 95, "score": 15.871228029790956 }, { "content": "use std::collections::HashMap;\n\n\n\nuse itertools::Itertools;\n\n\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{IncompleteLine, LineId};\n\nuse crate::agency::AgencyId;\n\nuse crate::deserialize;\n\nuse simulation::line::Kind;\n\nuse simulation::Color;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(super) struct LineRecord {\n\n #[serde(rename = \"route_id\")]\n\n line_id: LineId,\n\n agency_id: AgencyId,\n\n route_short_name: String,\n\n #[serde(rename = \"route_type\", deserialize_with = \"deserialize::line_kind\")]\n\n line_kind: Kind,\n", "file_path": "import/src/line/record.rs", "rank": 96, "score": 15.658859375960633 }, { "content": " }\n\n\n\n pub(crate) fn read_csv<'s, D, S>(\n\n &self,\n\n dataset: &'s mut S,\n\n name: &str,\n\n ) -> Result<impl Iterator<Item = csv::Result<D>> + 's, S::Error>\n\n where\n\n D: DeserializeOwned + 's,\n\n S: Dataset,\n\n {\n\n let table = dataset.open_csv(name)?;\n\n Ok(csv::Reader::from_reader(table.reader).into_deserialize())\n\n }\n\n\n\n pub(crate) fn wrap_iter<T>(&mut self, collection: T) -> impl Iterator<Item = T::Item>\n\n where\n\n T: IntoIterator,\n\n {\n\n 
collection.into_iter()\n\n }\n\n\n\n pub(crate) fn complete(self, _message: &str) {}\n\n}\n", "file_path": "import/src/utils/action.rs", "rank": 97, "score": 15.395849897784187 }, { "content": "use na::Point2;\n\nuse wasm_bindgen::prelude::*;\n\n\n\nuse crate::view::View;\n\n\n\n#[wasm_bindgen]\n\npub struct Dataset {\n\n inner: simulation::Dataset,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl Dataset {\n\n pub fn parse(data: &[u8]) -> Self {\n\n let dataset = bincode::deserialize::<storage::Dataset>(data).unwrap();\n\n Self {\n\n inner: dataset.load(),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, time_passed: u32) {\n", "file_path": "wasm/src/dataset.rs", "rank": 98, "score": 15.287657496169704 }, { "content": " ) -> Result<HashMap<TripId, TripBuffer>, Box<dyn Error>> {\n\n let mut buffers = HashMap::new();\n\n\n\n let action = Action::start(\"Importing trips\");\n\n for result in action.read_csv(dataset, \"trips.txt\")? {\n\n let record: TripRecord = result?;\n\n record.import(self.id_mapping, self.services, &mut buffers);\n\n }\n\n action.complete(&format!(\"Imported {} trips\", buffers.len()));\n\n Ok(buffers)\n\n }\n\n\n\n fn add_trip_stops(\n\n &self,\n\n dataset: &mut impl Dataset,\n\n buffers: &mut HashMap<TripId, TripBuffer>,\n\n ) -> Result<(), Box<dyn Error>> {\n\n let action = Action::start(\"Importing trip stops\");\n\n for result in action.read_csv(dataset, \"stop_times.txt\")? {\n\n let record: StopRecord = result?;\n", "file_path": "import/src/trip/importer.rs", "rank": 99, "score": 15.245798111091336 } ]
Rust
idp2p-client/did/microledger.rs
idp2p/idp2p
c5dec982dd03d4c7c0ea6af605042df21f62906f
use super::{ eventlog::{EventLog, EventLogChange, EventLogPayload} }; use crate::IdentityError; use idp2p_common::{ anyhow::Result, chrono::prelude::*, encode, encode_vec, generate_json_cid, hash, IdKeyDigest, IDP2P_ED25519, Idp2pCodec, }; use serde::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct AssertionMethod { pub valid_at: i64, pub expired_at: Option<i64>, pub ver_method: VerificationMethod, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerState { pub event_id: String, #[serde(with = "encode_vec")] pub next_key_digest: IdKeyDigest, #[serde(with = "encode_vec")] pub recovery_key_digest: IdKeyDigest, pub assertion_keys: Vec<AssertionMethod>, pub authentication_key: Option<VerificationMethod>, pub agreement_key: Option<VerificationMethod>, pub proofs: HashMap<String, String>, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerInception { #[serde(rename = "keyType")] pub key_type: String, #[serde(with = "encode_vec", rename = "recoveryKeyDigest")] pub recovery_key_digest: Vec<u8>, #[serde(with = "encode_vec", rename = "nextKeyDigest")] pub next_key_digest: IdKeyDigest, } impl MicroLedgerInception { pub fn get_id(&self) -> String { generate_json_cid(self).unwrap() } } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedger { pub inception: MicroLedgerInception, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub events: Vec<EventLog>, } impl MicroLedger { pub fn new(recovery_key_digest: &[u8], next_key_digest: &[u8]) -> Self { let inception = MicroLedgerInception { key_type: IDP2P_ED25519.to_owned(), recovery_key_digest: recovery_key_digest.to_owned(), next_key_digest: next_key_digest.to_owned(), }; MicroLedger { inception, events: vec![], } } pub fn create_event( &self, signer_key: &[u8], next_digest: &[u8], change: Vec<EventLogChange>, ) -> EventLogPayload { let previous = self.get_previous_id(); EventLogPayload { previous: previous, signer_key: signer_key.to_owned(), next_key_digest: next_digest.to_owned(), change: change, timestamp: Utc::now().timestamp(), } } pub fn save_event(&mut self, payload: EventLogPayload, proof: &[u8]) { let event_log = EventLog::new(payload, proof); self.events.push(event_log); } pub fn verify(&self, cid: &str) -> Result<MicroLedgerState, IdentityError> { let mut state = MicroLedgerState { event_id: self.inception.get_id(), recovery_key_digest: self.inception.recovery_key_digest.clone(), next_key_digest: self.inception.next_key_digest.clone(), assertion_keys: vec![], authentication_key: None, agreement_key: None, proofs: HashMap::new(), }; check!(cid == self.inception.get_id(), IdentityError::InvalidId); for event in &self.events { let previous_valid = event.payload.previous == state.event_id; check!(previous_valid, IdentityError::InvalidPrevious); let event_valid = event.verify(&event.payload.signer_key); check!(event_valid, IdentityError::InvalidEventSignature); let signer_digest = hash(&event.payload.signer_key); check!( signer_digest == state.next_key_digest, IdentityError::InvalidSigner ); for change in &event.payload.change { match &change { EventLogChange::SetAssertionKey { verification_method, } => { let previous_key = state.assertion_keys.last_mut(); if let Some(previous_key) = previous_key { previous_key.expired_at = Some(event.payload.timestamp); } let assertion_method = AssertionMethod { valid_at: event.payload.timestamp, expired_at: None, ver_method: 
verification_method.clone(), }; state.assertion_keys.push(assertion_method); } EventLogChange::SetAuthenticationKey { verification_method, } => { state.authentication_key = Some(verification_method.clone()); } EventLogChange::SetAgreementKey { verification_method, } => { state.agreement_key = Some(verification_method.clone()); } EventLogChange::SetProof(stmt) => { let key = encode(&stmt.key); let value = encode(&stmt.value); state.proofs.insert(key, value); } } } state.next_key_digest = event.payload.next_key_digest.clone(); state.event_id = event.get_id(); } Ok(state) } pub fn get_previous_id(&self) -> String { let previous_id = if self.events.len() == 0 { self.inception.get_id() } else { let e = self.events.last().unwrap(); e.get_id() }; previous_id } } #[cfg(test)] mod tests { use super::*; use crate::json::did::eventlog::*; use idp2p_common::secret::EdSecret; use idp2p_common::ED25519; #[test] fn id_test() { let expected_id = "bagaaieraqun2pn4ycd3b4nq4ptyzfnxea4hohwlgd7vdu3cifiy2fowvvpuq"; let ledger = create_microledger().0; assert_eq!(ledger.inception.get_id(), expected_id); } #[test] fn verify_test() { let ledger = create_microledger().0; let result = ledger.verify(&ledger.inception.get_id()); assert!(result.is_ok(), "{:?}", result); } #[test] fn verify_invalid_id_test() { let ledger = create_microledger().0; let result = ledger.verify("1"); let is_err = matches!(result, Err(crate::IdentityError::InvalidId)); assert!(is_err, "{:?}", result); } #[test] fn verify_valid_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_proof = EventLogChange::SetProof(ProofStatement { key: vec![1], value: vec![1], }); let ver_method = VerificationMethod { id: id.clone(), controller: id.clone(), typ: ED25519.to_string(), bytes: secret.to_publickey().to_vec(), }; let set_assertion = EventLogChange::SetAssertionKey { verification_method: ver_method.clone(), }; let set_authentication = EventLogChange::SetAuthenticationKey { verification_method: ver_method.clone(), }; let set_agreement = EventLogChange::SetAgreementKey { verification_method: ver_method.clone(), }; let change = vec![ set_proof, set_assertion.clone(), set_authentication, set_agreement, ]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &secret.to_publickey_digest().unwrap(), change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let change = vec![set_assertion]; let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let result = ledger.verify(&id); assert!(result.is_ok()); } #[test] fn verify_invalid_previous_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].payload.previous = "1".to_owned(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidPrevious)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signature_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); 
let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].proof = vec![0; 64]; let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidEventSignature)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signer_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let new_secret = EdSecret::new(); let new_ed_key = new_secret.to_publickey(); ledger.events[0].payload.signer_key = new_ed_key.to_vec(); ledger.events[0].proof = new_secret.sign(&ledger.events[0].payload).to_vec(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidSigner)); assert!(is_err, "{:?}", result); } fn create_microledger() -> (MicroLedger, idp2p_common::secret::EdSecret) { let secret_str = "bd6yg2qeifnixj4x3z2fclp5wd3i6ysjlfkxewqqt2thie6lfnkma"; let secret = idp2p_common::secret::EdSecret::from_str(secret_str).unwrap(); let d = secret.to_publickey_digest().unwrap(); let ledger = MicroLedger::new(&d, &d); (ledger, secret) } }
use super::{ eventlog::{EventLog, EventLogChange, EventLogPayload} }; use crate::IdentityError; use idp2p_common::{ anyhow::Result, chrono::prelude::*, encode, encode_vec, generate_json_cid, hash, IdKeyDigest, IDP2P_ED25519, Idp2pCodec, }; use serde::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct AssertionMethod { pub valid_at: i64, pub expired_at: Option<i64>, pub ver_method: VerificationMethod, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerState { pub event_id: String, #[serde(with = "encode_vec")] pub next_key_digest: IdKeyDigest, #[serde(with = "encode_vec")] pub recovery_key_digest: IdKeyDigest, pub assertion_keys: Vec<AssertionMethod>, pub authentication_key: Option<VerificationMethod>, pub agreement_key: Option<VerificationMethod>, pub proofs: HashMap<String, String>, } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedgerInception { #[serde(rename = "keyType")] pub key_type: String, #[serde(with = "encode_vec", rename = "recoveryKeyDigest")] pub recovery_key_digest: Vec<u8>, #[serde(with = "encode_vec", rename = "nextKeyDigest")] pub next_key_digest: IdKeyDigest, } impl MicroLedgerInception { pub fn get_id(&self) -> String { generate_json_cid(self).unwrap() } } #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct MicroLedger { pub inception: MicroLedgerInception, #[serde(skip_serializing_if = "Vec::is_empty", default)] pub events: Vec<EventLog>, } impl MicroLedger { pub fn new(recovery_key_digest: &[u8], next_key_digest: &[u8]) -> Self { let inception = MicroLedgerInception { key_type: IDP2P_ED25519.to_owned(), recovery_key_digest: recovery_key_digest.to_owned(), next_key_digest: next_key_digest.to_owned(), }; MicroLedger { inception, events: vec![], } } pub fn create_event( &self, signer_key: &[u8], next_digest: &[u8], change: Vec<EventLogChange>, ) -> EventLogPayload { let previous = self.get_previous_id(); EventLogPayload { previous: previous, signer_key: signer_key.to_owned(), next_key_digest: next_digest.to_owned(), change: change, timestamp: Utc::now().timestamp(), } } pub fn save_event(&mut self, payload: EventLogPayload, proof: &[u8]) { let event_log = EventLog::new(payload, proof); self.events.push(event_log); } pub fn verify(&self, cid: &str) -> Result<MicroLedgerState, IdentityError> { let mut state = MicroLedgerState { event_id: self.inception.get_id(), recovery_key_digest: self.inception.recovery_key_digest.clone(), next_key_digest: self.inception.next_key_digest.clone(), assertion_keys: vec![], authentication_key: None, agreement_key: None, proofs: HashMap::new(), }; check!(cid == self.inception.get_id(), IdentityError::InvalidId); for event in &self.events { let previous_valid = event.payload.previous == state.event_id; check!(previous_valid, IdentityError::InvalidPrevious); let event_valid = event.verify(&event.payload.signer_key); check!(event_valid, IdentityError::InvalidEventSignature); let signer_digest = hash(&event.payload.signer_key); check!( signer_digest == state.next_key_digest, IdentityError::InvalidSigner ); for change in &event.payload.change { match &change { EventLogChange::SetAssertionKey { verification_method, } => { let previous_key = state.assertion_keys.last_mut(); if let Some(previous_key) = previous_key { previous_key.expired_at = Some(event.payload.timestamp); } let assertion_method = AssertionMethod { valid_at: event.payload.timestamp, expired_at: None, ver_method: 
verification_method.clone(), }; state.assertion_keys.push(assertion_method); } EventLogChange::SetAuthenticationKey { verification_method, } => { state.authentication_key = Some(verification_method.clone()); } EventLogChange::SetAgreementKey { verification_method, } => { state.agreement_key = Some(verification_method.clone()); } EventLogChange::SetProof(stmt) => { let key = encode(&stmt.key); let value = encode(&stmt.value); state.proofs.insert(key, value); } } } state.next_key_digest = event.payload.next_key_digest.clone(); state.event_id = event.get_id(); } Ok(state) } pub fn get_previous_id(&self) -> String { let previous_id = if self.events.len() == 0 { self.inception.get_id() } else { let e = self.events.last().unwrap(); e.get_id() }; previous_id } } #[cfg(test)] mod tests { use super::*; use crate::json::did::eventlog::*; use idp2p_common::secret::EdSecret; use idp2p_common::ED25519; #[test] fn id_test() { let expected_id = "bagaaieraqun2pn4ycd3b4nq4ptyzfnxea4hohwlgd7vdu3cifiy2fowvvpuq"; let ledger = create_microledger().0; assert_eq!(ledger.inception.get_id(), expected_id); } #[test] fn verify_test() { let ledger = create_microledger().0; let result = ledger.verify(&ledger.inception.get_id()); assert!(result.is_ok(), "{:?}", result); } #[test] fn verify_invalid_id_test() { let ledger = create_microledger().0; let result = ledger.verify("1"); let is_err = matches!(result, Err(crate::IdentityError::InvalidId)); assert!(is_err, "{:?}", result); } #[test] fn verify_valid_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_proof = EventLogChange::SetProof(ProofStatement { key: vec![1], value: vec![1], }); let ver_method = VerificationMethod { id: id.clone(), controller: id.clone(), typ: ED25519.to_string(), bytes: secret.to_publickey().to_vec(), }; let set_assertion = EventLogChange::SetAssertionKey { verification_method: ver_method.clone(), }; let set_authentication = EventLogChange::SetAuthenticationKey { verification_method: ver_method.clone(), }; let set_agreement = EventLogChange::SetAgreementKey { verification_method: ver_method.clone(), }; let change = vec![ set_proof, set_assertion.clone(), set_authentication, set_agreement, ]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &secret.to_publickey_digest().unwrap(
) -> (MicroLedger, idp2p_common::secret::EdSecret) { let secret_str = "bd6yg2qeifnixj4x3z2fclp5wd3i6ysjlfkxewqqt2thie6lfnkma"; let secret = idp2p_common::secret::EdSecret::from_str(secret_str).unwrap(); let d = secret.to_publickey_digest().unwrap(); let ledger = MicroLedger::new(&d, &d); (ledger, secret) } }
), change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let change = vec![set_assertion]; let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let result = ledger.verify(&id); assert!(result.is_ok()); } #[test] fn verify_invalid_previous_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].payload.previous = "1".to_owned(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidPrevious)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signature_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); ledger.events[0].proof = vec![0; 64]; let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidEventSignature)); assert!(is_err, "{:?}", result); } #[test] fn verify_invalid_signer_test() { let (mut ledger, secret) = create_microledger(); let id = ledger.inception.get_id(); let set_change = EventLogChange::SetProof(ProofStatement { key: vec![], value: vec![], }); let change = vec![set_change]; let signer = secret.to_publickey(); let payload = ledger.create_event(&signer, &signer, change); let proof = secret.sign(&payload); ledger.save_event(payload, &proof); let new_secret = EdSecret::new(); let new_ed_key = new_secret.to_publickey(); ledger.events[0].payload.signer_key = new_ed_key.to_vec(); ledger.events[0].proof = new_secret.sign(&ledger.events[0].payload).to_vec(); let result = ledger.verify(&id); let is_err = matches!(result, Err(crate::IdentityError::InvalidSigner)); assert!(is_err, "{:?}", result); } fn create_microledger(
random
[ { "content": "pub fn encode_bytes(value: &[u8]) -> Result<String> {\n\n let mb64 = multibase::encode(Base::Base64Url, value);\n\n Ok(mb64[1..].to_owned())\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 0, "score": 310203.4537153712 }, { "content": "pub fn decode_str(value: &str) -> Result<Vec<u8>> {\n\n let m64 = format!(\"u{}\", value);\n\n let vec = multibase::decode(&m64)?.1;\n\n Ok(vec)\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 1, "score": 296163.97123867465 }, { "content": "fn handle_id_event(event: EventType, ts: i64, state: &mut IdentityState) -> Result<()> {\n\n match event {\n\n EventType::CreateAssertionKey(key) => {\n\n let previous_key = state.assertion_keys.last_mut();\n\n if let Some(previous_key) = previous_key {\n\n previous_key.expired_at = Some(ts);\n\n }\n\n let assertion_method = AssertionKeyState {\n\n valid_at: ts,\n\n expired_at: None,\n\n key: key.try_into()?,\n\n };\n\n state.assertion_keys.push(assertion_method);\n\n }\n\n EventType::CreateAuthenticationKey(key) => {\n\n let previous_key = state.authentication_keys.last_mut();\n\n if let Some(previous_key) = previous_key {\n\n previous_key.expired_at = Some(ts);\n\n }\n\n let authentication_method = AuthenticationKeyState {\n", "file_path": "idp2p-client/did/_protobuf.rs", "rank": 2, "score": 290615.1812277242 }, { "content": "pub fn encode_str(value: &str) -> Result<String> {\n\n let mb64 = multibase::encode(Base::Base64Url, value.as_bytes());\n\n Ok(mb64[1..].to_owned())\n\n}\n\n\n\npub mod encode_vec {\n\n use multibase::Base;\n\n use serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let s = String::deserialize(deserializer)?;\n\n let (_, data) = multibase::decode(&s).map_err(|_| serde::de::Error::custom(\"\"))?;\n\n Ok(data)\n\n }\n\n\n\n pub fn serialize<S, T>(value: &T, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n T: AsRef<[u8]>,\n\n {\n\n format!(\"{}\", multibase::encode(Base::Base64Url, value.as_ref())).serialize(serializer)\n\n }\n\n}\n\n\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 3, "score": 290461.8657902238 }, { "content": "pub fn get_enc_key(password: &str, salt: &[u8]) -> anyhow::Result<Vec<u8>> {\n\n let salt_b64 = crate::multibase::encode(crate::multibase::Base::Base64, salt);\n\n let salt = SaltString::new(&salt_b64[1..]).map_err(|_| anyhow::anyhow!(\"\"))?;\n\n let enc_key = Pbkdf2\n\n .hash_password(password.as_bytes(), &salt)\n\n .map_err(|_| anyhow::anyhow!(\"\"))?;\n\n let enc_key_hash = enc_key.hash.ok_or(anyhow::anyhow!(\"\"))?;\n\n Ok(enc_key_hash.as_bytes().to_vec())\n\n}\n\n\n", "file_path": "idp2p-common/src/cipher.rs", "rank": 4, "score": 271911.82861179195 }, { "content": "pub fn encrypt(enc_key_bytes: &[u8], iv: &[u8], content: &[u8]) -> Result<Vec<u8>> {\n\n let enc_key = Key::from_slice(&enc_key_bytes);\n\n let cipher = ChaCha20Poly1305::new(enc_key);\n\n let nonce = Nonce::from_slice(iv);\n\n let ciphertext = cipher\n\n .encrypt(nonce, content)\n\n .map_err(|_| anyhow::anyhow!(\"\"))?;\n\n Ok(ciphertext)\n\n}\n\n\n", "file_path": "idp2p-common/src/cipher.rs", "rank": 5, "score": 266060.2259418866 }, { "content": "pub fn decrypt(enc_key_bytes: &[u8], iv: &[u8], ciphertext: &[u8]) -> Result<Vec<u8>> {\n\n let enc_key = Key::from_slice(&enc_key_bytes);\n\n let cipher = ChaCha20Poly1305::new(enc_key);\n\n let nonce = Nonce::from_slice(iv);\n\n let result = 
cipher\n\n .decrypt(nonce, ciphertext)\n\n .map_err(|_| anyhow::anyhow!(\"\"))?;\n\n Ok(result)\n\n}\n", "file_path": "idp2p-common/src/cipher.rs", "rank": 6, "score": 266060.2259418866 }, { "content": "pub fn add_sent_message(&mut self, id: &str, msg: &str) {\n\n let conn = self\n\n .connections\n\n .iter_mut()\n\n .find(|conn| conn.id == id)\n\n .unwrap();\n\n conn.sent_messages.push(SentMessage::new(msg));\n\n}\n\n\n", "file_path": "idp2p-wallet/src/message.rs", "rank": 7, "score": 263015.4237668165 }, { "content": "pub fn add_received_message(&mut self, id: &str, msg: &str) {\n\n let conn = self\n\n .connections\n\n .iter_mut()\n\n .find(|conn| conn.id == id)\n\n .unwrap();\n\n conn.received_messages.push(ReceivedMessage::new(msg));\n\n}\n\n\n\n#[test]\n\n fn add_sent_message() {\n\n let did = Identity::from_secret(EdSecret::new());\n\n let did2 = Identity::from_secret(EdSecret::new());\n\n let profile = IdProfile::new(\"adem\", &vec![]);\n\n let profile2 = IdProfile::new(\"caglin\", &vec![]);\n\n let mut w = create_raw_wallet(profile, did.id.as_str());\n\n w.add_conn(Connection::new(&did2.id, profile2));\n\n w.add_sent_message(&did2.id, \"Heyy\");\n\n assert_eq!(w.connections[0].sent_messages[0].text, \"Heyy\");\n\n }\n", "file_path": "idp2p-wallet/src/message.rs", "rank": 8, "score": 263015.4237668165 }, { "content": "pub fn encode<T: Serialize>(value: T) -> Result<String> {\n\n let s = serde_json::to_string(&value)?;\n\n let mb64 = multibase::encode(Base::Base64Url, s.as_bytes());\n\n Ok(mb64[1..].to_owned())\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 9, "score": 253743.23860434463 }, { "content": "fn ensure_cid(cid: &[u8], inception: &[u8]) -> Result<()> {\n\n let cid: Cid = cid.to_vec().try_into()?;\n\n if cid.codec() != Idp2pCodec::Protobuf as u64 {\n\n bail!(IdentityError::InvalidId)\n\n }\n\n if !cid.hash().is_hash_of(inception)? 
{\n\n bail!(IdentityError::InvalidId)\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use idp2p_common::secret::EdSecret;\n\n\n\n use super::*;\n\n #[test]\n\n fn create_test() -> Result<()> {\n\n let secret = Idp2pSecret::Idp2p25519 {\n\n secret: EdSecret::new(),\n", "file_path": "idp2p-client/did/_protobuf.rs", "rank": 10, "score": 237975.15265362215 }, { "content": "pub fn derive_secret(seed: [u8; 16], derivation_index: &mut u32) -> Result<[u8;32]> {\n\n let extended_secret = ExtendedSecretKey::from_seed(seed)?;\n\n let index = ChildIndex::hardened(derivation_index.clone()).unwrap();\n\n let key = extended_secret.derive_child(index)?;\n\n *derivation_index += 1;\n\n Ok(key.secret_key)\n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::base64url::decode;\n\n use derivation_path::DerivationPath;\n\n\n\n fn root(seed: &str) -> ExtendedSecretKey {\n\n ExtendedSecretKey::from_seed(decode(seed).unwrap()).unwrap()\n\n }\n\n\n\n #[test]\n\n fn from_seed_test() {\n\n let vector1_path: DerivationPath = \"m/0'/1'/2'/2'/1000000000'\".parse().unwrap();\n", "file_path": "idp2p-common/src/bip32.rs", "rank": 11, "score": 230108.73501290422 }, { "content": "pub fn create_msg(secret: Idp2pSecret, from: Identity, to: Identity, body: &[u8]) -> Result<()> {\n\n let to_state = from.verify()?;\n\n let raw = IdRawMessage {\n\n id: create_random::<32>().to_vec(),\n\n from: from.id,\n\n to: to.id,\n\n created_at: 0,\n\n body: vec![],\n\n };\n\n let raw_bytes = raw.encode_to_vec(); \n\n let proof = secret.sign(&raw_bytes);\n\n let signed = IdSignedMessage{\n\n signer_kid : None,\n\n raw: raw_bytes,\n\n proof: proof\n\n };\n\n let signed_bytes = signed.encode_to_vec();\n\n //let encrypted = \n\n todo!()\n\n}\n", "file_path": "idp2p-core/src/didcomm/protobuf/mod.rs", "rank": 12, "score": 220595.31673525463 }, { "content": "pub fn decode<T: DeserializeOwned>(value: &str) -> Result<T> {\n\n let m64 = format!(\"u{}\", value);\n\n let s = multibase::decode(&m64)?.1;\n\n let bytes = std::str::from_utf8(&s)?;\n\n let t: T = serde_json::from_str(&bytes)?;\n\n Ok(t)\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 13, "score": 219903.50507176615 }, { "content": "pub fn decode_sized<const N: usize>(s: &str) -> anyhow::Result<[u8; N]> {\n\n let r = multibase::decode(s)?.1;\n\n let data: [u8; N] = r.try_into().expect(\"Data size is not equal to given size\");\n\n Ok(data)\n\n}\n\n\n", "file_path": "idp2p-common/src/base64url.rs", "rank": 14, "score": 190632.38915175683 }, { "content": "pub fn is_idp2p(id: &str) -> bool {\n\n let re = regex::Regex::new(r\"did:p2p:*\").unwrap();\n\n re.is_match(id)\n\n}\n\n\n\nmod idp2p_proto {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/idp2p.pb.rs\"));\n\n}\n\n//pub mod identity;\n\n//pub mod didcomm;\n", "file_path": "idp2p-core/src/lib.rs", "rank": 15, "score": 186558.91880321765 }, { "content": "fn save_doc(did: &mut Identity, secret: EdSecret) {\n\n let set_change = EventLogChange::SetProof(ProofStatement {\n\n key: vec![],\n\n value: vec![],\n\n });\n\n let change = vec![set_change];\n\n let signer = secret.to_publickey();\n\n let payload = did\n\n .microledger\n\n .create_event(&signer, &hash(&signer), change);\n\n let proof = secret.sign(&payload);\n\n did.microledger.save_event(payload, &proof);\n\n}\n\n\n", "file_path": "idp2p-client/benches/verify_did.rs", "rank": 16, "score": 144688.30562353175 }, { "content": "pub fn create_random<const N: usize>() -> [u8; N] {\n\n let mut key_data = [0u8; N];\n\n let mut key_rng = thread_rng();\n\n 
key_rng.fill_bytes(&mut key_data);\n\n key_data\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::json;\n\n /*#[test]\n\n fn hash_test() {\n\n let data = json!({\n\n \"name\": \"John Doe\"\n\n });\n\n let expected = \"botmu6ay364t223hj4akn7amds6rpwquuavx54demvy5e4vkn5uuq\";\n\n let digest = hash(serde_json::to_string(&data).unwrap().as_bytes());\n\n let result = encode(&digest);\n\n assert_eq!(result, expected);\n", "file_path": "idp2p-common/src/lib.rs", "rank": 17, "score": 125453.99559066494 }, { "content": "pub trait IdentityStateEventHandler<T> {\n\n fn handle_event(&mut self, timestamp: i64, event: T) -> Result<()>;\n\n}\n\n\n\nimpl Into<IdentityDocument> for IdentityState {\n\n fn into(self) -> IdentityDocument {\n\n for assetion_key in self.assertion_keys {}\n\n let doc = IdentityDocument {\n\n context: vec![\n\n \"https://www.w3.org/ns/did/v1\".to_string(),\n\n \"https://w3id.org/security/suites/ed25519-2020/v1\".to_string(),\n\n \"https://w3id.org/security/suites/x25519-2020/v1\".to_string(),\n\n ],\n\n id: format!(\"did:p2p:{}\", idp2p_common::encode(&self.id)),\n\n controller: format!(\"did:p2p:{}\", idp2p_common::encode(&self.id)),\n\n verification_method: todo!(),\n\n assertion_method: todo!(),\n\n authentication: todo!(),\n\n key_agreement: todo!(),\n\n };\n\n todo!()\n\n }\n\n}\n", "file_path": "idp2p-core/src/identity/state.rs", "rank": 18, "score": 113271.05477367704 }, { "content": "pub trait Idp2pCid {\n\n fn new_cid(content: &[u8], codec: Idp2pCodec) -> Cid;\n\n}\n\n\n\nimpl Idp2pCid for Cid {\n\n fn new_cid(content: &[u8], codec: Idp2pCodec) -> Cid {\n\n let mh = Code::Sha2_256.digest(content);\n\n Cid::new_v1(codec as u64, mh)\n\n }\n\n}", "file_path": "idp2p-core/src/multi_id.rs", "rank": 19, "score": 112254.29982181315 }, { "content": "pub trait IdentityStateEventHandler{\n\n fn handle<T>(&mut self, timestamp: i64, event: T) -> Result<()>;\n\n}\n\n\n", "file_path": "idp2p-client/did/core.rs", "rank": 20, "score": 112219.48543905074 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let secret_str = \"beilmx4d76udjmug5ykpy657qa3pfsqbcu7fbbtuk3mgrdrxssseq\";\n\n let secret = EdSecret::from_str(secret_str).unwrap();\n\n let ed_key_digest = secret.to_publickey_digest().unwrap();\n\n let mut did = Identity::new(&ed_key_digest, &ed_key_digest);\n\n for _ in 1..10 {\n\n save_doc(&mut did, secret.clone());\n\n }\n\n c.bench_function(\"verify identity\", |b| {\n\n b.iter(|| black_box(did.verify()))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);*/\n", "file_path": "idp2p-client/benches/verify_did.rs", "rank": 21, "score": 97327.93862370714 }, { "content": "pub fn build_gossipsub() -> Gossipsub {\n\n let message_id_fn = |message: &GossipsubMessage| {\n\n let mut s = DefaultHasher::new();\n\n message.data.hash(&mut s);\n\n MessageId::from(s.finish().to_string())\n\n };\n\n let gossipsub_config = GossipsubConfigBuilder::default()\n\n .heartbeat_interval(Duration::from_secs(10))\n\n .validation_mode(ValidationMode::Anonymous)\n\n .message_id_fn(message_id_fn)\n\n .build()\n\n .expect(\"Valid config\");\n\n let gossipsub_result = Gossipsub::new(MessageAuthenticity::Anonymous, gossipsub_config);\n\n let gossipsub = gossipsub_result.expect(\"Correct configuration\");\n\n gossipsub\n\n}\n\n\n", "file_path": "idp2p-gossip/src/protocol/gossip.rs", "rank": 22, "score": 92931.48818043788 }, { "content": "struct IdP2pMessage {\n\n #[prost(bytes, tag = \"1\")]\n\n pub from: Vec<u8>,\n\n 
#[prost(bytes, tag = \"2\")]\n\n pub seqno: Vec<u8>,\n\n #[prost(oneof = \"Idp2pMessageType\", tags = \"3\")]\n\n pub payload: Option<Idp2pMessageType>,\n\n}\n\n\n\n#[derive(Eq, PartialOrd, Ord, Clone, PartialEq, ::prost::Oneof)]\n\npub enum Idp2pMessageType {\n\n #[prost(string, tag = \"1\")]\n\n Publish(::prost::alloc::string::String),\n\n #[prost(message, tag = \"2\")]\n\n Subscribe(::prost::alloc::string::String),\n\n #[prost(string, tag = \"3\")]\n\n Connect(::prost::alloc::string::String),\n\n}*/\n", "file_path": "idp2p-gossip/src/lib.rs", "rank": 23, "score": 89883.07906788473 }, { "content": "pub trait IdGossip {\n\n fn subscribe_to(&mut self, id: &str) -> Result<()>;\n\n fn publish_get(&mut self, id: &str) -> Result<()>;\n\n fn publish_post(&mut self, did: Identity) -> Result<()>;\n\n}\n\n\n\nimpl IdGossip for Gossipsub {\n\n fn subscribe_to(&mut self, id: &str) -> Result<()> {\n\n let topic = IdentTopic::new(id);\n\n self.subscribe(&topic)?;\n\n Ok(())\n\n }\n\n\n\n fn publish_get(&mut self, id: &str) -> Result<()> {\n\n let topic = IdentTopic::new(id);\n\n let msg = IdGossipMessage::new_get();\n\n idp2p_common::log::info!(\n\n \"Published get: {}\",\n\n idp2p_common::serde_json::to_string_pretty(&msg)?\n\n );\n", "file_path": "idp2p-gossip/src/protocol/gossip.rs", "rank": 24, "score": 80091.79713059524 }, { "content": "pub trait IdentityBehaviour {\n\n fn create(input: CreateIdentityInput) -> Result<Self>\n\n where\n\n Self: Sized;\n\n fn change(&mut self, input: ChangeInput) -> Result<()>;\n\n fn recover(&mut self, input: RecoverInput) -> Result<()>;\n\n fn verify(&self) -> Result<IdentityState, IdentityError>;\n\n}\n", "file_path": "idp2p-core/src/identity/mod.rs", "rank": 25, "score": 79935.94223365808 }, { "content": "pub trait EventLogResolver {\n\n fn try_resolve_payload(&self, event_id: &Idp2pDigest) -> Result<EventLogPayload>;\n\n}\n\n\n\nimpl EventLogResolver for EventLog {\n\n fn try_resolve_payload(&self, event_id: &Idp2pDigest) -> Result<EventLogPayload> {\n\n let digest = self\n\n .event_id\n\n .to_owned()\n\n .ok_or(IdentityError::InvalidProtobuf)?;\n\n let digest: Idp2pDigest = digest.try_into()?;\n\n if !digest.is_hash_of(&self.payload) {\n\n bail!(IdentityError::InvalidId)\n\n }\n\n let payload = EventLogPayload::decode(&*self.payload)?;\n\n let prev: Idp2pDigest = payload\n\n .clone()\n\n .previous\n\n .ok_or(IdentityError::InvalidProtobuf)?\n\n .try_into()?;\n", "file_path": "idp2p-core/src/identity/protobuf/mapper.rs", "rank": 26, "score": 76722.95118593777 }, { "content": "type ReqResEvent = RequestResponseEvent<IdRequest, IdResponse>;\n\nimpl IdentityNodeBehaviour {\n\n pub async fn handle_client_request(&mut self, event: ReqResEvent) -> Result<()> {\n\n if let RequestResponseEvent::Message { message, peer } = event {\n\n match message {\n\n RequestResponseMessage::Request {\n\n request, channel, ..\n\n } => {\n\n let mut response = IdResponse(IdResponsePayload::Ok(IdResponsePayloadOk::None));\n\n match request.0 {\n\n IdRequestPayload::WalletMessage(_) => {\n\n response = IdResponse(IdResponsePayload::Error(\n\n \"InvalidMessageType\".to_owned(),\n\n ));\n\n log::error!(\"Wallet message is not supported on node\");\n\n }\n\n _ => {\n\n response = IdResponse(IdResponsePayload::Error(\n\n \"InvalidMessageType\".to_owned(),\n\n ));\n", "file_path": "idp2p-client/src/swarm.rs", "rank": 27, "score": 74419.02294272481 }, { "content": "type ReqResEvent = RequestResponseEvent<IdRequest, IdResponse>;\n\nimpl IdentityNodeBehaviour {\n\n pub fn handle_mdns_event(&mut 
self, event: MdnsEvent) {\n\n match event {\n\n MdnsEvent::Discovered(list) => {\n\n for (peer, _) in list {\n\n self.gossipsub.add_explicit_peer(&peer);\n\n }\n\n }\n\n MdnsEvent::Expired(list) => {\n\n for (peer, _) in list {\n\n if !self.mdns.has_node(&peer) {\n\n self.gossipsub.remove_explicit_peer(&peer);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub async fn handle_gossip_event(&mut self, event: GossipsubEvent) -> Result<()> {\n", "file_path": "idp2p-node/src/swarm.rs", "rank": 28, "score": 74419.02294272481 }, { "content": "fn main(){\n\n println!(\"Hello idp2p\");\n\n}", "file_path": "examples/simple.rs", "rank": 29, "score": 55370.61416157974 }, { "content": "fn main() {\n\n let mut prost_build = prost_build::Config::new();\n\n prost_build.protoc_arg(\"--experimental_allow_proto3_optional\");\n\n\n\n prost_build.compile_protos(\n\n &[\n\n \"proto/identity.proto\",\n\n \"proto/message.proto\",\n\n \"proto/gossip_msg.proto\",\n\n ],\n\n &[\"proto\"],\n\n )\n\n .unwrap();\n\n}\n", "file_path": "idp2p-core/build.rs", "rank": 30, "score": 54203.640860931366 }, { "content": "fn main() {\n\n prost_build::compile_protos(&[\"src/rpc.proto\"], &[\"src\"]).unwrap();\n\n}", "file_path": "idp2p-gossip/build.rs", "rank": 31, "score": 54203.640860931366 }, { "content": "pub trait IdentityBehaviour {\n\n fn create(input: CreateIdentityInput) -> Result<Self>\n\n where\n\n Self: Sized;\n\n fn verify(&self) -> Result<IdentityState>;\n\n fn recover(&mut self, signer: Idp2pSecret, next_key_digest: Idp2pKeyDigest, rec_digest: Idp2pKeyDigest) -> Result<()>;\n\n fn add_events(&mut self, signer: Idp2pSecret, next_key_digest: Idp2pKeyDigest, events: IdentityEvents) -> Result<()>;\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct IdentityEvents {\n\n pub proofs: HashMap<Vec<u8>, Vec<u8>>,\n\n pub authentication_key: Idp2pKey,\n\n pub agreement_key: Idp2pAgreementKey,\n\n pub assertion_key: Idp2pKey,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct AssertionKeyState {\n\n pub valid_at: i64,\n", "file_path": "idp2p-client/did/core.rs", "rank": 32, "score": 49952.14651065488 }, { "content": "pub trait WalletPersister {\n\n fn wallet_exists(&self) -> bool;\n\n fn get_wallet(&self) -> Result<PersistedWallet>;\n\n fn persist_wallet(&self, wallet: PersistedWallet) -> Result<()>;\n\n //fn persist_messages(&self, wallet: PersistedWallet) -> Result<()>;\n\n}\n\n\n\npub mod bip32;\n\npub mod raw;\n\npub mod session;\n\npub mod store;\n\npub mod wallet;\n", "file_path": "idp2p-wallet/src/lib.rs", "rank": 33, "score": 48982.118189173605 }, { "content": "pub trait WalletPersister {\n\n fn wallet_exists(&self) -> bool;\n\n fn get_wallet(&self) -> Result<PersistedWallet>;\n\n fn persist_wallet(&self, wallet: PersistedWallet) -> Result<()>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct PersistedWallet {\n\n pub raw: RawWallet,\n\n #[serde(with = \"encode_vec\")]\n\n pub ciphertext: Vec<u8>,\n\n}\n\n\n\npub struct Wallet {\n\n pub raw: Option<RawWallet>,\n\n pub session: Option<WalletSession>,\n\n}\n\n\n\nimpl Wallet {\n\n pub fn persist<T: WalletPersister>(&self, persister: T) -> Result<()> {\n", "file_path": "idp2p-client/src/wallet.rs", "rank": 34, "score": 48982.118189173605 }, { "content": "pub mod identity;\n\npub mod eventlog;\n\npub mod microledger;", "file_path": "idp2p-client/did/mod.rs", "rank": 50, "score": 37074.96439478391 }, { "content": "mod tests {\n\n use super::*;\n\n use crate::base64url::encode;\n\n use ed25519_dalek::{PublicKey, Signer, 
Verifier};\n\n #[test]\n\n fn proof_test() {\n\n let secret_str = \"bclc5pn2tfuhkqmupbr3lkyc5o4g4je6glfwkix6nrtf7hch7b3kq\";\n\n let secret = EdSecret::from_str(secret_str).unwrap();\n\n let keypair = secret.to_keypair();\n\n let sig = keypair.sign(&vec![0]);\n\n let public_key_bytes = secret.to_publickey();\n\n let public_key = PublicKey::from_bytes(&public_key_bytes).unwrap();\n\n let is_valid = public_key.verify(&vec![0], &sig).is_ok();\n\n assert!(is_valid);\n\n }\n\n\n\n #[test]\n\n fn to_publickey_test() {\n\n let secret_str = \"bclc5pn2tfuhkqmupbr3lkyc5o4g4je6glfwkix6nrtf7hch7b3kq\";\n\n let secret = EdSecret::from_str(secret_str).unwrap();\n", "file_path": "idp2p-core/src/secret.rs", "rank": 51, "score": 36089.03760319199 }, { "content": "use anyhow::*;\n\nuse ed25519_dalek::{Keypair, PublicKey, SecretKey, Signer};\n\nuse serde::{Deserialize, Serialize};\n\nuse sha2::{Digest, Sha256};\n\nuse std::convert::TryInto;\n\nuse x25519_dalek::StaticSecret;\n\n\n\nuse crate::{key::{Idp2pAgreementKey, Idp2pKey}, base64url::decode_sized};\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub enum Idp2pSecret {\n\n Idp2p25519 { secret: EdSecret },\n\n}\n\n\n\nimpl Into<Idp2pKey> for Idp2pSecret {\n\n fn into(self) -> Idp2pKey {\n\n match self {\n\n Idp2pSecret::Idp2p25519 { secret } => Idp2pKey::Idp2pEd25519 {\n\n public: secret.to_keypair().public.to_bytes().to_vec(),\n\n },\n", "file_path": "idp2p-core/src/secret.rs", "rank": 52, "score": 36086.49592602285 }, { "content": "\n\n pub fn from_str(s: &str) -> Result<Self> {\n\n Ok(EdSecret(decode_sized(s)?))\n\n }\n\n\n\n pub fn to_bytes(&self) -> [u8; 32] {\n\n self.0\n\n }\n\n\n\n pub fn to_keypair(&self) -> Keypair {\n\n let secret_key = SecretKey::from_bytes(&self.0).unwrap();\n\n let public_key: PublicKey = PublicKey::from(&secret_key);\n\n let public: [u8; 32] = public_key.to_bytes();\n\n let mut new_secret: Vec<u8> = vec![];\n\n new_secret.extend(self.0);\n\n new_secret.extend(public);\n\n Keypair::from_bytes(&new_secret).unwrap()\n\n }\n\n\n\n pub fn to_publickey(&self) -> [u8; 32] {\n", "file_path": "idp2p-core/src/secret.rs", "rank": 53, "score": 36084.639166316374 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EdSecret([u8; 32]);\n\n\n\nimpl EdSecret {\n\n pub fn new() -> Self {\n\n EdSecret(crate::create_random::<32>())\n\n }\n\n\n\n pub fn from(data: [u8; 32]) -> Self {\n\n EdSecret(data)\n\n }\n\n\n\n pub fn from_bytes(data: &[u8]) -> Self {\n\n EdSecret(data.try_into().unwrap())\n\n }\n", "file_path": "idp2p-core/src/secret.rs", "rank": 54, "score": 36083.103863166725 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Into<Idp2pAgreementKey> for Idp2pSecret {\n\n fn into(self) -> Idp2pAgreementKey {\n\n match self {\n\n Idp2pSecret::Idp2p25519 { secret } => Idp2pAgreementKey::Idp2pX25519 {\n\n public: secret.to_key_agreement().to_bytes().to_vec(),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Idp2pSecret {\n\n pub fn sign(&self, payload: &[u8]) -> Vec<u8> {\n\n match self {\n\n Idp2pSecret::Idp2p25519 { secret } => {\n\n let keypair = secret.to_keypair();\n\n keypair.sign(&payload).to_bytes().to_vec()\n", "file_path": "idp2p-core/src/secret.rs", "rank": 55, "score": 36082.01793906344 }, { "content": " let sender_secret = StaticSecret::from(self.0);\n\n let receiver_public = x25519_dalek::PublicKey::from(public);\n\n sender_secret.diffie_hellman(&receiver_public)\n\n }\n\n\n\n pub fn sign<T: Serialize>(&self, t: &T) -> [u8; 64] {\n\n let payload_json = 
serde_json::to_string(t).unwrap();\n\n let bytes = payload_json.as_bytes();\n\n let keypair = self.to_keypair();\n\n keypair.sign(&bytes).to_bytes()\n\n }\n\n\n\n pub fn sign_str(&self, s: &str) -> [u8; 64] {\n\n let bytes = s.as_bytes();\n\n let keypair = self.to_keypair();\n\n keypair.sign(&bytes).to_bytes()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "idp2p-core/src/secret.rs", "rank": 56, "score": 36081.34515200924 }, { "content": " let keypair: Keypair = self.to_keypair();\n\n let public = keypair.public.to_bytes();\n\n public\n\n }\n\n\n\n pub fn to_publickey_digest(&self) -> Result<[u8; 32]> {\n\n let public = self.to_publickey();\n\n let hash = Sha256::digest(&public);\n\n let result: [u8; 32] = hash.try_into()?;\n\n Ok(result)\n\n }\n\n\n\n pub fn to_key_agreement(&self) -> x25519_dalek::PublicKey {\n\n let secret_data: [u8; 32] = self.0.clone().try_into().unwrap();\n\n let secret_key = StaticSecret::from(secret_data);\n\n let public_key = x25519_dalek::PublicKey::from(&secret_key);\n\n public_key\n\n }\n\n\n\n pub fn to_shared_secret(&self, public: [u8; 32]) -> x25519_dalek::SharedSecret {\n", "file_path": "idp2p-core/src/secret.rs", "rank": 57, "score": 36077.86702639429 }, { "content": " let expected = \"brgzkmbdnyevdth3sczvxjumd6bdl6ngn6eqbsbpazuvq42bfzk2a\";\n\n let public_key = secret.to_publickey();\n\n assert_eq!(encode(&public_key), expected);\n\n }\n\n\n\n /*#[test]\n\n fn to_agreement_key_test() {\n\n let secret_str = \"bclc5pn2tfuhkqmupbr3lkyc5o4g4je6glfwkix6nrtf7hch7b3kq\";\n\n let secret = EdSecret::from_str(secret_str).unwrap();\n\n let expected = \"bbgitzmdocc3y2gvcmtiihr2gyw4xjppux7ea3gdo6afwy6gbrmpa\";\n\n let public_key = secret.to_key_agreement();\n\n assert_eq!(encode(&public_key), expected);\n\n }\n\n\n\n #[test]\n\n fn to_shared_key_test() {\n\n let alice_secret = EdSecret::new();\n\n let bob_secret = EdSecret::new();\n\n let alice_public = alice_secret.to_key_agreement();\n\n let bob_public = bob_secret.to_key_agreement();\n\n let alice_shared = alice_secret.to_shared_secret(bob_public);\n\n let bob_shared = bob_secret.to_shared_secret(alice_public);\n\n assert_eq!(alice_shared.as_bytes(), bob_shared.as_bytes());\n\n }*/\n\n}\n", "file_path": "idp2p-core/src/secret.rs", "rank": 58, "score": 36071.33860680688 }, { "content": "use crate::{base64url::encode_vec, ED25519_CODE};\n\nuse anyhow::Result;\n\nuse ed25519_dalek::{PublicKey, Signature, Verifier};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)]\n\n#[serde(tag = \"type\")]\n\npub enum Idp2pKey {\n\n Idp2pEd25519 {\n\n #[serde(with = \"encode_vec\")]\n\n public: Vec<u8>,\n\n },\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)]\n\n#[serde(tag = \"type\")]\n\npub enum Idp2pAgreementKey {\n\n Idp2pX25519 {\n\n #[serde(with = \"encode_vec\")]\n\n public: Vec<u8>,\n", "file_path": "idp2p-core/src/key.rs", "rank": 59, "score": 36068.44543734056 }, { "content": " public: ed25519_dalek::PublicKey::from_bytes(&[1u8; 32])?,\n\n };\n\n let encoded = serde_json::to_string_pretty(&key)?;\n\n eprintln!(\"{encoded}\");\n\n let decoded: Idp2pKey = serde_json::from_str(&encoded)?;\n\n eprintln!(\"{:?}\", decoded);\n\n Ok(())\n\n }\n\n #[test]\n\n fn key_from_bytes_test() -> Result<()> {\n\n let key = Idp2pKey::Idp2pEd25519 {\n\n public: PublicKey::from_bytes(&[1u8; 32])?,\n\n };\n\n let bytes: Vec<u8> = key.into();\n\n let decoded_key = Idp2pKey::from_bytes(&bytes)?;\n\n matches!(decoded_key, Idp2pKey::Idp2pEd25519 { public } if 
public.to_bytes() == [1u8;32] );\n\n Ok(())\n\n }\n\n #[test]\n\n fn key_serialize_test() -> Result<()> {\n", "file_path": "idp2p-core/src/key.rs", "rank": 60, "score": 36065.97302198247 }, { "content": " match &self {\n\n Idp2pKey::Idp2pEd25519 { public } => {\n\n let pubkey = PublicKey::from_bytes(public)?;\n\n let sig_bytes: [u8; 64] = sig.try_into()?;\n\n let signature = Signature::from(sig_bytes);\n\n let result = pubkey\n\n .verify(payload, &signature)\n\n .map_err(|e| anyhow::anyhow!(e.to_string()))?;\n\n Ok(result)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n /*#[test]\n\n fn key_from_bytes_test() -> Result<()> {\n\n let key = Idp2pKey::Idp2pEd25519 {\n", "file_path": "idp2p-core/src/key.rs", "rank": 61, "score": 36065.67443274854 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn try_from_test() {\n\n let mut bytes = [0u8; 33];\n\n bytes[1..].copy_from_slice(&[1u8; 32]);\n\n let key: Idp2pKey = bytes.to_vec().try_into().unwrap();\n\n assert!(\n\n matches!(key, Idp2pKey::Idp2pEd25519 { public } if public.as_bytes().to_owned() == [1u8; 32])\n\n );\n\n }\n\n\n", "file_path": "idp2p-core/src/key.rs", "rank": 62, "score": 36065.488526889305 }, { "content": " };\n\n let sig = secret.sign(&[0u8; 10]);\n\n let key: Idp2pKey = secret.into();\n\n matches!(key.verify(&[0u8; 10], &sig), Ok(true));\n\n Ok(())\n\n }*/\n\n}\n\n\n\n/*impl FromStr for Idp2pKey {\n\n type Err = anyhow::Error;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let (_, key_data) = multibase::decode(&s).unwrap();\n\n Ok(key_data.try_into().unwrap())\n\n }\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for Idp2pKey {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n", "file_path": "idp2p-core/src/key.rs", "rank": 63, "score": 36064.967890554435 }, { "content": " },\n\n}\n\n\n\nimpl Idp2pKey {\n\n pub fn new(code: u64, public: &[u8]) -> Result<Self> {\n\n match code {\n\n ED25519_CODE => Ok(Idp2pKey::Idp2pEd25519 {\n\n public: public.to_vec(),\n\n }),\n\n _ => anyhow::bail!(\"Invalid key code\"),\n\n }\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n match &self {\n\n Idp2pKey::Idp2pEd25519 { public } => public.to_vec(),\n\n }\n\n }\n\n\n\n pub fn verify(&self, payload: &[u8], sig: &[u8]) -> Result<()> {\n", "file_path": "idp2p-core/src/key.rs", "rank": 64, "score": 36064.13043892759 }, { "content": " let key_type = value[0];\n\n match key_type {\n\n 0 => {\n\n let bytes: [u8; 32] = value[1..].try_into()?;\n\n Ok(Self::Idp2pEd25519 {\n\n public: ed25519_dalek::PublicKey::from_bytes(&bytes)?,\n\n })\n\n }\n\n _ => anyhow::bail!(\"Not supported\"),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Idp2pKey> for Vec<u8> {\n\n fn from(value: Idp2pKey) -> Self {\n\n match value {\n\n Idp2pKey::Idp2pEd25519 { public } => {\n\n let mut bytes = [0u8; 33];\n\n bytes[1..].copy_from_slice(public.as_bytes());\n\n bytes.to_vec()\n", "file_path": "idp2p-core/src/key.rs", "rank": 65, "score": 36063.21948846532 }, { "content": " let key = Idp2pKey::try_from(0xed, &[0u8; 32])?;\n\n let encoded = serde_json::to_string(&key).unwrap();\n\n let decoded: Idp2pKey = serde_json::from_str(&encoded)?;\n\n assert_eq!(decoded, key);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn agreement_key_serialize_test() -> Result<()> {\n\n let key = Idp2pAgreementKey::try_from(0xec, &[0u8; 32])?;\n\n let encoded = serde_json::to_string(&key).unwrap();\n\n let decoded: Idp2pAgreementKey = serde_json::from_str(&encoded)?;\n\n 
assert_eq!(decoded, key);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn verify_test() -> Result<()> {\n\n let secret = Idp2pSecret::Idp2p25519 {\n\n secret: EdSecret::new(),\n", "file_path": "idp2p-core/src/key.rs", "rank": 66, "score": 36061.536721238816 }, { "content": " #[test]\n\n fn encode_test() {\n\n let proof = Idp2pKey::Idp2pEd25519 {\n\n public: ed25519_dalek::PublicKey::from_bytes(&[0u8; 32]).unwrap(),\n\n };\n\n let vec: Vec<u8> = proof.into();\n\n assert_eq!(vec.len(), 33);\n\n }\n\n}\n\n */\n", "file_path": "idp2p-core/src/key.rs", "rank": 67, "score": 36056.34131320588 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct JwmHeader {\n\n pub kid: String,\n\n}\n\n\n\npub mod jwe;\n\npub mod jwk;\n\npub mod jws;\n\npub mod jwm;\n\npub mod jpm;\n", "file_path": "idp2p-client/json/mod.rs", "rank": 68, "score": 35921.56888320693 }, { "content": "pub mod identity;\n\npub mod didcomm;", "file_path": "idp2p-client/did/json/mod.rs", "rank": 69, "score": 35901.1335644202 }, { "content": "pub mod identity;", "file_path": "idp2p-client/did/protobuf/mod.rs", "rank": 70, "score": 35900.149416141016 }, { "content": "}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct ProofState {\n\n pub valid_at: i64,\n\n pub expired_at: Option<i64>,\n\n pub value: Vec<u8>,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct IdentityState {\n\n pub id: Vec<u8>,\n\n pub event_id: Idp2pDigest,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub recovery_key_digest: Idp2pKeyDigest,\n\n pub assertion_keys: Vec<KeyState>,\n\n pub authentication_keys: Vec<KeyState>,\n\n pub agreement_keys: Vec<AgreementKeyState>,\n\n pub proofs: HashMap<Vec<u8>, ProofState>,\n\n}\n\n\n", "file_path": "idp2p-core/src/identity/state.rs", "rank": 71, "score": 35101.5536219079 }, { "content": "use std::collections::HashMap;\n\n\n\nuse idp2p_common::{\n\n anyhow::Result, key::{Idp2pKey,Idp2pAgreementKey}, digest::{Idp2pKeyDigest, Idp2pDigest},\n\n};\n\n\n\nuse super::doc::IdentityDocument;\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct KeyState {\n\n pub valid_at: i64,\n\n pub expired_at: Option<i64>,\n\n pub key: Idp2pKey,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub struct AgreementKeyState {\n\n pub valid_at: i64,\n\n pub expired_at: Option<i64>,\n\n pub key: Idp2pAgreementKey,\n", "file_path": "idp2p-core/src/identity/state.rs", "rank": 72, "score": 35091.38515682408 }, { "content": "use cid::{multihash::{Code, MultihashDigest}, Cid};\n\n\n\nuse crate::Idp2pCodec;\n\n\n", "file_path": "idp2p-core/src/multi_id.rs", "rank": 73, "score": 34973.35372921573 }, { "content": "use crate::IdentityError;\n\n\n\nuse self::{\n\n input::{ChangeInput, CreateIdentityInput, RecoverInput},\n\n state::IdentityState,\n\n};\n\nuse idp2p_common::anyhow::Result;\n\n\n\npub mod doc;\n\npub mod input;\n\npub mod state;\n\npub mod protobuf;\n\npub mod json;\n\n\n", "file_path": "idp2p-core/src/identity/mod.rs", "rank": 74, "score": 34817.756713140545 }, { "content": "pub mod gossip;\n\npub mod req_res;", "file_path": "idp2p-gossip/src/protocol/mod.rs", "rank": 75, "score": 34803.07444208843 }, { "content": "pub mod protobuf;", "file_path": "idp2p-core/src/didcomm/mod.rs", "rank": 76, "score": 34802.142847793424 }, { "content": "", "file_path": "idp2p-core/src/protocol/mod.rs", "rank": 77, "score": 34794.31592632003 }, { "content": "trait EventLogResolver {\n\n fn try_resolve(&self, event_id: &[u8]) -> Result<EventLogPayload>;\n\n}\n\n\n\nimpl 
EventLogResolver for idp2p_proto::EventLog {\n\n fn try_resolve(&self, event_id: &[u8]) -> Result<EventLogPayload> {\n\n let multi_id = Multihash::from_bytes(&self.event_id)?;\n\n if !multi_id.is_hash_of(&self.payload)? {\n\n bail!(IdentityError::InvalidId)\n\n }\n\n let payload = EventLogPayload::decode(&*self.payload)?;\n\n if payload.previous != event_id {\n\n bail!(IdentityError::InvalidPrevious)\n\n }\n\n Ok(payload)\n\n }\n\n}\n\n\n\nimpl IdentityBehaviour for Identity {\n\n fn create(input: CreateIdentityInput) -> Result<Self> {\n", "file_path": "idp2p-client/did/_protobuf.rs", "rank": 78, "score": 33944.71043220662 }, { "content": " pub signer_key: Vec<u8>,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub timestamp: i64,\n\n pub change: EventLogChange,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLog {\n\n pub event_id: Idp2pDigest,\n\n pub payload: EventLogPayload,\n\n #[serde(with = \"encode_vec\")]\n\n pub proof: Vec<u8>, // if recover assume recovery key\n\n}\n", "file_path": "idp2p-core/src/identity/json/mod.rs", "rank": 79, "score": 33815.224454746305 }, { "content": "use idp2p_common::{\n\n encode_vec, key::{Idp2pKey,Idp2pAgreementKey}, digest::{Idp2pKeyDigest, Idp2pDigest},\n\n};\n\nuse serde::{Deserialize, Serialize};\n\npub mod handler;\n\npub mod mapper;\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Identity {\n\n pub id: String,\n\n pub microledger: Microledger,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Microledger {\n\n pub inception: IdentityInception,\n\n #[serde(skip_serializing_if = \"Vec::is_empty\", default)]\n\n pub event_logs: Vec<EventLog>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n", "file_path": "idp2p-core/src/identity/json/mod.rs", "rank": 80, "score": 33811.923486929474 }, { "content": "pub struct IdentityInception {\n\n pub version: i32,\n\n pub timestamp: i64,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub recovery_key_digest: Idp2pKeyDigest,\n\n pub events: Vec<EventType>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum EventLogChange {\n\n Recover { digest: Idp2pKeyDigest },\n\n Events { events: Vec<EventType> },\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Idp2pProof {\n\n #[serde(with = \"encode_vec\")]\n\n pub key: Vec<u8>,\n\n #[serde(with = \"encode_vec\")]\n", "file_path": "idp2p-core/src/identity/json/mod.rs", "rank": 81, "score": 33808.80754027662 }, { "content": " pub value: Vec<u8>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum EventType {\n\n CreateAssertionKey { key: Idp2pKey },\n\n CreateAuthenticationKey { key: Idp2pKey },\n\n CreateAgreementKey { key: Idp2pAgreementKey },\n\n RevokeAssertionKey { kid: Idp2pDigest },\n\n RevokeAuthenticationKey { kid: Idp2pDigest },\n\n RevokeAgreementKey { kid: Idp2pDigest },\n\n SetProof { proof: Idp2pProof },\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLogPayload {\n\n pub version: i32,\n\n pub previous: Idp2pDigest,\n\n #[serde(with = \"encode_vec\")]\n", "file_path": "idp2p-core/src/identity/json/mod.rs", "rank": 82, "score": 33801.07183087704 }, { "content": "use crate::identity::{IdentityBehaviour, protobuf::*};\n\nuse crate::idp2p_proto::{IdRawMessage, IdSignedMessage, IdEncryptedMessage , Identity};\n\nuse 
idp2p_common::key::Idp2pKey;\n\nuse idp2p_common::{anyhow::Result, create_random, secret::Idp2pSecret};\n\nuse prost::Message;\n\n\n", "file_path": "idp2p-core/src/didcomm/protobuf/mod.rs", "rank": 83, "score": 33779.70534256295 }, { "content": "pub mod event_handler;\n\npub mod handler;\n\npub mod mapper;", "file_path": "idp2p-core/src/identity/protobuf/mod.rs", "rank": 84, "score": 33774.321116396866 }, { "content": "", "file_path": "idp2p-core/src/didcomm/json/mod.rs", "rank": 85, "score": 33761.49495943496 }, { "content": "", "file_path": "idp2p-core/src/protocol/owner/mod.rs", "rank": 86, "score": 33761.49495943496 }, { "content": "", "file_path": "idp2p-core/src/protocol/gossip/mod.rs", "rank": 87, "score": 33761.49495943496 }, { "content": "use idp2p_common::anyhow::Result;\n\nuse crate::{\n\n identity::state::{\n\n KeyState, IdentityState, IdentityStateEventHandler,\n\n },\n\n idp2p_proto::identity_event::EventType,\n\n};\n\n\n\nimpl IdentityStateEventHandler<EventType> for IdentityState {\n\n fn handle_event(&mut self, timestamp: i64, event: EventType) -> Result<()> {\n\n match event {\n\n EventType::CreateAssertionKey(key) => {\n\n let previous_key = self.assertion_keys.last_mut();\n\n if let Some(previous_key) = previous_key {\n\n previous_key.expired_at = Some(timestamp);\n\n }\n\n let assertion_method = KeyState {\n\n valid_at: timestamp,\n\n expired_at: None,\n\n key: key.try_into()?,\n", "file_path": "idp2p-core/src/identity/protobuf/event_handler.rs", "rank": 88, "score": 33002.52015309018 }, { "content": " };\n\n self.assertion_keys.push(assertion_method);\n\n }\n\n EventType::CreateAuthenticationKey(key) => {\n\n let previous_key = self.authentication_keys.last_mut();\n\n if let Some(previous_key) = previous_key {\n\n previous_key.expired_at = Some(timestamp);\n\n }\n\n let authentication_method = KeyState {\n\n valid_at: timestamp,\n\n expired_at: None,\n\n key: key.try_into()?,\n\n };\n\n self.authentication_keys.push(authentication_method);\n\n }\n\n EventType::CreateAgreementKey(key) => {}\n\n EventType::RevokeAssertionKey(kid) => {}\n\n EventType::RevokeAuthenticationKey(kid) => {}\n\n EventType::RevokeAgreementKey(kid) => {}\n\n EventType::SetProof(proof) => {}\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "idp2p-core/src/identity/protobuf/event_handler.rs", "rank": 89, "score": 32988.93972163285 }, { "content": "use super::eventlog::{EventLogChange};\n\nuse crate::IdentityError;\n\nuse idp2p_common::secret::EdSecret;\n\nuse idp2p_common::{anyhow::Result, encode, hash, serde_json, serde_with::skip_serializing_none};\n\nuse idp2p_common::{log, ED25519, X25519};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct Identity {\n\n pub id: String,\n\n pub microledger: MicroLedger,\n\n}\n\n\n\nimpl Identity {\n\n pub fn new(recovery_key_digest: &[u8], next_key_digest: &[u8]) -> Self {\n\n let ledger = MicroLedger::new(recovery_key_digest, next_key_digest);\n\n let id = ledger.inception.get_id();\n\n let did = Identity {\n\n id: format!(\"did:p2p:{id}\"),\n\n microledger: ledger,\n", "file_path": "idp2p-client/did/identity.rs", "rank": 90, "score": 62.07532363526079 }, { "content": "use std::collections::HashMap;\n\n\n\nuse idp2p_common::{\n\n agreement_key::Idp2pAgreementKey, anyhow::Result, encode_vec,\n\n key::Idp2pKey, key_digest::Idp2pKeyDigest, secret::Idp2pSecret, serde_with::skip_serializing_none,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[skip_serializing_none]\n\n#[derive(Serialize, 
Deserialize, PartialEq, Debug, Clone)]\n\npub struct VerificationMethod {\n\n pub id: String,\n\n pub controller: String,\n\n #[serde(rename = \"type\")]\n\n pub typ: String,\n\n #[serde(with = \"encode_vec\", rename = \"publicKeyMultibase\")]\n\n pub bytes: Vec<u8>,\n\n}\n\n\n", "file_path": "idp2p-client/did/core.rs", "rank": 91, "score": 60.6679093520268 }, { "content": " pub next_key_digest: IdKeyDigest,\n\n pub change: Vec<EventLogChange>,\n\n pub timestamp: i64,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLog {\n\n pub payload: EventLogPayload,\n\n #[serde(with = \"encode_vec\")]\n\n pub proof: Vec<u8>, // if recover assume recovery key\n\n}\n\n\n\nimpl EventLog {\n\n pub fn get_id(&self) -> String {\n\n generate_json_cid(self).unwrap()\n\n }\n\n\n\n pub fn verify(&self, public_data: &[u8]) -> bool {\n\n let payload_json = serde_json::to_string(&self.payload).unwrap();\n\n let bytes = payload_json.as_bytes();\n", "file_path": "idp2p-client/did/eventlog.rs", "rank": 92, "score": 59.6007489343249 }, { "content": "use idp2p_common::{\n\n encode_vec, hash, serde_json, serde_with::skip_serializing_none\n\n};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[skip_serializing_none]\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct VerificationMethod {\n\n pub id: String,\n\n pub controller: String,\n\n #[serde(rename = \"type\")]\n\n pub typ: String,\n\n #[serde(with = \"encode_vec\", rename = \"publicKeyMultibase\")]\n\n pub bytes: Vec<u8>,\n\n}\n\n\n\n#[skip_serializing_none]\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct IdDocument {\n\n #[serde(rename = \"@context\")]\n", "file_path": "idp2p-client/did/identity_doc.rs", "rank": 93, "score": 58.38228545929264 }, { "content": "use idp2p_common::{encode_vec, serde_with::skip_serializing_none};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[skip_serializing_none]\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct VerificationMethod {\n\n pub id: String,\n\n pub controller: String,\n\n #[serde(rename = \"type\")]\n\n pub typ: String,\n\n #[serde(with = \"encode_vec\", rename = \"publicKeyMultibase\")]\n\n pub bytes: Vec<u8>,\n\n}\n\n\n\n#[skip_serializing_none]\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct IdentityDocument {\n\n #[serde(rename = \"@context\")]\n\n pub context: Vec<String>,\n\n pub id: String,\n", "file_path": "idp2p-core/src/identity/doc.rs", "rank": 94, "score": 56.00373941549239 }, { "content": " controller: did.id.clone(),\n\n typ: ED25519.to_string(),\n\n bytes: secret.to_publickey().to_vec(),\n\n },\n\n };\n\n let set_agreement = EventLogChange::SetAgreementKey {\n\n verification_method: VerificationMethod {\n\n id: format!(\"{}#{}\", did.id.clone(), encode(&secret.to_key_agreement())),\n\n controller: did.id.clone(),\n\n typ: X25519.to_string(),\n\n bytes: secret.to_key_agreement().to_vec(),\n\n },\n\n };\n\n let change = vec![set_assertion.clone(), set_authentication, set_agreement];\n\n let signer = secret.to_publickey();\n\n let payload = did.microledger.create_event(&signer, &signer, change);\n\n let proof = secret.sign(&payload);\n\n did.microledger.save_event(payload, &proof);\n\n log::info!(\"Created id: {}\", did.id);\n\n did\n", "file_path": "idp2p-client/did/identity.rs", "rank": 95, "score": 55.763978494078685 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::json::did::eventlog::ProofStatement;\n\n\n\n use super::*;\n\n #[test]\n\n fn 
is_next_ok_test() {\n\n let secret_str = \"beilmx4d76udjmug5ykpy657qa3pfsqbcu7fbbtuk3mgrdrxssseq\";\n\n let secret = EdSecret::from_str(secret_str).unwrap();\n\n let ed_key_digest = secret.to_publickey_digest().unwrap();\n\n let mut did = Identity::new(&ed_key_digest, &ed_key_digest);\n\n let previous = did.clone();\n\n\n\n let set_proof = EventLogChange::SetProof(ProofStatement {\n\n key: vec![1],\n\n value: vec![1],\n\n });\n\n let change = vec![set_proof];\n\n let signer = secret.to_publickey();\n\n let payload = did.microledger.create_event(&signer, &signer, change);\n\n let proof = secret.sign(&payload);\n\n did.microledger.save_event(payload, &proof);\n\n let r = previous.is_next(did.clone());\n\n assert!(r.is_ok(), \"{:?}\", r);\n\n }\n\n}\n", "file_path": "idp2p-client/did/identity.rs", "rank": 96, "score": 55.65726239120932 }, { "content": " pub recovery_key_digest: Idp2pKeyDigest,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub events: Vec<IdentityEvent>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum EventLogChange {\n\n Recover { digest: Idp2pKeyDigest },\n\n Events { events: Vec<IdentityEvent> },\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLogPayload {\n\n pub previous: String,\n\n #[serde(with = \"encode_vec\")]\n\n pub signer_key: Vec<u8>,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub timestamp: i64,\n\n pub change: EventLogChange,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLog {\n\n pub payload: EventLogPayload,\n\n #[serde(with = \"encode_vec\")]\n\n pub proof: Vec<u8>, // if recover assume recovery key\n\n}\n", "file_path": "idp2p-client/did/json/identity.rs", "rank": 97, "score": 52.674734239672304 }, { "content": " pub recovery_key_digest: Idp2pKeyDigest,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n //pub events: Vec<IdentityEvent>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum EventLogChange {\n\n Recover { digest: Idp2pKeyDigest },\n\n //Events { events: Vec<IdentityEvent> },\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLogPayload {\n\n pub previous: String,\n\n #[serde(with = \"encode_vec\")]\n\n pub signer_key: Vec<u8>,\n\n pub next_key_digest: Idp2pKeyDigest,\n\n pub timestamp: i64,\n\n pub change: EventLogChange,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct EventLog {\n\n pub payload: EventLogPayload,\n\n #[serde(with = \"encode_vec\")]\n\n pub proof: Vec<u8>, // if recover assume recovery key\n\n}*/\n", "file_path": "idp2p-client/did/json.rs", "rank": 98, "score": 52.67473423967229 }, { "content": "use super::jpm::Jpm;\n\nuse super::jwe::Jwe;\n\nuse super::jws::Jws;\n\nuse idp2p_common::secret::EdSecret;\n\nuse idp2p_common::{anyhow::Result, chrono::Utc, encode_vec};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct IdProfile {\n\n pub name: String,\n\n #[serde(with = \"encode_vec\")]\n\n pub photo: Vec<u8>,\n\n}\n\n\n\nimpl IdProfile {\n\n pub fn new(name: &str, photo: &[u8]) -> Self {\n\n Self {\n\n name: name.to_owned(),\n\n photo: photo.to_owned(),\n\n }\n", "file_path": "idp2p-client/json/jwm.rs", "rank": 99, "score": 52.35130755224298 } ]
Rust
src/demo.rs
Tri-stone/cosmwasm
4c3f22abdc4ec6e957abe15ac58e2eefc2751fe8
#![allow(dead_code)] use crate::traits::{ReadonlyStorage, Storage}; fn len(prefix: &[u8]) -> [u8; 2] { if prefix.len() > 0xFFFF { panic!("only supports namespaces up to length 0xFFFF") } let length_bytes = (prefix.len() as u64).to_be_bytes(); [length_bytes[6], length_bytes[7]] } fn key_prefix(namespace: &[u8]) -> Vec<u8> { let mut out = Vec::with_capacity(namespace.len() + 2); out.extend_from_slice(&len(namespace)); out.extend_from_slice(namespace); out } fn key_prefix_nested(namespaces: &[&[u8]]) -> Vec<u8> { let mut size = namespaces.len(); for &namespace in namespaces { size += namespace.len() + 2; } let mut out = Vec::with_capacity(size); for &namespace in namespaces { let prefix = key_prefix(namespace); out.extend_from_slice(&prefix); } out } pub struct ReadonlyPrefixedStorage<'a, T: ReadonlyStorage> { prefix: Vec<u8>, storage: &'a T, } impl<'a, T: ReadonlyStorage> ReadonlyPrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: ReadonlyStorage> ReadonlyStorage for ReadonlyPrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } pub struct PrefixedStorage<'a, T: Storage> { prefix: Vec<u8>, storage: &'a mut T, } impl<'a, T: Storage> PrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: Storage> ReadonlyStorage for PrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } impl<'a, T: Storage> Storage for PrefixedStorage<'a, T> { fn set(&mut self, key: &[u8], value: &[u8]) { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.set(&k, value) } } #[cfg(test)] mod test { use super::*; use crate::mock::MockStorage; #[test] fn key_prefix_works() { assert_eq!(key_prefix(b""), b"\x00\x00"); assert_eq!(key_prefix(b"a"), b"\x00\x01a"); assert_eq!(key_prefix(b"ab"), b"\x00\x02ab"); assert_eq!(key_prefix(b"abc"), b"\x00\x03abc"); } #[test] fn key_prefix_works_for_long_prefix() { let long_namespace1 = vec![0; 256]; let prefix1 = key_prefix(&long_namespace1); assert_eq!(prefix1.len(), 256 + 2); assert_eq!(&prefix1[0..2], b"\x01\x00"); let long_namespace2 = vec![0; 30000]; let prefix2 = key_prefix(&long_namespace2); assert_eq!(prefix2.len(), 30000 + 2); assert_eq!(&prefix2[0..2], b"\x75\x30"); let long_namespace3 = vec![0; 0xFFFF]; let prefix3 = key_prefix(&long_namespace3); assert_eq!(prefix3.len(), 0xFFFF + 2); assert_eq!(&prefix3[0..2], b"\xFF\xFF"); } #[test] #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] fn key_prefix_panics_for_too_long_prefix() { let limit = 0xFFFF; let long_namespace = vec![0; limit + 1]; key_prefix(&long_namespace); } #[test] fn key_prefix_nested_works() { assert_eq!(key_prefix_nested(&[]), b""); assert_eq!(key_prefix_nested(&[b""]), b"\x00\x00"); assert_eq!(key_prefix_nested(&[b"", b""]), b"\x00\x00\x00\x00"); assert_eq!(key_prefix_nested(&[b"a"]), b"\x00\x01a"); assert_eq!(key_prefix_nested(&[b"a", b"ab"]), b"\x00\x01a\x00\x02ab"); assert_eq!( 
key_prefix_nested(&[b"a", b"ab", b"abc"]), b"\x00\x01a\x00\x02ab\x00\x03abc" ); } #[test] fn prefix_safe() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); foo.set(b"bar", b"gotcha"); assert_eq!(Some(b"gotcha".to_vec()), foo.get(b"bar")); let rfoo = ReadonlyPrefixedStorage::new(b"foo", &storage); assert_eq!(Some(b"gotcha".to_vec()), rfoo.get(b"bar")); let fo = ReadonlyPrefixedStorage::new(b"fo", &storage); assert_eq!(None, fo.get(b"obar")); } #[test] fn multi_level() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); let mut bar = PrefixedStorage::new(b"bar", &mut foo); bar.set(b"baz", b"winner"); let loader = ReadonlyPrefixedStorage::multilevel(&[b"foo", b"bar"], &storage); assert_eq!(Some(b"winner".to_vec()), loader.get(b"baz")); let mut foobar = PrefixedStorage::multilevel(&[b"foo", b"bar"], &mut storage); foobar.set(b"second", b"time"); let a = ReadonlyPrefixedStorage::new(b"foo", &storage); let b = ReadonlyPrefixedStorage::new(b"bar", &a); assert_eq!(Some(b"time".to_vec()), b.get(b"second")); } }
#![allow(dead_code)] use crate::traits::{ReadonlyStorage, Storage}; fn len(prefix: &[u8]) -> [u8; 2] { if prefix.len() > 0xFFFF { panic!("only supports namespaces up to length 0xFFFF") } let length_bytes = (prefix.len() as u64).to_be_bytes(); [length_bytes[6], length_bytes[7]] } fn key_prefix(namespace: &[u8]) -> Vec<u8> { let mut out = Vec::with_capacity(namespace.len() + 2); out.extend_from_slice(&len(namespace)); out.extend_from_slice(namespace); out } fn key_prefix_nested(namespaces: &[&[u8]]) -> Vec<u8> { let mut size = namespaces.len(); for &namespace in namespaces { size += namespace.len() + 2; } let mut out = Vec::with_capacity(size); for &namespace in namespaces { let prefix = key_prefix(namespace); out.extend_from_slice(&prefix); } out } pub struct ReadonlyPrefixedStorage<'a, T: ReadonlyStorage> { prefix: Vec<u8>, storage: &'a T, } impl<'a, T: ReadonlyStorage> ReadonlyPrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a T) -> Self { ReadonlyPrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: ReadonlyStorage> ReadonlyStorage for ReadonlyPrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } pub struct PrefixedStorage<'a, T: Storage> { prefix: Vec<u8>, storage: &'a mut T, } impl<'a, T: Storage> PrefixedStorage<'a, T> { fn new(namespace: &[u8], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix(namespace), storage, } } fn multilevel(namespaces: &[&[u8]], storage: &'a mut T) -> Self { PrefixedStorage { prefix: key_prefix_nested(namespaces), storage, } } } impl<'a, T: Storage> ReadonlyStorage for PrefixedStorage<'a, T> { fn get(&self, key: &[u8]) -> Option<Vec<u8>> { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.get(&k) } } impl<'a, T: Storage> Storage for PrefixedStorage<'a, T> { fn set(&mut self, key: &[u8], value: &[u8]) { let mut k = self.prefix.clone(); k.extend_from_slice(key); self.storage.set(&k, value) } } #[cfg(test)] mod test { use super::*; use crate::mock::MockStorage; #[test] fn key_prefix_works() { assert_eq!(key_prefix(b""), b"\x00\x00"); assert_eq!(key_prefix(b"a"), b"\x00\x01a"); assert_eq!(key_prefix(b"ab"), b"\x00\x02ab"); assert_eq!(key_prefix(b"abc"), b"\x00\x03abc"); } #[test] fn key_prefix_works_for_long_prefix() { let long_namespace1 = vec![0; 256]; let prefix1 = key_prefix(&long_namespace1); assert_eq!(prefix1.len(), 256 + 2); assert_eq!(&prefix1[0..2], b"\x01\x00"); let long_namespace2 = vec![0; 30000]; let prefix2 = key_prefix(&long_namespace2); assert_eq!(prefix2.len(), 30000 + 2); assert_eq!(&prefix2[0..2], b"\x75\x30"); let long_namespace3 = vec![0; 0xFFFF]; let prefix3 = key_prefix(&long_namespace3); assert_eq!(prefix3.len(), 0xFFFF + 2); assert_eq!(&prefix3[0..2], b"\xFF\xFF"); } #[test] #[should_panic(expected = "only supports namespaces up to length 0xFFFF")] fn key_prefix_panics_for_too_long_prefix() { let limit = 0xFFFF; let long_namespace = vec![0; limit + 1]; key_prefix(&long_namespace); } #[test] fn key_prefix_nested_works() { assert_eq!(key_prefix_nested(&[]), b""); assert_eq!(key_prefix_nested(&[b""]), b"\x00\x00"); assert_eq!(key_prefix_nested(&[b"", b""]), b"\x00\x00\x00\x00"); assert_eq!(key_prefix_nested(&[b"a"]), b"\x00\x01a"); assert_eq!(key_prefix_nested(&[b"a", b"ab"]), b"\x00\x01a\x00\x02ab"); assert_eq!( 
key_prefix_nested(&[b"a", b"ab", b"abc"]), b"\x00\x01a\x00\x02ab\x00\x03abc" ); } #[test]
#[test] fn multi_level() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); let mut bar = PrefixedStorage::new(b"bar", &mut foo); bar.set(b"baz", b"winner"); let loader = ReadonlyPrefixedStorage::multilevel(&[b"foo", b"bar"], &storage); assert_eq!(Some(b"winner".to_vec()), loader.get(b"baz")); let mut foobar = PrefixedStorage::multilevel(&[b"foo", b"bar"], &mut storage); foobar.set(b"second", b"time"); let a = ReadonlyPrefixedStorage::new(b"foo", &storage); let b = ReadonlyPrefixedStorage::new(b"bar", &a); assert_eq!(Some(b"time".to_vec()), b.get(b"second")); } }
fn prefix_safe() { let mut storage = MockStorage::new(); let mut foo = PrefixedStorage::new(b"foo", &mut storage); foo.set(b"bar", b"gotcha"); assert_eq!(Some(b"gotcha".to_vec()), foo.get(b"bar")); let rfoo = ReadonlyPrefixedStorage::new(b"foo", &storage); assert_eq!(Some(b"gotcha".to_vec()), rfoo.get(b"bar")); let fo = ReadonlyPrefixedStorage::new(b"fo", &storage); assert_eq!(None, fo.get(b"obar")); }
function_block-full_function
[ { "content": "pub fn do_write<T: Storage>(ctx: &mut Ctx, key: u32, value: u32) {\n\n let key = read_memory(ctx, key);\n\n let value = read_memory(ctx, value);\n\n with_storage_from_context(ctx, |store: &mut T| store.set(&key, &value));\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 0, "score": 209795.11266725228 }, { "content": "/// alloc is the same as external allocate, but designed to be called internally\n\npub fn alloc(size: usize) -> *mut c_void {\n\n // allocate the space in memory\n\n let buffer = vec![0u8; size];\n\n release_buffer(buffer)\n\n}\n\n\n", "file_path": "src/memory.rs", "rank": 4, "score": 194532.0809909548 }, { "content": "/// release_buffer is like alloc, but instead of creating a new vector\n\n/// it consumes an existing one and returns a pointer to the slice\n\n/// (preventing the memory from being freed until explicitly called later)\n\npub fn release_buffer(buffer: Vec<u8>) -> *mut c_void {\n\n let slice = build_slice(&buffer);\n\n mem::forget(buffer);\n\n Box::into_raw(slice) as *mut c_void\n\n}\n\n\n\n/// consume_slice will return the data referenced by the slice and\n\n/// deallocates the slice (and the vector when finished).\n\n/// Warning: only use this when you are sure the caller will never use (or free) the slice later\n\n///\n\n/// # Safety\n\n///\n\n/// If ptr is non-nil, it must refer to a valid slice, which was previously returned by alloc,\n\n/// and not yet deallocated. This call will deallocate the Slice and return an owner vector\n\n/// to the caller containing the referenced data.\n\n///\n\n/// Naturally, calling this function twice on the same pointer will double deallocate data\n\n/// and lead to a crash. Make sure to call it exactly once (either consuming the input in\n\n/// the wasm code OR deallocating the buffer from the caller).\n\npub unsafe fn consume_slice(ptr: *mut c_void) -> Result<Vec<u8>, Error> {\n", "file_path": "src/memory.rs", "rank": 5, "score": 183603.33025715523 }, { "content": "pub fn set_gas(instance: &mut Instance, limit: u64) {\n\n let used = if limit > GAS_LIMIT {\n\n 0\n\n } else {\n\n GAS_LIMIT - limit\n\n };\n\n metering::set_points_used(instance, used)\n\n}\n\n\n", "file_path": "lib/vm/src/backends/singlepass.rs", "rank": 6, "score": 176500.7696971482 }, { "content": "pub fn mock_instance(wasm: &[u8]) -> Instance<MockStorage, MockApi> {\n\n let deps = dependencies(20);\n\n Instance::from_code(wasm, deps).unwrap()\n\n}\n\n\n", "file_path": "lib/vm/src/testing.rs", "rank": 7, "score": 175320.40982484777 }, { "content": "pub fn setup_context<T: Storage>() -> (*mut c_void, fn(*mut c_void)) {\n\n (\n\n create_unmanaged_storage::<T>(),\n\n destroy_unmanaged_storage::<T>,\n\n )\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 8, "score": 175230.74957545794 }, { "content": "pub fn with_storage_from_context<T: Storage, F: FnMut(&mut T)>(ctx: &Ctx, mut func: F) {\n\n let mut storage: Option<T> = take_storage(ctx);\n\n if let Some(data) = &mut storage {\n\n func(data);\n\n }\n\n leave_storage(ctx, storage);\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 9, "score": 168567.7771764087 }, { "content": "pub fn do_read<T: Storage>(ctx: &mut Ctx, key_ptr: u32, val_ptr: u32) -> i32 {\n\n let key = read_memory(ctx, key_ptr);\n\n let mut value: Option<Vec<u8>> = None;\n\n with_storage_from_context(ctx, |store: &mut T| value = store.get(&key));\n\n match value {\n\n Some(buf) => write_memory(ctx, val_ptr, &buf),\n\n None => 0,\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 10, 
"score": 155429.31799763607 }, { "content": "pub fn set_gas(_instance: &mut Instance, _limit: u64) {}\n\n\n", "file_path": "lib/vm/src/backends/cranelift.rs", "rank": 11, "score": 153423.01922487075 }, { "content": "pub fn wasm_hash(wasm: &[u8]) -> Vec<u8> {\n\n Sha256::digest(wasm).to_vec()\n\n}\n\n\n", "file_path": "lib/vm/src/wasm_store.rs", "rank": 12, "score": 147599.6068819281 }, { "content": "/// build_slice returns a box of a slice, which can be sent over a call to extern\n\n/// note that this DOES NOT take ownership of the data, and we MUST NOT consume_slice\n\n/// the resulting data.\n\n/// The Box must be dropped (with scope), but not the data\n\npub fn build_slice(data: &[u8]) -> Box<Slice> {\n\n Box::new(Slice {\n\n offset: data.as_ptr() as u32,\n\n len: data.len() as u32,\n\n })\n\n}\n", "file_path": "src/memory.rs", "rank": 13, "score": 137912.82133203064 }, { "content": "pub fn check_api_compatibility(wasm_code: &[u8]) -> Result<()> {\n\n let mut reader = std::io::Cursor::new(wasm_code);\n\n let symbols = wasm_nm::symbols(PUBLIC_SYMBOLS.clone(), &mut reader).unwrap();\n\n if !only_imports(&symbols, SUPPORTED_IMPORTS) {\n\n return ValidationErr {\n\n msg: EXTRA_IMPORT_MSG,\n\n }\n\n .fail();\n\n }\n\n if !has_all_exports(&symbols, REQUIRED_EXPORTS) {\n\n return ValidationErr {\n\n msg: MISSING_EXPORT_MSG,\n\n }\n\n .fail();\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/vm/src/compatability.rs", "rank": 14, "score": 133920.7557552734 }, { "content": "// dependencies are all external requirements that can be injected for unit tests\n\npub fn dependencies(canonical_length: usize) -> Extern<MockStorage, MockApi> {\n\n Extern {\n\n storage: MockStorage::new(),\n\n api: MockApi::new(canonical_length),\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct MockStorage {\n\n data: HashMap<Vec<u8>, Vec<u8>>,\n\n}\n\n\n\nimpl MockStorage {\n\n pub fn new() -> Self {\n\n MockStorage {\n\n data: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mock.rs", "rank": 15, "score": 133306.18645548075 }, { "content": "pub fn compile(code: &[u8]) -> Result<Module, Error> {\n\n compile_with(code, compiler().as_ref()).context(CompileErr {})\n\n}\n\n\n", "file_path": "lib/vm/src/backends/cranelift.rs", "rank": 16, "score": 129100.70978395647 }, { "content": "pub fn compile(code: &[u8]) -> Result<Module, Error> {\n\n compile_with(code, compiler().as_ref()).context(CompileErr {})\n\n}\n\n\n", "file_path": "lib/vm/src/backends/singlepass.rs", "rank": 17, "score": 129100.70978395647 }, { "content": "pub fn transactional<S: Storage, T>(\n\n storage: &mut S,\n\n tx: &dyn Fn(&mut StorageTransaction<S>) -> Result<T>,\n\n) -> Result<T> {\n\n let mut c = StorageTransaction::new(storage);\n\n let res = tx(&mut c)?;\n\n c.commit();\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 18, "score": 128985.14507970461 }, { "content": "pub fn query<S: Storage, A: Api>(deps: &Extern<S, A>, msg: QueryMsg) -> Result<Vec<u8>> {\n\n match msg {\n\n QueryMsg::Verifier {} => query_verifier(deps),\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 19, "score": 128070.63165957047 }, { "content": "fn create_unmanaged_storage<T: Storage>() -> *mut c_void {\n\n let data = ContextData::<T> { data: None };\n\n let state = Box::new(data);\n\n Box::into_raw(state) as *mut c_void\n\n}\n\n\n\nunsafe fn get_data<T: Storage>(ptr: *mut c_void) -> Box<ContextData<T>> {\n\n Box::from_raw(ptr as *mut ContextData<T>)\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 20, "score": 
125494.79654506344 }, { "content": "// Expects a (fixed size) Slice struct at ptr, which is read. This links to the\n\n// memory region, which is read in the second step.\n\npub fn read_memory(ctx: &Ctx, ptr: u32) -> Vec<u8> {\n\n let slice = to_slice(ctx, ptr);\n\n let memory = ctx.memory(0);\n\n\n\n // TODO: there must be a faster way to copy memory\n\n match WasmPtr::<u8, Array>::new(slice.offset).deref(memory, 0, slice.len) {\n\n Some(cells) => {\n\n let len = slice.len as usize;\n\n let mut result = vec![0u8; len];\n\n for i in 0..len {\n\n // result[i] = unsafe { cells.get_unchecked(i).get() }\n\n // resolved to memcpy, but only if we really start copying huge arrays\n\n result[i] = cells[i].get();\n\n }\n\n result\n\n }\n\n None => panic!(\n\n \"Error dereferencing slice {:?} in wasm memory of size {}. This typically happens when the given pointer does not point to a Slice struct.\",\n\n slice,\n\n memory.size().bytes().0\n\n ),\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/memory.rs", "rank": 21, "score": 125041.43497377008 }, { "content": "pub fn load<P: Into<PathBuf>>(dir: P, id: &[u8]) -> Result<Vec<u8>, Error> {\n\n // this requires the directory and file to exist\n\n let path = dir.into().join(hex::encode(id));\n\n let mut file = File::open(path).context(IoErr {})?;\n\n\n\n let mut wasm = Vec::<u8>::new();\n\n let _ = file.read_to_end(&mut wasm).context(IoErr {})?;\n\n Ok(wasm)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::fs::create_dir_all;\n\n use tempfile::TempDir;\n\n\n\n #[test]\n\n fn save_and_load() {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let path = tmp_dir.path();\n", "file_path": "lib/vm/src/wasm_store.rs", "rank": 22, "score": 124947.50818312477 }, { "content": "/// save stores the wasm code in the given directory and returns an ID for lookup.\n\n/// It will create the directory if it doesn't exist.\n\n/// If the file already exists, it will return an error.\n\npub fn save<P: Into<PathBuf>>(dir: P, wasm: &[u8]) -> Result<Vec<u8>, Error> {\n\n // calculate filename\n\n let id = wasm_hash(wasm);\n\n let filename = hex::encode(&id);\n\n let filepath = dir.into().join(&filename);\n\n\n\n // write data to file\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .open(filepath)\n\n .context(IoErr {})?;\n\n file.write_all(wasm).context(IoErr {})?;\n\n\n\n Ok(id)\n\n}\n\n\n", "file_path": "lib/vm/src/wasm_store.rs", "rank": 23, "score": 124947.50818312477 }, { "content": "fn destroy_unmanaged_storage<T: Storage>(ptr: *mut c_void) {\n\n if !ptr.is_null() {\n\n // auto-dropped with scope\n\n let _ = unsafe { get_data::<T>(ptr) };\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 24, "score": 121862.28704014755 }, { "content": "pub fn transactional_deps<S: Storage, A: Api, T>(\n\n deps: &mut Extern<S, A>,\n\n tx: &dyn Fn(&mut Extern<StorageTransaction<S>, A>) -> Result<T>,\n\n) -> Result<T> {\n\n let c = StorageTransaction::new(&mut deps.storage);\n\n let mut deps = Extern {\n\n storage: c,\n\n api: deps.api,\n\n };\n\n let res = tx(&mut deps);\n\n if res.is_ok() {\n\n deps.storage.commit();\n\n } else {\n\n deps.storage.rollback();\n\n }\n\n res\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/storage.rs", "rank": 25, "score": 121538.37793114793 }, { "content": "// write_memory returns how many bytes written on success\n\n// negative result is how many bytes requested if too small\n\npub fn write_memory(ctx: &Ctx, ptr: u32, data: &[u8]) -> i32 {\n\n let slice = to_slice(ctx, ptr);\n\n if 
data.len() > (slice.len as usize) {\n\n return -(data.len() as i32);\n\n }\n\n if data.is_empty() {\n\n return 0;\n\n }\n\n\n\n let memory = ctx.memory(0);\n\n // TODO: there must be a faster way to copy memory\n\n let buffer = unsafe {\n\n WasmPtr::<u8, Array>::new(slice.offset)\n\n .deref_mut(memory, 0, slice.len)\n\n .unwrap()\n\n };\n\n for i in 0..data.len() {\n\n buffer[i].set(data[i])\n\n }\n\n data.len() as i32\n\n}\n\n\n", "file_path": "lib/vm/src/memory.rs", "rank": 26, "score": 121229.88930856201 }, { "content": "// init mimicks the call signature of the smart contracts.\n\n// thus it moves params and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn init<S: Storage + 'static, A: Api + 'static, T: Serialize + JsonSchema>(\n\n instance: &mut Instance<S, A>,\n\n params: Params,\n\n msg: T,\n\n) -> ContractResult {\n\n match to_vec(&msg) {\n\n Err(e) => ContractResult::Err(e.to_string()),\n\n Ok(serialized_msg) => call_init(instance, &params, &serialized_msg).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/testing.rs", "rank": 27, "score": 121203.20152006797 }, { "content": "// query mimicks the call signature of the smart contracts.\n\n// thus it moves params and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn query<S: Storage + 'static, A: Api + 'static, T: Serialize + JsonSchema>(\n\n instance: &mut Instance<S, A>,\n\n msg: T,\n\n) -> QueryResult {\n\n match to_vec(&msg) {\n\n Err(e) => QueryResult::Err(e.to_string()),\n\n Ok(serialized_msg) => call_query(instance, &serialized_msg).unwrap(),\n\n }\n\n}\n", "file_path": "lib/vm/src/testing.rs", "rank": 28, "score": 121203.20152006797 }, { "content": "// handle mimicks the call signature of the smart contracts.\n\n// thus it moves params and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn handle<S: Storage + 'static, A: Api + 'static, T: Serialize + JsonSchema>(\n\n instance: &mut Instance<S, A>,\n\n params: Params,\n\n msg: T,\n\n) -> ContractResult {\n\n match to_vec(&msg) {\n\n Err(e) => ContractResult::Err(e.to_string()),\n\n Ok(serialized_msg) => call_handle(instance, &params, &serialized_msg).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/testing.rs", "rank": 29, "score": 121203.20152006797 }, { "content": "// dependencies are all external requirements that can be injected in a real-wasm contract\n\npub fn dependencies() -> Extern<ExternalStorage, ExternalApi> {\n\n Extern {\n\n storage: ExternalStorage::new(),\n\n api: ExternalApi::new(),\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ExternalStorage {}\n\n\n\nimpl ExternalStorage {\n\n pub fn new() -> ExternalStorage {\n\n ExternalStorage {}\n\n }\n\n}\n\n\n\nimpl ReadonlyStorage for ExternalStorage {\n\n fn get(&self, key: &[u8]) -> Option<Vec<u8>> {\n\n let key = build_slice(key);\n\n let key_ptr = &*key as *const Slice as *const c_void;\n", "file_path": "src/imports.rs", "rank": 30, "score": 115647.79360088546 }, { "content": "pub fn handle<S: Storage, A: Api>(\n\n deps: &mut Extern<S, A>,\n\n params: Params,\n\n msg: HandleMsg,\n\n) -> Result<Response> {\n\n match msg {\n\n HandleMsg::Release {} => do_release(deps, params),\n\n HandleMsg::CpuLoop {} => do_cpu_loop(),\n\n HandleMsg::StorageLoop {} => do_storage_loop(deps),\n\n HandleMsg::Panic {} => do_panic(),\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 31, "score": 114201.61218987967 }, { 
"content": "pub fn init<S: Storage, A: Api>(\n\n deps: &mut Extern<S, A>,\n\n params: Params,\n\n msg: InitMsg,\n\n) -> Result<Response> {\n\n deps.storage.set(\n\n CONFIG_KEY,\n\n &to_vec(&State {\n\n verifier: deps.api.canonical_address(&msg.verifier)?,\n\n beneficiary: deps.api.canonical_address(&msg.beneficiary)?,\n\n funder: params.message.signer,\n\n })\n\n .context(SerializeErr { kind: \"State\" })?,\n\n );\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 32, "score": 114201.61218987967 }, { "content": "pub fn leave_storage<T: Storage>(ctx: &Ctx, storage: Option<T>) {\n\n let mut b = unsafe { get_data(ctx.data) };\n\n // clean-up if needed\n\n let _ = b.data.take();\n\n b.data = storage;\n\n mem::forget(b); // we do this to avoid cleanup\n\n}\n", "file_path": "lib/vm/src/context.rs", "rank": 33, "score": 114018.77561278648 }, { "content": "pub fn take_storage<T: Storage>(ctx: &Ctx) -> Option<T> {\n\n let mut b = unsafe { get_data(ctx.data) };\n\n let res = b.data.take();\n\n mem::forget(b); // we do this to avoid cleanup\n\n res\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 34, "score": 112251.34621389068 }, { "content": "fn do_storage_loop<S: Storage, A: Api>(deps: &mut Extern<S, A>) -> Result<Response> {\n\n let mut test_case = 0u64;\n\n loop {\n\n deps.storage\n\n .set(b\"test.key\", test_case.to_string().as_bytes());\n\n test_case += 1;\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 35, "score": 108972.60844260872 }, { "content": "pub fn do_human_address<A: Api>(api: A, ctx: &mut Ctx, canonical_ptr: u32, human_ptr: u32) -> i32 {\n\n let canon = read_memory(ctx, canonical_ptr);\n\n match api.human_address(&CanonicalAddr(canon)) {\n\n Ok(human) => {\n\n let bz = human.as_str().as_bytes();\n\n write_memory(ctx, human_ptr, bz);\n\n bz.len() as i32\n\n }\n\n Err(_) => -1,\n\n }\n\n}\n\n\n\n/** context data **/\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 36, "score": 105408.71327447426 }, { "content": "pub fn call_handle<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n params: &Params,\n\n msg: &[u8],\n\n) -> Result<ContractResult, Error> {\n\n let params = to_vec(params).context(SerializeErr {})?;\n\n let data = call_handle_raw(instance, &params, msg)?;\n\n let res: ContractResult = from_slice(&data).context(ParseErr {})?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 37, "score": 105197.03760615873 }, { "content": "pub fn call_query<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n msg: &[u8],\n\n) -> Result<QueryResult, Error> {\n\n let data = call_query_raw(instance, msg)?;\n\n let res: QueryResult = from_slice(&data).context(ParseErr {})?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 38, "score": 105197.03760615873 }, { "content": "pub fn call_init<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n params: &Params,\n\n msg: &[u8],\n\n) -> Result<ContractResult, Error> {\n\n let params = to_vec(params).context(SerializeErr {})?;\n\n let data = call_init_raw(instance, &params, msg)?;\n\n let res: ContractResult = from_slice(&data).context(ParseErr {})?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 39, "score": 105197.03760615873 }, { "content": "pub fn call_init_raw<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n params: &[u8],\n\n msg: &[u8],\n\n) -> Result<Vec<u8>, Error> {\n\n 
call_raw(instance, \"init\", params, msg)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 40, "score": 103260.47756865738 }, { "content": "pub fn call_handle_raw<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n params: &[u8],\n\n msg: &[u8],\n\n) -> Result<Vec<u8>, Error> {\n\n call_raw(instance, \"handle\", params, msg)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 41, "score": 103260.47756865738 }, { "content": "pub fn call_query_raw<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n msg: &[u8],\n\n) -> Result<Vec<u8>, Error> {\n\n // we cannot resuse the call_raw functionality as it assumes a param variable... just do it inline\n\n let msg_offset = instance.allocate(msg)?;\n\n let func: Func<u32, u32> = instance.func(\"query\")?;\n\n let res_offset = func.call(msg_offset).context(RuntimeErr {})?;\n\n let data = instance.memory(res_offset);\n\n // free return value in wasm (arguments were freed in wasm code)\n\n instance.deallocate(res_offset)?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "lib/vm/src/calls.rs", "rank": 42, "score": 103260.47756865738 }, { "content": "pub fn compiler_for_backend(backend: &str) -> Option<Box<dyn Compiler>> {\n\n match backend {\n\n #[cfg(any(feature = \"cranelift\", feature = \"default-cranelift\"))]\n\n \"cranelift\" => Some(cranelift::compiler()),\n\n\n\n #[cfg(any(feature = \"singlepass\", feature = \"default-singlepass\"))]\n\n \"singlepass\" => Some(singlepass::compiler()),\n\n\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(feature = \"default-cranelift\")]\n\npub use cranelift::{backend, compile, get_gas, set_gas};\n\n\n\n#[cfg(feature = \"default-singlepass\")]\n\npub use singlepass::{backend, compile, get_gas, set_gas};\n", "file_path": "lib/vm/src/backends/mod.rs", "rank": 43, "score": 102174.7201831749 }, { "content": "fn query_verifier<S: Storage, A: Api>(deps: &Extern<S, A>) -> Result<Vec<u8>> {\n\n let data = deps\n\n .storage\n\n .get(CONFIG_KEY)\n\n .context(NotFound { kind: \"State\" })?;\n\n let state: State = from_slice(&data).context(ParseErr { kind: \"State\" })?;\n\n let addr = deps.api.human_address(&state.verifier)?;\n\n // we just pass the address as raw bytes\n\n // these will be base64 encoded into the json we return, and parsed on the way out.\n\n // maybe we should wrap this in a struct then json encode it into a vec?\n\n // other ideas?\n\n Ok(addr.as_str().as_bytes().to_vec())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm::mock::{dependencies, mock_params};\n\n use cosmwasm::storage::transactional_deps;\n\n // import trait to get access to read\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 44, "score": 99824.14831424455 }, { "content": "fn do_release<S: Storage, A: Api>(deps: &mut Extern<S, A>, params: Params) -> Result<Response> {\n\n let data = deps\n\n .storage\n\n .get(CONFIG_KEY)\n\n .context(NotFound { kind: \"State\" })?;\n\n let state: State = from_slice(&data).context(ParseErr { kind: \"State\" })?;\n\n\n\n if params.message.signer == state.verifier {\n\n let to_addr = deps.api.human_address(&state.beneficiary)?;\n\n let from_addr = deps.api.human_address(&params.contract.address)?;\n\n let res = Response {\n\n log: Some(format!(\"released funds to {}\", to_addr)),\n\n messages: vec![CosmosMsg::Send {\n\n from_address: from_addr,\n\n to_address: to_addr,\n\n amount: params.contract.balance.unwrap_or_default(),\n\n }],\n\n data: None,\n\n };\n\n Ok(res)\n\n } else {\n\n unauthorized()\n\n }\n\n}\n\n\n", 
"file_path": "contracts/hackatom/src/contract.rs", "rank": 45, "score": 96375.91500300483 }, { "content": "// Storage extends ReadonlyStorage to give mutable access\n\npub trait Storage: ReadonlyStorage {\n\n fn set(&mut self, key: &[u8], value: &[u8]);\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 46, "score": 91322.98099180088 }, { "content": "pub fn unauthorized<T>() -> Result<T> {\n\n Unauthorized {}.fail()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn use_invalid() {\n\n let e: Result<()> = invalid(\"demo\", \"not implemented\");\n\n match e {\n\n Err(Error::ValidationErr { field, msg, .. }) => {\n\n assert_eq!(field, \"demo\");\n\n assert_eq!(msg, \"not implemented\");\n\n }\n\n Err(e) => panic!(\"unexpected error, {:?}\", e),\n\n Ok(_) => panic!(\"invalid must return error\"),\n\n }\n\n }\n", "file_path": "src/errors.rs", "rank": 47, "score": 90660.14519510813 }, { "content": "pub fn do_canonical_address<A: Api>(\n\n api: A,\n\n ctx: &mut Ctx,\n\n human_ptr: u32,\n\n canonical_ptr: u32,\n\n) -> i32 {\n\n let human = read_memory(ctx, human_ptr);\n\n let human = match String::from_utf8(human) {\n\n Ok(human_str) => HumanAddr(human_str),\n\n Err(_) => return -2,\n\n };\n\n match api.canonical_address(&human) {\n\n Ok(canon) => {\n\n write_memory(ctx, canonical_ptr, canon.as_bytes());\n\n canon.len() as i32\n\n }\n\n Err(_) => -1,\n\n }\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 48, "score": 90566.36014801796 }, { "content": "pub fn backend() -> &'static str {\n\n \"singlepass\"\n\n}\n\n\n", "file_path": "lib/vm/src/backends/singlepass.rs", "rank": 49, "score": 88980.85832134237 }, { "content": "pub fn backend() -> &'static str {\n\n \"cranelift\"\n\n}\n\n\n", "file_path": "lib/vm/src/backends/cranelift.rs", "rank": 50, "score": 88980.85832134237 }, { "content": "/// do_query should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_query<T: DeserializeOwned + JsonSchema>(\n\n query_fn: &dyn Fn(&Extern<ExternalStorage, ExternalApi>, T) -> Result<Vec<u8>, Error>,\n\n msg_ptr: *mut c_void,\n\n) -> *mut c_void {\n\n match _do_query(query_fn, msg_ptr) {\n\n Ok(res) => res,\n\n Err(err) => make_query_error_c_string(err),\n\n }\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 51, "score": 87394.44012171216 }, { "content": "/// do_handle should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_handle<T: DeserializeOwned + JsonSchema>(\n\n handle_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi>,\n\n Params,\n\n T,\n\n ) -> Result<Response, Error>,\n\n params_ptr: *mut c_void,\n\n msg_ptr: *mut c_void,\n\n) -> *mut c_void {\n\n match _do_handle(handle_fn, params_ptr, msg_ptr) {\n\n Ok(res) => res,\n\n Err(err) => make_error_c_string(err),\n\n }\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 52, "score": 87394.44012171216 }, { "content": "/// do_init should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_init<T: DeserializeOwned + JsonSchema>(\n\n init_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi>,\n\n Params,\n\n T,\n\n ) -> Result<Response, Error>,\n\n params_ptr: *mut c_void,\n\n msg_ptr: *mut c_void,\n\n) -> *mut c_void {\n\n match _do_init(init_fn, params_ptr, msg_ptr) {\n\n Ok(res) => res,\n\n Err(err) => make_error_c_string(err),\n\n }\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 53, "score": 87394.44012171216 }, { "content": "struct ContextData<T: 
Storage> {\n\n data: Option<T>,\n\n}\n\n\n", "file_path": "lib/vm/src/context.rs", "rank": 54, "score": 86862.77169554558 }, { "content": "fn make_error_c_string<T: Display>(error: T) -> *mut c_void {\n\n let v = to_vec(&ContractResult::Err(error.to_string())).unwrap();\n\n release_buffer(v)\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 55, "score": 86752.01597123427 }, { "content": "// ReadonlyStorage is access to the contracts persistent data store\n\n//pub trait ReadonlyStorage: Clone {\n\npub trait ReadonlyStorage {\n\n fn get(&self, key: &[u8]) -> Option<Vec<u8>>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 56, "score": 86238.0084170942 }, { "content": "pub fn compiler() -> Box<dyn Compiler> {\n\n Box::new(CraneliftCompiler::new())\n\n}\n\n\n", "file_path": "lib/vm/src/backends/cranelift.rs", "rank": 57, "score": 85895.85267121783 }, { "content": "pub fn compiler() -> Box<dyn Compiler> {\n\n let c: StreamingCompiler<SinglePassMCG, _, _, _, _> = StreamingCompiler::new(move || {\n\n let mut chain = MiddlewareChain::new();\n\n chain.push(DeterministicMiddleware::new());\n\n chain.push(metering::Metering::new(GAS_LIMIT));\n\n chain\n\n });\n\n Box::new(c)\n\n}\n\n\n", "file_path": "lib/vm/src/backends/singlepass.rs", "rank": 58, "score": 85895.85267121783 }, { "content": "fn make_query_error_c_string<T: Display>(error: T) -> *mut c_void {\n\n let v = to_vec(&QueryResult::Err(error.to_string())).unwrap();\n\n release_buffer(v)\n\n}\n", "file_path": "src/exports.rs", "rank": 59, "score": 85379.94630064521 }, { "content": "pub fn get_gas(instance: &Instance) -> u64 {\n\n let used = metering::get_points_used(instance);\n\n if used > GAS_LIMIT {\n\n 0\n\n } else {\n\n GAS_LIMIT - used\n\n }\n\n}\n", "file_path": "lib/vm/src/backends/singlepass.rs", "rank": 60, "score": 84477.22352061447 }, { "content": "pub fn get_gas(_instance: &Instance) -> u64 {\n\n FAKE_GAS_AVAILABLE\n\n}\n", "file_path": "lib/vm/src/backends/cranelift.rs", "rank": 61, "score": 84477.22352061447 }, { "content": "// just set signer, sent funds, and balance - rest given defaults\n\n// this is intended for use in testcode only\n\npub fn mock_params<T: Api, U: Into<HumanAddr>>(\n\n api: &T,\n\n signer: U,\n\n sent: &[Coin],\n\n balance: &[Coin],\n\n) -> Params {\n\n let signer = signer.into();\n\n Params {\n\n block: BlockInfo {\n\n height: 12_345,\n\n time: 1_571_797_419,\n\n chain_id: \"cosmos-testnet-14002\".to_string(),\n\n },\n\n message: MessageInfo {\n\n signer: api.canonical_address(&signer).unwrap(),\n\n sent_funds: if sent.is_empty() {\n\n None\n\n } else {\n\n Some(sent.to_vec())\n\n },\n", "file_path": "src/mock.rs", "rank": 62, "score": 83129.10067212109 }, { "content": "// coin is a shortcut constructor for a set of one denomination of coins\n\npub fn coin(amount: &str, denom: &str) -> Vec<Coin> {\n\n vec![Coin {\n\n amount: amount.to_string(),\n\n denom: denom.to_string(),\n\n }]\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::serde::{from_slice, to_vec};\n\n\n\n #[test]\n\n fn can_deser_error_result() {\n\n let fail = ContractResult::Err(\"foobar\".to_string());\n\n let bin = to_vec(&fail).expect(\"encode contract result\");\n\n println!(\"error: {}\", std::str::from_utf8(&bin).unwrap());\n\n let back: ContractResult = from_slice(&bin).expect(\"decode contract result\");\n\n assert_eq!(fail, back);\n\n }\n", "file_path": "src/types.rs", "rank": 63, "score": 81909.67964998407 }, { "content": "pub fn dyn_contract_err<T>(msg: String) -> Result<T> {\n\n DynContractErr { msg 
}.fail()\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 64, "score": 81707.28952848174 }, { "content": "pub fn contract_err<T>(msg: &'static str) -> Result<T> {\n\n ContractErr { msg }.fail()\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 65, "score": 80491.05049938073 }, { "content": "#[test]\n\nfn init_and_query() {\n\n let mut deps = mock_instance(WASM);\n\n\n\n let verifier = HumanAddr(String::from(\"verifies\"));\n\n let beneficiary = HumanAddr(String::from(\"benefits\"));\n\n let creator = HumanAddr(String::from(\"creator\"));\n\n let msg = InitMsg {\n\n verifier: verifier.clone(),\n\n beneficiary,\n\n };\n\n let params = mock_params(&deps.api, creator.as_str(), &coin(\"1000\", \"earth\"), &[]);\n\n let res = init(&mut deps, params, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // now let's query\n\n let qres = query(&mut deps, QueryMsg::Verifier {}).unwrap();\n\n let returned = from_utf8(&qres).unwrap();\n\n assert_eq!(verifier.as_str(), returned);\n\n\n\n // bad query returns parse error (pass wrong type - this connection is not enforced)\n\n let qres = query(&mut deps, HandleMsg::Release {});\n\n match qres {\n\n QueryResult::Err(msg) => assert!(msg.starts_with(\"Error parsing QueryMsg:\"), msg),\n\n _ => panic!(\"Call should fail\"),\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 66, "score": 76615.04957861685 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_instance(WASM);\n\n let verifier = HumanAddr(String::from(\"verifies\"));\n\n let beneficiary = HumanAddr(String::from(\"benefits\"));\n\n let creator = HumanAddr(String::from(\"creator\"));\n\n let expected_state = State {\n\n verifier: deps.api.canonical_address(&verifier).unwrap(),\n\n beneficiary: deps.api.canonical_address(&beneficiary).unwrap(),\n\n funder: deps.api.canonical_address(&creator).unwrap(),\n\n };\n\n\n\n let msg = InitMsg {\n\n verifier,\n\n beneficiary,\n\n };\n\n let params = mock_params(&deps.api, \"creator\", &coin(\"1000\", \"earth\"), &[]);\n\n let res = init(&mut deps, params, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // it worked, let's check the state\n\n deps.with_storage(|store| {\n\n let data = store.get(CONFIG_KEY).expect(\"no data stored\");\n\n let state: State = from_slice(&data).unwrap();\n\n assert_eq!(state, expected_state);\n\n });\n\n}\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 67, "score": 76615.04957861685 }, { "content": "#[test]\n\nfn failed_handle() {\n\n let mut deps = mock_instance(WASM);\n\n\n\n // initialize the store\n\n let verifier = HumanAddr(String::from(\"verifies\"));\n\n let beneficiary = HumanAddr(String::from(\"benefits\"));\n\n let creator = HumanAddr(String::from(\"creator\"));\n\n\n\n let init_msg = InitMsg {\n\n verifier: verifier.clone(),\n\n beneficiary: beneficiary.clone(),\n\n };\n\n let init_params = mock_params(\n\n &deps.api,\n\n creator.as_str(),\n\n &coin(\"1000\", \"earth\"),\n\n &coin(\"1000\", \"earth\"),\n\n );\n\n let init_res = init(&mut deps, init_params, init_msg).unwrap();\n\n assert_eq!(0, init_res.messages.len());\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 68, "score": 76615.04957861685 }, { "content": "#[test]\n\nfn proper_handle() {\n\n let mut deps = mock_instance(WASM);\n\n\n\n // initialize the store\n\n let verifier = HumanAddr(String::from(\"verifies\"));\n\n let beneficiary = HumanAddr(String::from(\"benefits\"));\n\n\n\n let init_msg = InitMsg {\n\n verifier: 
verifier.clone(),\n\n beneficiary: beneficiary.clone(),\n\n };\n\n let init_params = mock_params(\n\n &deps.api,\n\n \"creator\",\n\n &coin(\"1000\", \"earth\"),\n\n &coin(\"1000\", \"earth\"),\n\n );\n\n let init_res = init(&mut deps, init_params, init_msg).unwrap();\n\n assert_eq!(0, init_res.messages.len());\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 69, "score": 76615.04957861685 }, { "content": "#[test]\n\nfn handle_panic_and_loops() {\n\n let mut deps = mock_instance(WASM);\n\n // Gas must be set so we die early on infinite loop\n\n deps.set_gas(1_000_000);\n\n\n\n // initialize the store\n\n let verifier = HumanAddr(String::from(\"verifies\"));\n\n let beneficiary = HumanAddr(String::from(\"benefits\"));\n\n let creator = HumanAddr(String::from(\"creator\"));\n\n\n\n let init_msg = InitMsg {\n\n verifier: verifier.clone(),\n\n beneficiary: beneficiary.clone(),\n\n };\n\n let init_params = mock_params(\n\n &deps.api,\n\n creator.as_str(),\n\n &coin(\"1000\", \"earth\"),\n\n &coin(\"1000\", \"earth\"),\n\n );\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 70, "score": 74796.13015335146 }, { "content": "#[test]\n\nfn fails_on_bad_init() {\n\n let mut deps = mock_instance(WASM);\n\n let params = mock_params(&deps.api, \"creator\", &coin(\"1000\", \"earth\"), &[]);\n\n // bad init returns parse error (pass wrong type - this connection is not enforced)\n\n let res = init(&mut deps, params, HandleMsg::Release {});\n\n assert_eq!(true, res.is_err());\n\n}\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 71, "score": 74796.13015335146 }, { "content": "pub fn invalid<T>(field: &'static str, msg: &'static str) -> Result<T> {\n\n ValidationErr { field, msg }.fail()\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 72, "score": 74748.97305886429 }, { "content": "fn call_raw<S: Storage + 'static, A: Api + 'static>(\n\n instance: &mut Instance<S, A>,\n\n name: &str,\n\n params: &[u8],\n\n msg: &[u8],\n\n) -> Result<Vec<u8>, Error> {\n\n let param_offset = instance.allocate(params)?;\n\n let msg_offset = instance.allocate(msg)?;\n\n\n\n let func: Func<(u32, u32), u32> = instance.func(name)?;\n\n let res_offset = func.call(param_offset, msg_offset).context(RuntimeErr {})?;\n\n\n\n let data = instance.memory(res_offset);\n\n // free return value in wasm (arguments were freed in wasm code)\n\n instance.deallocate(res_offset)?;\n\n Ok(data)\n\n}\n", "file_path": "lib/vm/src/calls.rs", "rank": 73, "score": 63963.750658052966 }, { "content": "// Api are callbacks to system functions defined outside of the wasm modules.\n\n// This is a trait to allow Mocks in the test code.\n\n//\n\n// Currently it just supports address conversion, we could add eg. crypto functions here.\n\n// These should all be pure (stateless) functions. 
If you need state, you probably want\n\n// to use the Querier (TODO)\n\n//\n\n// We should consider if there is a way for modules to opt-in to only a subset of these\n\n// Api for backwards compatibility in systems that don't have them all.\n\npub trait Api: Copy + Clone {\n\n fn canonical_address(&self, human: &HumanAddr) -> Result<CanonicalAddr>;\n\n fn human_address(&self, canonical: &CanonicalAddr) -> Result<HumanAddr>;\n\n}\n", "file_path": "src/traits.rs", "rank": 74, "score": 51190.44008431054 }, { "content": "pub trait CacheExt<T: Debug> {\n\n fn convert_cache(self) -> Result<T>;\n\n}\n\n\n\nimpl<T: Debug> CacheExt<T> for Result<T, CacheError> {\n\n fn convert_cache(self) -> Result<T> {\n\n self.map_err(|err| {\n\n let msg = format!(\"{:?}\", err);\n\n // construct like this (not just Err(Error::CacheErr)) to allow backtraces\n\n let res: Result<T> = CacheErr { msg }.fail();\n\n res.unwrap_err()\n\n })\n\n }\n\n}\n", "file_path": "lib/vm/src/errors.rs", "rank": 75, "score": 48621.3361222088 }, { "content": "fn main() {\n\n let mut pwd = current_dir().unwrap();\n\n pwd.push(\"schema\");\n\n create_dir_all(&pwd).unwrap();\n\n\n\n let schema = schema_for!(Params);\n\n export_schema(&schema, &pwd, \"params.json\");\n\n\n\n let schema = schema_for!(CosmosMsg);\n\n export_schema(&schema, &pwd, \"cosmos_msg.json\");\n\n\n\n let schema = schema_for!(ContractResult);\n\n export_schema(&schema, &pwd, \"contract_result.json\");\n\n\n\n let schema = schema_for!(ContractResult);\n\n export_schema(&schema, &pwd, \"query_result.json\");\n\n}\n\n\n", "file_path": "examples/schema.rs", "rank": 76, "score": 48125.895240819234 }, { "content": "fn main() {\n\n let mut pwd = current_dir().unwrap();\n\n pwd.push(\"schema\");\n\n create_dir_all(&pwd).unwrap();\n\n\n\n let schema = schema_for!(InitMsg);\n\n export_schema(&schema, &pwd, \"init_msg.json\");\n\n\n\n let schema = schema_for!(HandleMsg);\n\n export_schema(&schema, &pwd, \"handle_msg.json\");\n\n\n\n let schema = schema_for!(QueryMsg);\n\n export_schema(&schema, &pwd, \"query_msg.json\");\n\n\n\n let schema = schema_for!(State);\n\n export_schema(&schema, &pwd, \"state.json\");\n\n}\n\n\n", "file_path": "contracts/hackatom/examples/schema.rs", "rank": 77, "score": 46066.477893591946 }, { "content": "fn do_panic() -> Result<Response> {\n\n panic!(\"This page intentionally faulted\");\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 78, "score": 42652.42115330541 }, { "content": "fn do_cpu_loop() -> Result<Response> {\n\n let mut counter = 0u64;\n\n loop {\n\n counter += 1;\n\n if counter >= 9_000_000_000 {\n\n counter = 0;\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 79, "score": 41824.60027897274 }, { "content": "fn _do_handle<T: DeserializeOwned + JsonSchema>(\n\n handle_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi>,\n\n Params,\n\n T,\n\n ) -> Result<Response, Error>,\n\n params_ptr: *mut c_void,\n\n msg_ptr: *mut c_void,\n\n) -> Result<*mut c_void, Error> {\n\n let params: Vec<u8> = unsafe { consume_slice(params_ptr)? };\n\n let msg: Vec<u8> = unsafe { consume_slice(msg_ptr)? 
};\n\n\n\n let params: Params = from_slice(&params).context(ParseErr { kind: \"Params\" })?;\n\n let msg: T = from_slice(&msg).context(ParseErr { kind: \"HandleMsg\" })?;\n\n let mut deps = dependencies();\n\n let res = handle_fn(&mut deps, params, msg)?;\n\n let json = to_vec(&ContractResult::Ok(res)).context(SerializeErr {\n\n kind: \"ContractResult\",\n\n })?;\n\n Ok(release_buffer(json))\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 80, "score": 41140.68454421091 }, { "content": "fn _do_query<T: DeserializeOwned + JsonSchema>(\n\n query_fn: &dyn Fn(&Extern<ExternalStorage, ExternalApi>, T) -> Result<Vec<u8>, Error>,\n\n msg_ptr: *mut c_void,\n\n) -> Result<*mut c_void, Error> {\n\n let msg: Vec<u8> = unsafe { consume_slice(msg_ptr)? };\n\n\n\n let msg: T = from_slice(&msg).context(ParseErr { kind: \"QueryMsg\" })?;\n\n let deps = dependencies();\n\n let res = query_fn(&deps, msg)?;\n\n let json = to_vec(&QueryResult::Ok(res)).context(SerializeErr {\n\n kind: \"QueryResult\",\n\n })?;\n\n Ok(release_buffer(json))\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 81, "score": 41140.68454421091 }, { "content": "fn _do_init<T: DeserializeOwned + JsonSchema>(\n\n init_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi>,\n\n Params,\n\n T,\n\n ) -> Result<Response, Error>,\n\n params_ptr: *mut c_void,\n\n msg_ptr: *mut c_void,\n\n) -> Result<*mut c_void, Error> {\n\n let params: Vec<u8> = unsafe { consume_slice(params_ptr)? };\n\n let msg: Vec<u8> = unsafe { consume_slice(msg_ptr)? };\n\n let params: Params = from_slice(&params).context(ParseErr { kind: \"Params\" })?;\n\n let msg: T = from_slice(&msg).context(ParseErr { kind: \"InitMsg\" })?;\n\n let mut deps = dependencies();\n\n let res = init_fn(&mut deps, params, msg)?;\n\n let json = to_vec(&ContractResult::Ok(res)).context(SerializeErr {\n\n kind: \"ContractResult\",\n\n })?;\n\n Ok(release_buffer(json))\n\n}\n\n\n", "file_path": "src/exports.rs", "rank": 82, "score": 41140.68454421091 }, { "content": "// to_slice reads in a ptr to slice in wasm memory and constructs the object we can use to access it\n\nfn to_slice(ctx: &Ctx, ptr: u32) -> Slice {\n\n let memory = ctx.memory(0);\n\n let wptr = WasmPtr::<Slice>::new(ptr);\n\n let cell = wptr.deref(memory).unwrap();\n\n cell.get()\n\n}\n", "file_path": "lib/vm/src/memory.rs", "rank": 83, "score": 38437.93690729338 }, { "content": "fn has_all_exports(symbols: &Symbols, required: &[&str]) -> bool {\n\n let exports: Vec<&str> = symbols\n\n .iter()\n\n .filter_map(|s| match s {\n\n Symbol::Export { name, .. } => Some(name),\n\n _ => None,\n\n })\n\n .collect();\n\n\n\n for i in required {\n\n if !exports.contains(&i) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "lib/vm/src/compatability.rs", "rank": 84, "score": 38434.72142382845 }, { "content": "fn only_imports(symbols: &Symbols, allowed: &[&str]) -> bool {\n\n let imports: Vec<&str> = symbols\n\n .iter()\n\n .filter_map(|s| match s {\n\n Symbol::Import { name } => Some(name),\n\n _ => None,\n\n })\n\n .collect();\n\n\n\n for i in imports {\n\n if !allowed.contains(&i) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "lib/vm/src/compatability.rs", "rank": 85, "score": 38434.72142382845 }, { "content": "/// we explicitly whitelist the supported opcodes\n\nfn parse_wasm_opcode(opcode: &Operator) -> Result<(), CompileError> {\n\n match opcode {\n\n Operator::Unreachable\n\n | Operator::Nop\n\n | Operator::Block { .. 
}\n\n | Operator::Loop { .. }\n\n | Operator::If { .. }\n\n | Operator::Else\n\n | Operator::End\n\n | Operator::Br { .. }\n\n | Operator::BrIf { .. }\n\n | Operator::BrTable { .. }\n\n | Operator::Return\n\n | Operator::Call { .. }\n\n | Operator::CallIndirect { .. }\n\n | Operator::Drop\n\n | Operator::Select\n\n | Operator::LocalGet { .. }\n\n | Operator::LocalSet { .. }\n\n | Operator::LocalTee { .. }\n", "file_path": "lib/vm/src/middleware/deterministic.rs", "rank": 86, "score": 37013.685636824375 }, { "content": "// panics if any error writing out the schema\n\n// overwrites any existing schema\n\nfn export_schema(schema: &RootSchema, dir: &PathBuf, name: &str) -> () {\n\n let path = dir.join(name);\n\n let json = serde_json::to_string_pretty(schema).unwrap();\n\n write(&path, json.as_bytes()).unwrap();\n\n println!(\"{}\", path.to_str().unwrap());\n\n}\n", "file_path": "examples/schema.rs", "rank": 87, "score": 36555.25490245123 }, { "content": "// panics if any error writing out the schema\n\n// overwrites any existing schema\n\nfn export_schema(schema: &RootSchema, dir: &PathBuf, name: &str) -> () {\n\n let path = dir.join(name);\n\n let json = serde_json::to_string_pretty(schema).unwrap();\n\n write(&path, json.as_bytes()).unwrap();\n\n println!(\"{}\", path.to_str().unwrap());\n\n}\n", "file_path": "contracts/hackatom/examples/schema.rs", "rank": 88, "score": 35328.215481293206 }, { "content": " }\n\n\n\n /// rollback will consume the checkpoint and drop all changes (no really needed, going out of scope does the same, but nice for clarity)\n\n pub fn rollback(self) {}\n\n}\n\n\n\nimpl<'a, S: Storage> ReadonlyStorage for StorageTransaction<'a, S> {\n\n fn get(&self, key: &[u8]) -> Option<Vec<u8>> {\n\n match self.local_state.get(key) {\n\n Some(val) => Some(val),\n\n None => self.storage.get(key),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, S: Storage> Storage for StorageTransaction<'a, S> {\n\n fn set(&mut self, key: &[u8], value: &[u8]) {\n\n self.local_state.set(key, value);\n\n self.rep_log.push(Op::Set {\n\n key: key.to_vec(),\n\n value: value.to_vec(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 89, "score": 35216.65732562025 }, { "content": " use super::*;\n\n use crate::errors::Unauthorized;\n\n use crate::mock::MockStorage;\n\n\n\n #[test]\n\n fn commit_writes_through() {\n\n let mut base = MockStorage::new();\n\n base.set(b\"foo\", b\"bar\");\n\n\n\n let mut check = StorageTransaction::new(&mut base);\n\n assert_eq!(check.get(b\"foo\"), Some(b\"bar\".to_vec()));\n\n check.set(b\"subtx\", b\"works\");\n\n check.commit();\n\n\n\n assert_eq!(base.get(b\"subtx\"), Some(b\"works\".to_vec()));\n\n }\n\n\n\n #[test]\n\n fn rollback_has_no_effect() {\n\n let mut base = MockStorage::new();\n", "file_path": "src/storage.rs", "rank": 90, "score": 35212.793622699835 }, { "content": "use crate::errors::Result;\n\nuse crate::mock::MockStorage;\n\nuse crate::traits::{Api, Extern, ReadonlyStorage, Storage};\n\n\n\npub struct StorageTransaction<'a, S: Storage> {\n\n /// a backing storage that is only modified upon commit\n\n storage: &'a mut S,\n\n /// these are local changes not flushed to backing storage\n\n local_state: MockStorage,\n\n /// this is a list of changes to be written to backing storage upon commit\n\n rep_log: Vec<Op>,\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 91, "score": 35209.19094080003 }, { "content": " base.set(b\"foo\", b\"bar\");\n\n\n\n let mut check = StorageTransaction::new(&mut base);\n\n assert_eq!(check.get(b\"foo\"), 
Some(b\"bar\".to_vec()));\n\n check.set(b\"subtx\", b\"works\");\n\n check.rollback();\n\n\n\n assert_eq!(base.get(b\"subtx\"), None);\n\n }\n\n\n\n #[test]\n\n fn ignore_same_as_rollback() {\n\n let mut base = MockStorage::new();\n\n base.set(b\"foo\", b\"bar\");\n\n\n\n let mut check = StorageTransaction::new(&mut base);\n\n assert_eq!(check.get(b\"foo\"), Some(b\"bar\".to_vec()));\n\n check.set(b\"subtx\", b\"works\");\n\n\n\n assert_eq!(base.get(b\"subtx\"), None);\n", "file_path": "src/storage.rs", "rank": 92, "score": 35205.85773152871 }, { "content": " }\n\n\n\n #[test]\n\n fn transactional_works() {\n\n let mut base = MockStorage::new();\n\n base.set(b\"foo\", b\"bar\");\n\n\n\n // writes on success\n\n let res: Result<i32> = transactional(&mut base, &|store| {\n\n // ensure we can read from the backing store\n\n assert_eq!(store.get(b\"foo\"), Some(b\"bar\".to_vec()));\n\n // we write in the Ok case\n\n store.set(b\"good\", b\"one\");\n\n Ok(5)\n\n });\n\n assert_eq!(5, res.unwrap());\n\n assert_eq!(base.get(b\"good\"), Some(b\"one\".to_vec()));\n\n\n\n // rejects on error\n\n let res: Result<i32> = transactional(&mut base, &|store| {\n", "file_path": "src/storage.rs", "rank": 93, "score": 35204.30733161363 }, { "content": " // ensure we can read from the backing store\n\n assert_eq!(store.get(b\"foo\"), Some(b\"bar\".to_vec()));\n\n assert_eq!(store.get(b\"good\"), Some(b\"one\".to_vec()));\n\n // we write in the Error case\n\n store.set(b\"bad\", b\"value\");\n\n Unauthorized.fail()\n\n });\n\n assert!(res.is_err());\n\n assert_eq!(base.get(b\"bad\"), None);\n\n }\n\n}\n", "file_path": "src/storage.rs", "rank": 94, "score": 35199.36894878062 }, { "content": "enum Op {\n\n Set { key: Vec<u8>, value: Vec<u8> },\n\n}\n\n\n\nimpl<'a, S: Storage> StorageTransaction<'a, S> {\n\n pub fn new(storage: &'a mut S) -> Self {\n\n StorageTransaction {\n\n storage,\n\n local_state: MockStorage::new(),\n\n rep_log: vec![],\n\n }\n\n }\n\n\n\n /// commit will consume the checkpoint and write all changes to the underlying store\n\n pub fn commit(self) {\n\n for op in self.rep_log.iter() {\n\n match op {\n\n Op::Set { key, value } => self.storage.set(&key, &value),\n\n }\n\n }\n", "file_path": "src/storage.rs", "rank": 95, "score": 34002.87279237251 }, { "content": "You can easily convert unit tests to integration tests.\n\n1. First copy them over verbatum,\n\n2. Then change\n\n let mut deps = dependencies(20);\n\nTo\n\n let mut deps = mock_instance(WASM);\n\n3. If you access raw storage, where ever you see something like:\n\n deps.storage.get(CONFIG_KEY).expect(\"no data stored\");\n\n replace it with:\n\n deps.with_storage(|store| {\n\n let data = store.get(CONFIG_KEY).expect(\"no data stored\");\n\n //...\n\n });\n\n4. Anywhere you see query(&deps, ...) 
you must replace it with query(&mut deps, ...)\n\n\n\n**/\n\nstatic WASM: &[u8] = include_bytes!(\"../target/wasm32-unknown-unknown/release/hackatom.wasm\");\n\n\n\n#[test]\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 96, "score": 33559.99854863894 }, { "content": "use std::str::from_utf8;\n\n\n\nuse cosmwasm::mock::mock_params;\n\nuse cosmwasm::serde::{from_slice, to_vec};\n\nuse cosmwasm::traits::{Api, ReadonlyStorage};\n\nuse cosmwasm::types::{coin, CosmosMsg, HumanAddr, QueryResult};\n\n\n\nuse cosmwasm_vm::call_handle;\n\nuse cosmwasm_vm::testing::{handle, init, mock_instance, query};\n\n\n\nuse hackatom::contract::{HandleMsg, InitMsg, QueryMsg, State, CONFIG_KEY};\n\n\n\n/**\n\nThis integration test tries to run and call the generated wasm.\n\nIt depends on a release build being available already. You can create that with:\n\n\n\ncargo wasm && wasm-gc ./target/wasm32-unknown-unknown/release/hackatom.wasm\n\n\n\nThen running `cargo test` will validate we can properly call into that generated data.\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 97, "score": 33555.512755307536 }, { "content": "// This file has some helpers for integration tests.\n\n// They should be imported via full path to ensure there is no confusion\n\n// use cosmwasm_vm::testing::X\n\nuse serde::Serialize;\n\n// JsonSchema is a flag for types meant to be publically exposed\n\nuse schemars::JsonSchema;\n\n\n\nuse cosmwasm::mock::{dependencies, MockApi, MockStorage};\n\nuse cosmwasm::serde::to_vec;\n\nuse cosmwasm::traits::{Api, Storage};\n\nuse cosmwasm::types::{ContractResult, Params, QueryResult};\n\n\n\nuse crate::calls::{call_handle, call_init, call_query};\n\nuse crate::instance::Instance;\n\n\n", "file_path": "lib/vm/src/testing.rs", "rank": 98, "score": 33554.84146263401 }, { "content": " let init_res = init(&mut deps, init_params, init_msg).unwrap();\n\n assert_eq!(0, init_res.messages.len());\n\n\n\n // TRY PANIC\n\n let handle_params = mock_params(&deps.api, beneficiary.as_str(), &[], &coin(\"1000\", \"earth\"));\n\n // panic inside contract should not panic out here\n\n // Note: we need to use the production-call, not the testing call (which unwraps any vm error)\n\n let handle_res = call_handle(\n\n &mut deps,\n\n &handle_params,\n\n &to_vec(&HandleMsg::Panic {}).unwrap(),\n\n );\n\n assert!(handle_res.is_err());\n\n\n\n // TRY INFINITE LOOP\n\n // Note: we need to use the production-call, not the testing call (which unwraps any vm error)\n\n let handle_res = call_handle(\n\n &mut deps,\n\n &handle_params,\n\n &to_vec(&HandleMsg::CpuLoop {}).unwrap(),\n\n );\n\n assert!(handle_res.is_err());\n\n assert_eq!(deps.get_gas(), 0);\n\n}\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 99, "score": 33552.90356844629 } ]
Rust
bee-network/bee-autopeering/src/peer/mod.rs
TeeVeeEss/bee
b98bd114e763a0cebe47ac4b8055873e8009e8e6
pub(crate) mod lists; pub mod peer_id; pub mod stores; use std::{ fmt, net::{IpAddr, SocketAddr}, }; use bytes::BytesMut; use crypto::signatures::ed25519::PublicKey; use libp2p_core::{multiaddr::Protocol, Multiaddr}; use prost::{DecodeError, EncodeError, Message}; use serde::{ de::{SeqAccess, Visitor}, ser::SerializeStruct, Deserialize, Serialize, }; use self::lists::{ActivePeersList, ReplacementPeersList}; pub use self::{peer_id::PeerId, stores::PeerStore}; use crate::{ local::{ services::{ServiceMap, ServiceProtocol}, Local, }, proto, }; #[derive(Clone)] pub struct Peer { peer_id: PeerId, ip_address: IpAddr, services: ServiceMap, } impl Peer { pub fn new(address: IpAddr, public_key: PublicKey) -> Self { let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: address, services: ServiceMap::default(), } } pub fn peer_id(&self) -> &PeerId { &self.peer_id } pub fn public_key(&self) -> &PublicKey { self.peer_id.public_key() } pub fn ip_address(&self) -> IpAddr { self.ip_address } pub fn port(&self, service_name: impl AsRef<str>) -> Option<u16> { self.services().get(service_name).map(|s| s.port()) } pub fn services(&self) -> &ServiceMap { &self.services } pub(crate) fn set_services(&mut self, services: ServiceMap) { self.services = services; } pub fn has_service(&self, service_name: impl AsRef<str>) -> bool { self.services.get(service_name).is_some() } pub fn add_service(&mut self, service_name: impl ToString, protocol: ServiceProtocol, port: u16) { self.services.insert(service_name.to_string(), protocol, port); } pub fn service_socketaddr(&self, service_name: impl AsRef<str>) -> Option<SocketAddr> { self.services .get(service_name) .map(|endpoint| SocketAddr::new(self.ip_address, endpoint.port())) } pub fn service_multiaddr(&self, service_name: impl AsRef<str>) -> Option<Multiaddr> { self.services.get(service_name).map(|endpoint| { let mut multiaddr = Multiaddr::empty(); match self.ip_address { IpAddr::V4(ipv4_addr) => multiaddr.push(Protocol::Ip4(ipv4_addr)), IpAddr::V6(ipv6_addr) => multiaddr.push(Protocol::Ip6(ipv6_addr)), }; multiaddr.push(endpoint.to_libp2p_protocol()); multiaddr }) } pub fn from_protobuf(bytes: &[u8]) -> Result<Self, Error> { proto::Peer::decode(bytes)?.try_into() } pub fn to_protobuf(&self) -> Result<BytesMut, EncodeError> { let services: proto::ServiceMap = self.services().into(); let peer = proto::Peer { ip: self.ip_address.to_string(), public_key: self.public_key().as_ref().to_vec(), services: Some(services), }; let mut buf = BytesMut::with_capacity(peer.encoded_len()); peer.encode(&mut buf)?; Ok(buf) } pub(crate) fn into_id(self) -> PeerId { self.peer_id } } #[cfg(any(feature = "rocksdb1", feature = "sled1"))] impl Peer { pub(crate) fn to_bytes(&self) -> Vec<u8> { bincode::serialize(self).expect("serialization error") } pub(crate) fn from_bytes<B: AsRef<[u8]>>(bytes: B) -> Self { bincode::deserialize(bytes.as_ref()).expect("deserialization error") } } impl fmt::Debug for Peer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Peer") .field("peer_id", &self.peer_id.to_string()) .field("public_key", &bs58::encode(self.public_key().as_ref()).into_string()) .field("ip_address", &self.ip_address) .field("services", &self.services.to_string()) .finish() } } impl TryFrom<proto::Peer> for Peer { type Error = Error; fn try_from(peer: proto::Peer) -> Result<Self, Self::Error> { let proto::Peer { public_key, ip, services, } = peer; let ip_address: IpAddr = ip.parse().map_err(|_| Error::ParseIpAddr)?; let public_key = 
PublicKey::try_from_bytes(public_key.try_into().map_err(|_| Error::PublicKeyBytes)?) .map_err(|_| Error::PublicKeyBytes)?; let peer_id = PeerId::from_public_key(public_key); let services: ServiceMap = services.ok_or(Error::MissingServices)?.try_into()?; Ok(Self { peer_id, ip_address, services, }) } } impl From<&Peer> for proto::Peer { fn from(peer: &Peer) -> Self { Self { ip: peer.ip_address().to_string(), public_key: peer.public_key().as_ref().to_vec(), services: Some(peer.services().into()), } } } impl AsRef<Peer> for Peer { fn as_ref(&self) -> &Self { self } } impl AsRef<PeerId> for Peer { fn as_ref(&self) -> &PeerId { self.peer_id() } } #[cfg(feature = "sled")] impl From<Peer> for sled::IVec { fn from(peer: Peer) -> Self { peer.to_bytes().into() } } #[cfg(feature = "sled")] impl From<sled::IVec> for Peer { fn from(bytes: sled::IVec) -> Self { Peer::from_bytes(bytes) } } impl<'de> Deserialize<'de> for Peer { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_struct("Peer", &["peer_id", "ip_address", "services"], PeerVisitor {}) } } impl Serialize for Peer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { let mut this = serializer.serialize_struct("Peer", 3)?; this.serialize_field("peer_id", &self.peer_id)?; this.serialize_field("ip_address", &self.ip_address)?; this.serialize_field("services", &self.services)?; this.end() } } struct PeerVisitor {} impl<'de> Visitor<'de> for PeerVisitor { type Value = Peer; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("'Peer'") } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: SeqAccess<'de>, { let peer_id = seq .next_element::<PeerId>()? .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?; let ip_address = seq .next_element::<IpAddr>()? .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?; let services = seq .next_element::<ServiceMap>()? 
.ok_or_else(|| serde::de::Error::invalid_length(2, &self))?; Ok(Peer { peer_id, ip_address, services, }) } } pub(crate) fn is_known( peer_id: &PeerId, local: &Local, active_peers: &ActivePeersList, replacements: &ReplacementPeersList, ) -> bool { peer_id == &local.peer_id() || active_peers.read().contains(peer_id) || replacements.read().contains(peer_id) } pub(crate) fn is_verified(peer_id: &PeerId, active_peers: &ActivePeersList) -> bool { active_peers .read() .find(peer_id) .map_or(false, |e| e.metrics().is_verified()) } pub(crate) fn set_front_and_update(peer_id: &PeerId, active_peers: &ActivePeersList) -> Option<usize> { if let Some(p) = active_peers.write().set_newest_and_get_mut(peer_id) { let metrics = p.metrics_mut(); metrics.set_last_verif_response_timestamp(); let new_count = metrics.increment_verified_count(); Some(new_count) } else { None } } #[derive(Debug, thiserror::Error)] pub enum Error { #[error("parsing peer ip address failed")] ParseIpAddr, #[error("peer services missing")] MissingServices, #[error("invalid service description")] Service(#[from] crate::local::services::Error), #[error("invalid public key bytes")] PublicKeyBytes, #[error("{0}")] ProtobufDecode(#[from] DecodeError), #[error("{0}")] ProtobufEncode(#[from] EncodeError), } #[cfg(test)] mod tests { use crypto::signatures::ed25519::SecretKey as PrivateKey; use super::*; use crate::local::services::AUTOPEERING_SERVICE_NAME; impl Peer { pub(crate) fn new_test_peer(index: u8) -> Self { let mut services = ServiceMap::default(); services.insert(AUTOPEERING_SERVICE_NAME, ServiceProtocol::Udp, 1337); let public_key = PrivateKey::generate().unwrap().public_key(); let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: format!("127.0.0.{}", index).parse().unwrap(), services, } } pub(crate) fn num_services(&self) -> usize { self.services().len() } } }
pub(crate) mod lists; pub mod peer_id; pub mod stores; use std::{ fmt, net::{IpAddr, SocketAddr}, }; use bytes::BytesMut; use crypto::signatures::ed25519::PublicKey; use libp2p_core::{multiaddr::Protocol, Multiaddr}; use prost::{DecodeError, EncodeError, Message}; use serde::{ de::{SeqAccess, Visitor}, ser::SerializeStruct, Deserialize, Serialize, }; use self::lists::{ActivePeersList, ReplacementPeersList}; pub use self::{peer_id::PeerId, stores::PeerStore}; use crate::{ local::{ services::{ServiceMap, ServiceProtocol}, Local, }, proto, }; #[derive(Clone)] pub struct Peer { peer_id: PeerId, ip_address: IpAddr, services: ServiceMap, } impl Peer { pub fn new(address: IpAddr, public_key: PublicKey) -> Self { let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: address, services: ServiceMap::default(), } } pub fn peer_id(&self) -> &PeerId { &self.peer_id } pub fn public_key(&self) -> &PublicKey { self.peer_id.public_key() } pub fn ip_address(&self) -> IpAddr { self.ip_address } pub fn port(&self, service_name: impl AsRef<str>) -> Option<u16> { self.services().get(service_name).map(|s| s.port()) } pub fn services(&self) -> &ServiceMap { &self.services } pub(crate) fn set_services(&mut self, services: ServiceMap) { self.services = services; } pub fn has_service(&self, service_name: impl AsRef<str>) -> bool { self.services.get(service_name).is_some() } pub fn add_service(&mut self, service_name: impl ToString, protocol: ServiceProtocol, port: u16) { self.services.insert(service_name.to_string(), protocol, port); }
ip, services, } = peer; let ip_address: IpAddr = ip.parse().map_err(|_| Error::ParseIpAddr)?; let public_key = PublicKey::try_from_bytes(public_key.try_into().map_err(|_| Error::PublicKeyBytes)?) .map_err(|_| Error::PublicKeyBytes)?; let peer_id = PeerId::from_public_key(public_key); let services: ServiceMap = services.ok_or(Error::MissingServices)?.try_into()?; Ok(Self { peer_id, ip_address, services, }) } } impl From<&Peer> for proto::Peer { fn from(peer: &Peer) -> Self { Self { ip: peer.ip_address().to_string(), public_key: peer.public_key().as_ref().to_vec(), services: Some(peer.services().into()), } } } impl AsRef<Peer> for Peer { fn as_ref(&self) -> &Self { self } } impl AsRef<PeerId> for Peer { fn as_ref(&self) -> &PeerId { self.peer_id() } } #[cfg(feature = "sled")] impl From<Peer> for sled::IVec { fn from(peer: Peer) -> Self { peer.to_bytes().into() } } #[cfg(feature = "sled")] impl From<sled::IVec> for Peer { fn from(bytes: sled::IVec) -> Self { Peer::from_bytes(bytes) } } impl<'de> Deserialize<'de> for Peer { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { deserializer.deserialize_struct("Peer", &["peer_id", "ip_address", "services"], PeerVisitor {}) } } impl Serialize for Peer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { let mut this = serializer.serialize_struct("Peer", 3)?; this.serialize_field("peer_id", &self.peer_id)?; this.serialize_field("ip_address", &self.ip_address)?; this.serialize_field("services", &self.services)?; this.end() } } struct PeerVisitor {} impl<'de> Visitor<'de> for PeerVisitor { type Value = Peer; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("'Peer'") } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: SeqAccess<'de>, { let peer_id = seq .next_element::<PeerId>()? .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?; let ip_address = seq .next_element::<IpAddr>()? .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?; let services = seq .next_element::<ServiceMap>()? 
.ok_or_else(|| serde::de::Error::invalid_length(2, &self))?; Ok(Peer { peer_id, ip_address, services, }) } } pub(crate) fn is_known( peer_id: &PeerId, local: &Local, active_peers: &ActivePeersList, replacements: &ReplacementPeersList, ) -> bool { peer_id == &local.peer_id() || active_peers.read().contains(peer_id) || replacements.read().contains(peer_id) } pub(crate) fn is_verified(peer_id: &PeerId, active_peers: &ActivePeersList) -> bool { active_peers .read() .find(peer_id) .map_or(false, |e| e.metrics().is_verified()) } pub(crate) fn set_front_and_update(peer_id: &PeerId, active_peers: &ActivePeersList) -> Option<usize> { if let Some(p) = active_peers.write().set_newest_and_get_mut(peer_id) { let metrics = p.metrics_mut(); metrics.set_last_verif_response_timestamp(); let new_count = metrics.increment_verified_count(); Some(new_count) } else { None } } #[derive(Debug, thiserror::Error)] pub enum Error { #[error("parsing peer ip address failed")] ParseIpAddr, #[error("peer services missing")] MissingServices, #[error("invalid service description")] Service(#[from] crate::local::services::Error), #[error("invalid public key bytes")] PublicKeyBytes, #[error("{0}")] ProtobufDecode(#[from] DecodeError), #[error("{0}")] ProtobufEncode(#[from] EncodeError), } #[cfg(test)] mod tests { use crypto::signatures::ed25519::SecretKey as PrivateKey; use super::*; use crate::local::services::AUTOPEERING_SERVICE_NAME; impl Peer { pub(crate) fn new_test_peer(index: u8) -> Self { let mut services = ServiceMap::default(); services.insert(AUTOPEERING_SERVICE_NAME, ServiceProtocol::Udp, 1337); let public_key = PrivateKey::generate().unwrap().public_key(); let peer_id = PeerId::from_public_key(public_key); Self { peer_id, ip_address: format!("127.0.0.{}", index).parse().unwrap(), services, } } pub(crate) fn num_services(&self) -> usize { self.services().len() } } }
pub fn service_socketaddr(&self, service_name: impl AsRef<str>) -> Option<SocketAddr> { self.services .get(service_name) .map(|endpoint| SocketAddr::new(self.ip_address, endpoint.port())) } pub fn service_multiaddr(&self, service_name: impl AsRef<str>) -> Option<Multiaddr> { self.services.get(service_name).map(|endpoint| { let mut multiaddr = Multiaddr::empty(); match self.ip_address { IpAddr::V4(ipv4_addr) => multiaddr.push(Protocol::Ip4(ipv4_addr)), IpAddr::V6(ipv6_addr) => multiaddr.push(Protocol::Ip6(ipv6_addr)), }; multiaddr.push(endpoint.to_libp2p_protocol()); multiaddr }) } pub fn from_protobuf(bytes: &[u8]) -> Result<Self, Error> { proto::Peer::decode(bytes)?.try_into() } pub fn to_protobuf(&self) -> Result<BytesMut, EncodeError> { let services: proto::ServiceMap = self.services().into(); let peer = proto::Peer { ip: self.ip_address.to_string(), public_key: self.public_key().as_ref().to_vec(), services: Some(services), }; let mut buf = BytesMut::with_capacity(peer.encoded_len()); peer.encode(&mut buf)?; Ok(buf) } pub(crate) fn into_id(self) -> PeerId { self.peer_id } } #[cfg(any(feature = "rocksdb1", feature = "sled1"))] impl Peer { pub(crate) fn to_bytes(&self) -> Vec<u8> { bincode::serialize(self).expect("serialization error") } pub(crate) fn from_bytes<B: AsRef<[u8]>>(bytes: B) -> Self { bincode::deserialize(bytes.as_ref()).expect("deserialization error") } } impl fmt::Debug for Peer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Peer") .field("peer_id", &self.peer_id.to_string()) .field("public_key", &bs58::encode(self.public_key().as_ref()).into_string()) .field("ip_address", &self.ip_address) .field("services", &self.services.to_string()) .finish() } } impl TryFrom<proto::Peer> for Peer { type Error = Error; fn try_from(peer: proto::Peer) -> Result<Self, Self::Error> { let proto::Peer { public_key,
random
[ { "content": "pub fn get_network_config_with_port(port: u16) -> NetworkConfig {\n\n let mut config = NetworkConfig::default();\n\n config.replace_port(Protocol::Tcp(port)).unwrap();\n\n config\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/src/tests/common/network_config.rs", "rank": 0, "score": 330824.6455571034 }, { "content": "pub fn gen_deterministic_peer_id(gen: impl ToString) -> PeerId {\n\n let keys = gen_deterministic_keys(gen);\n\n PeerId::from_public_key(&PublicKey::Ed25519(keys.public()))\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/examples/common/keys_and_ids.rs", "rank": 1, "score": 323527.1621208399 }, { "content": "/// Unpacks an optional payload from a reader.\n\npub fn option_payload_unpack<R: Read + ?Sized, const CHECK: bool>(\n\n reader: &mut R,\n\n) -> Result<(usize, Option<Payload>), Error> {\n\n let payload_len = u32::unpack_inner::<R, CHECK>(reader)? as usize;\n\n\n\n if payload_len > 0 {\n\n let payload = Payload::unpack_inner::<R, CHECK>(reader)?;\n\n if payload_len != payload.packed_len() {\n\n Err(Error::InvalidPayloadLength(payload_len, payload.packed_len()))\n\n } else {\n\n Ok((payload_len, Some(payload)))\n\n }\n\n } else {\n\n Ok((0, None))\n\n }\n\n}\n", "file_path": "bee-message/src/payload/mod.rs", "rank": 2, "score": 322721.7441755992 }, { "content": "/// Creates the corresponding `libp2p_core::PeerId` from a crypto.rs ED25519 public key.\n\npub fn libp2p_peer_id(public_key: &PublicKey) -> libp2p_core::PeerId {\n\n libp2p_core::PeerId::from_public_key(&libp2p_public_key(public_key))\n\n}\n\n\n", "file_path": "bee-network/bee-autopeering/src/peer/peer_id.rs", "rank": 3, "score": 318809.08821024525 }, { "content": "/// Creates the corresponding `libp2p_core::PublicKey` from a crypto.rs ED25519 public key.\n\npub fn libp2p_public_key(public_key: &PublicKey) -> libp2p_core::PublicKey {\n\n libp2p_core::PublicKey::Ed25519(\n\n libp2p_core::identity::ed25519::PublicKey::decode(public_key.as_ref())\n\n .expect(\"error decoding ed25519 public key from bytes\"),\n\n )\n\n}\n\n\n\nimpl Eq for PeerId {}\n\nimpl PartialEq for PeerId {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.id_bytes == other.id_bytes\n\n }\n\n}\n\n\n\nimpl Hash for PeerId {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.id_bytes.hash(state);\n\n }\n\n}\n\n\n", "file_path": "bee-network/bee-autopeering/src/peer/peer_id.rs", "rank": 4, "score": 285364.948125809 }, { "content": "/// Generates a random boolean.\n\npub fn rand_bool() -> bool {\n\n rand::thread_rng().gen::<bool>()\n\n}\n", "file_path": "bee-test/src/rand/bool.rs", "rank": 5, "score": 281705.20252209733 }, { "content": "/// Generates a random address.\n\npub fn rand_address() -> Address {\n\n #[allow(clippy::modulo_one)]\n\n Address::from(match rand_number::<u64>() % 1 {\n\n 0 => rand_ed25519_address(),\n\n _ => unreachable!(),\n\n })\n\n}\n", "file_path": "bee-test/src/rand/address.rs", "rank": 6, "score": 281649.0711918762 }, { "content": "pub fn init<N: Node>(\n\n config: config::ProtocolConfig,\n\n network_id: (String, u64),\n\n network_events: NetworkEventRx,\n\n autopeering_events: Option<AutopeeringEventRx>,\n\n node_builder: N::Builder,\n\n) -> N::Builder\n\nwhere\n\n N::Backend: storage::StorageBackend,\n\n{\n\n node_builder\n\n .with_worker::<MetricsWorker>()\n\n .with_worker::<PeerManagerResWorker>()\n\n .with_worker_cfg::<PeerManagerWorker>(PeerManagerConfig {\n\n network_rx: network_events,\n\n peering_rx: autopeering_events,\n\n network_name: network_id.0,\n\n })\n\n 
.with_worker_cfg::<HasherWorker>(config.clone())\n\n .with_worker_cfg::<ProcessorWorker>(network_id.1)\n", "file_path": "bee-protocol/src/workers/mod.rs", "rank": 7, "score": 272408.1082505589 }, { "content": "pub fn is_healthy<B: StorageBackend>(tangle: &Tangle<B>, peer_manager: &PeerManager) -> bool {\n\n if !tangle.is_confirmed_threshold(HEALTH_CONFIRMED_THRESHOLD) {\n\n return false;\n\n }\n\n\n\n if peer_manager.connected_peers() == 0 {\n\n return false;\n\n }\n\n\n\n match tangle.get_milestone(tangle.get_latest_milestone_index()) {\n\n Some(milestone) => {\n\n (SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Clock may have gone backwards\")\n\n .as_secs() as u64)\n\n .saturating_sub(milestone.timestamp())\n\n <= HEALTH_MILESTONE_AGE_MAX\n\n }\n\n None => false,\n\n }\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/health.rs", "rank": 9, "score": 265294.59373119246 }, { "content": "pub fn gen_random_peer_id() -> PeerId {\n\n PeerId::from_public_key(&libp2p_core::PublicKey::Ed25519(Keypair::generate().public()))\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/examples/common/keys_and_ids.rs", "rank": 10, "score": 263789.74533127213 }, { "content": "struct ActivePeerVisitor {}\n\n\n\nimpl<'de> Visitor<'de> for ActivePeerVisitor {\n\n type Value = ActivePeer;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"'ActivePeer'\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n\n {\n\n let peer = seq\n\n .next_element::<Peer>()?\n\n .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?;\n\n\n\n let metrics = seq\n\n .next_element::<PeerMetrics>()?\n\n .ok_or_else(|| serde::de::Error::invalid_length(1, &self))?;\n", "file_path": "bee-network/bee-autopeering/src/peer/lists.rs", "rank": 11, "score": 263170.2132122743 }, { "content": "fn split_multiaddr(multiaddr: &str) -> Result<(Multiaddr, PeerId), Error> {\n\n let mut multiaddr: Multiaddr = multiaddr\n\n .parse()\n\n .map_err(|_| Error::ParsingFailed(multiaddr.to_string()))?;\n\n\n\n if let Protocol::P2p(multihash) = multiaddr.pop().ok_or(Error::MultiaddrUnderspecified)? 
{\n\n Ok((\n\n multiaddr,\n\n PeerId::from_multihash(multihash).expect(\"Invalid peer Multiaddr: Make sure your peer's Id is complete.\"),\n\n ))\n\n } else {\n\n Err(Error::MissingP2pProtocol)\n\n }\n\n}\n\n\n\n#[derive(Deserialize, PartialEq)]\n\n#[must_use]\n\npub struct PeerBuilder {\n\n #[serde(alias = \"address\")]\n\n multiaddr: String,\n", "file_path": "bee-network/bee-gossip/src/config.rs", "rank": 12, "score": 262906.491163257 }, { "content": "pub fn gen_constant_peer_id() -> PeerId {\n\n \"12D3KooWJWEKvSFbben74C7H4YtKjhPMTDxd7gP7zxWSUEeF27st\".parse().unwrap()\n\n}\n\n\n\n#[cfg(feature = \"full\")]\n\nmod full {\n\n\n\n use std::iter::repeat;\n\n\n\n use libp2p_core::identity::{\n\n ed25519::{Keypair, SecretKey},\n\n PublicKey,\n\n };\n\n\n\n use super::*;\n\n\n\n pub fn gen_random_peer_id() -> PeerId {\n\n PeerId::from_public_key(&libp2p_core::PublicKey::Ed25519(Keypair::generate().public()))\n\n }\n\n\n", "file_path": "bee-network/bee-gossip/src/tests/common/keys_and_ids.rs", "rank": 13, "score": 261938.71844171014 }, { "content": "/// Prints the Bee banner and the commit version to stdout.\n\npub fn print_banner_and_version(print_banner: bool) {\n\n let version = if BEE_GIT_COMMIT.is_empty() {\n\n BEE_VERSION.to_owned()\n\n } else {\n\n BEE_VERSION.to_owned() + \"-\" + &BEE_GIT_COMMIT[0..7]\n\n };\n\n if print_banner {\n\n println!(\n\n \"\n\n██████╗ ███████╗███████╗\n\n██╔══██╗██╔════╝██╔════╝\n\n██████╦╝█████╗ █████╗\n\n██╔══██╗██╔══╝ ██╔══╝\n\n██████╦╝███████╗███████╗\n\n╚═════╝ ╚══════╝╚══════╝\n\n{: ^24}\\n\",\n\n version\n\n );\n\n } else {\n\n println!(\"{}\", version);\n\n }\n\n}\n", "file_path": "bee-node/src/util.rs", "rank": 14, "score": 254690.92626727826 }, { "content": "/// Mandatory functionality of any peer store.\n\npub trait PeerStore: Clone + Send + Sync {\n\n /// The peer store configuration.\n\n type Config;\n\n\n\n /// Error raised when a peer store operation fails.\n\n type Error: Error + Send;\n\n\n\n /// Creates a new peer store from config.\n\n fn new(config: Self::Config) -> Result<Self, Self::Error>;\n\n\n\n /// Stores an active peer.\n\n fn store_active(&self, peer: ActivePeer) -> Result<(), Self::Error>;\n\n\n\n /// Stores all current active peers.\n\n fn store_all_active(&self, peers: &ActivePeersList) -> Result<(), Self::Error>;\n\n\n\n /// Stores a replacement peer.\n\n fn store_replacement(&self, peer: Peer) -> Result<(), Self::Error>;\n\n\n\n /// Stores all current replacement peers.\n", "file_path": "bee-network/bee-autopeering/src/peer/stores/mod.rs", "rank": 15, "score": 253534.15838719366 }, { "content": "/// Returns the packed length of an optional payload.\n\npub fn option_payload_packed_len(payload: Option<&Payload>) -> usize {\n\n 0u32.packed_len() + payload.map_or(0, Packable::packed_len)\n\n}\n\n\n", "file_path": "bee-message/src/payload/mod.rs", "rank": 16, "score": 250774.73966596817 }, { "content": "fn handle_peering_dropped(peer_id: bee_autopeering::PeerId, gossip_command_tx: &NetworkCommandSender) {\n\n let peer_id = peer_id.libp2p_peer_id();\n\n\n\n // Panic: sending commands cannot fail: same explanation as in other sender usages.\n\n gossip_command_tx\n\n .send(Command::RemovePeer { peer_id })\n\n .expect(\"send command to gossip layer\");\n\n}\n", "file_path": "bee-protocol/src/workers/peer/manager.rs", "rank": 17, "score": 249041.63673119422 }, { "content": "/// Not exactly fuzzing, just doing something a lot\n\npub fn fuzz(n: usize, mut f: impl FnMut()) {\n\n (0..n).for_each(|_| f());\n\n}\n", "file_path": 
"bee-test/src/ternary.rs", "rank": 18, "score": 248395.90897507974 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", Ed25519Address::from_str(ED25519_ADDRESS).unwrap()),\n\n \"Ed25519Address(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/ed25519_address.rs", "rank": 19, "score": 247534.1966166044 }, { "content": "fn format_autopeering_multiaddr(host_multiaddr: &Multiaddr, public_key: &PublicKey) -> String {\n\n format!(\n\n \"{}/{}/{}\",\n\n host_multiaddr,\n\n AUTOPEERING_MULTIADDR_PROTOCOL_NAME,\n\n pubkey_to_base58(public_key),\n\n )\n\n}\n\n\n\nimpl FromStr for AutopeeringMultiaddr {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let parts = s\n\n .split_terminator(&format!(\"/{}/\", AUTOPEERING_MULTIADDR_PROTOCOL_NAME))\n\n .collect::<Vec<&str>>();\n\n\n\n if parts.len() != 2 {\n\n return Err(Error::AutopeeringMultiaddr);\n\n }\n", "file_path": "bee-network/bee-autopeering/src/multiaddr.rs", "rank": 20, "score": 244356.1609788743 }, { "content": "fn process(\n\n message_id: MessageId,\n\n message: Message,\n\n transaction_payload_worker: &mpsc::UnboundedSender<TransactionPayloadWorkerEvent>,\n\n milestone_payload_worker: &mpsc::UnboundedSender<MilestonePayloadWorkerEvent>,\n\n indexation_payload_worker: &mpsc::UnboundedSender<IndexationPayloadWorkerEvent>,\n\n) {\n\n match message.payload() {\n\n Some(Payload::Transaction(_)) => {\n\n if transaction_payload_worker\n\n .send(TransactionPayloadWorkerEvent { message_id, message })\n\n .is_err()\n\n {\n\n error!(\"Sending message {} to transaction payload worker failed.\", message_id);\n\n }\n\n }\n\n Some(Payload::Milestone(_)) => {\n\n if milestone_payload_worker\n\n .send(MilestonePayloadWorkerEvent { message_id, message })\n\n .is_err()\n", "file_path": "bee-protocol/src/workers/message/payload/mod.rs", "rank": 21, "score": 243013.3211492854 }, { "content": "/// Generates a random message.\n\npub fn rand_message() -> Message {\n\n rand_message_with_parents(rand_parents())\n\n}\n", "file_path": "bee-test/src/rand/message.rs", "rank": 22, "score": 239663.1465919581 }, { "content": "/// Generates a random ED25519 address.\n\npub fn rand_ed25519_address() -> Ed25519Address {\n\n Ed25519Address::new(rand_bytes_32())\n\n}\n\n\n", "file_path": "bee-test/src/rand/address.rs", "rank": 23, "score": 237270.27701568435 }, { "content": "/// Generates a random message id.\n\npub fn rand_message_id() -> MessageId {\n\n MessageId::new(rand_bytes_32())\n\n}\n\n\n", "file_path": "bee-test/src/rand/message.rs", "rank": 24, "score": 235660.1641119391 }, { "content": "pub fn get_in_memory_network_config(port: u64) -> NetworkConfig {\n\n NetworkConfig::build_in_memory()\n\n .with_bind_multiaddr({\n\n let mut m = Multiaddr::empty();\n\n m.push(Protocol::Memory(port));\n\n m\n\n })\n\n .finish()\n\n}\n", "file_path": "bee-network/bee-gossip/src/tests/common/network_config.rs", "rank": 25, "score": 235293.3440302853 }, { "content": "pub fn gen_deterministic_keys(gen: impl ToString) -> Keypair {\n\n let gen = gen.to_string();\n\n\n\n let div = 64 / gen.len();\n\n let rem = 64 % gen.len();\n\n\n\n let identity_sk = repeat(gen.clone())\n\n .take(div)\n\n .chain(gen.chars().map(Into::into).take(rem))\n\n .collect::<String>();\n\n\n\n // Panic:\n\n // The input consists only of valid hex chars and the length for the secret key\n\n // is also correct. 
Hence, the `unwrap`s are fine.\n\n let mut hex_sk = hex::decode(identity_sk).expect(\"invalid generated secret key\");\n\n let sk = SecretKey::from_bytes(&mut hex_sk).unwrap();\n\n sk.into()\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/examples/common/keys_and_ids.rs", "rank": 26, "score": 234206.4537275128 }, { "content": "/// Generates a random unrefere,ced message.\n\npub fn rand_unreferenced_message() -> UnreferencedMessage {\n\n rand_message_id().into()\n\n}\n", "file_path": "bee-test/src/rand/unreferenced_message.rs", "rank": 27, "score": 233736.26026360973 }, { "content": "fn verify_signature(address: &Address, unlock_blocks: &UnlockBlocks, index: usize, essence_hash: &[u8; 32]) -> bool {\n\n if let Some(UnlockBlock::Signature(signature)) = unlock_blocks.get(index) {\n\n address.verify(essence_hash, signature).is_ok()\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "bee-ledger/src/workers/consensus/white_flag.rs", "rank": 28, "score": 233447.45282619866 }, { "content": "fn path() -> impl Filter<Extract = (PeerId,), Error = Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"peers\"))\n\n .and(peer_id())\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n peer_manager: ResourceHandle<PeerManager>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_PEER, public_routes, allowed_ips))\n\n .and(with_peer_manager(peer_manager))\n\n .and_then(|peer_id, peer_manager| async move { peer(peer_id, peer_manager) })\n\n .boxed()\n\n}\n\n\n\npub(crate) fn peer(peer_id: PeerId, peer_manager: ResourceHandle<PeerManager>) -> Result<impl Reply, Rejection> {\n\n peer_manager\n\n .get_map(&peer_id, |peer_entry| {\n\n Ok(warp::reply::json(&SuccessBody::new(PeerResponse(PeerDto::from(\n\n peer_entry.0.as_ref(),\n\n )))))\n\n })\n\n .unwrap_or_else(|| Err(reject::custom(CustomRejection::NotFound(\"peer not found\".to_string()))))\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/peer.rs", "rank": 29, "score": 232744.67605901352 }, { "content": "/// Generates a random message metadata.\n\npub fn rand_message_metadata() -> MessageMetadata {\n\n MessageMetadata::new(\n\n unsafe { Flags::from_bits_unchecked(rand_number::<u8>()) },\n\n rand_option(rand_milestone_index()),\n\n rand_number(),\n\n rand_number(),\n\n rand_number(),\n\n rand_option((\n\n IndexId::new(rand_milestone_index(), rand_message_id()),\n\n IndexId::new(rand_milestone_index(), rand_message_id()),\n\n )),\n\n rand_conflict_reason(),\n\n )\n\n}\n", "file_path": "bee-test/src/rand/metadata.rs", "rank": 30, "score": 230445.30685167096 }, { "content": "/// Generates a random message with given parents.\n\npub fn rand_message_with_parents(parents: Parents) -> Message {\n\n MessageBuilder::<u64>::new()\n\n .with_network_id(rand_number())\n\n .with_parents(parents)\n\n .with_payload(rand_payload_for_message())\n\n .with_nonce_provider(rand_number(), 0f64)\n\n .finish()\n\n .unwrap()\n\n}\n\n\n", "file_path": "bee-test/src/rand/message.rs", "rank": 31, "score": 226953.28481158544 }, { "content": "/// Initializes the ledger workers.\n\npub fn init<N>(\n\n node_builder: N::Builder,\n\n network_id: u64,\n\n snapshot_config: SnapshotConfig,\n\n pruning_config: PruningConfig,\n\n) -> N::Builder\n\nwhere\n\n N: Node,\n\n N::Backend: StorageBackend,\n\n{\n\n node_builder\n\n .with_worker_cfg::<SnapshotWorker>((network_id, snapshot_config.clone()))\n\n 
.with_worker_cfg::<ConsensusWorker>((snapshot_config, pruning_config))\n\n}\n", "file_path": "bee-ledger/src/workers/mod.rs", "rank": 32, "score": 226485.61624246635 }, { "content": "pub fn shutdown(secs: u64) -> impl Future + Send + Unpin {\n\n Box::new(Box::pin(time::sleep(Duration::from_secs(secs))))\n\n}\n", "file_path": "bee-network/bee-gossip/src/tests/common/shutdown.rs", "rank": 33, "score": 226168.29626278338 }, { "content": "fn path() -> impl Filter<Extract = (PeerId,), Error = warp::Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"peers\"))\n\n .and(peer_id())\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n network_command_sender: ResourceHandle<NetworkCommandSender>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::delete())\n\n .and(has_permission(ROUTE_REMOVE_PEER, public_routes, allowed_ips))\n\n .and(with_network_command_sender(network_command_sender))\n\n .and_then(|peer_id, network_controller| async move { remove_peer(peer_id, network_controller) })\n\n .boxed()\n\n}\n\n\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/remove_peer.rs", "rank": 34, "score": 225929.7093409357 }, { "content": "fn set_outbound_update_interval(outbound_nbh: &OutboundNeighborhood, local: &Local) {\n\n let mut delay = OPEN_OUTBOUND_NBH_UPDATE_SECS;\n\n\n\n if outbound_nbh.is_full() {\n\n delay = FULL_OUTBOUND_NBH_UPDATE_SECS\n\n };\n\n\n\n // Panic: We don't allow invalid salts.\n\n let salt_expiration =\n\n Duration::from_secs(time::until(local.public_salt().expiration_time()).expect(\"time until error\"));\n\n\n\n if salt_expiration < delay {\n\n delay = salt_expiration;\n\n }\n\n\n\n OUTBOUND_NBH_UPDATE_INTERVAL.set(delay);\n\n}\n", "file_path": "bee-network/bee-autopeering/src/peering/update.rs", "rank": 35, "score": 225609.67634274997 }, { "content": "/// Produce an iterator over the [`Btrit`]s that make up a given integer.\n\npub fn signed_int_trits<I>(x: I) -> impl Iterator<Item = Btrit> + Clone\n\nwhere\n\n I: Clone + AsPrimitive<i8> + FromPrimitive + Signed,\n\n{\n\n let is_neg = x.is_negative();\n\n let mut x = if is_neg { x } else { -x };\n\n\n\n let radix = I::from_i8(3).unwrap();\n\n\n\n core::iter::from_fn(move || {\n\n if x.is_zero() {\n\n None\n\n } else {\n\n let modulus = ((x + I::one()).abs() % radix).as_();\n\n x = x / radix;\n\n if modulus == 1 {\n\n x = x + -I::one();\n\n }\n\n Some(Btrit::try_from(((modulus + 2) % 3 - 1) * if is_neg { -1 } else { 1 }).unwrap())\n\n }\n\n })\n\n // If the integer is exactly 0, add an extra trit\n\n .chain(Some(Btrit::Zero).filter(|_| x.is_zero()))\n\n}\n\n\n", "file_path": "bee-ternary/src/convert.rs", "rank": 36, "score": 225544.63151930083 }, { "content": "/// Packs an optional payload to a writer.\n\npub fn option_payload_pack<W: Write>(writer: &mut W, payload: Option<&Payload>) -> Result<(), Error> {\n\n if let Some(payload) = payload {\n\n (payload.packed_len() as u32).pack(writer)?;\n\n payload.pack(writer)?;\n\n } else {\n\n 0u32.pack(writer)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bee-message/src/payload/mod.rs", "rank": 37, "score": 224963.93609567027 }, { "content": "// Hive.go: returns the oldest peer, or nil if empty.\n\nfn peer_to_reverify(active_peers: &ActivePeersList) -> Option<PeerId> {\n\n active_peers.read().get_oldest().map(|p| *p.peer_id())\n\n}\n\n\n\n// Hive.go:\n\n// The current strategy is to always select the latest verified peer and one of\n\n// the peers that returned the 
most number of peers the last time it was queried.\n\npub(crate) fn query_fn() -> Repeat<QueryContext> {\n\n Box::new(|ctx| {\n\n let peers = select_peers_to_query(&ctx.active_peers);\n\n if peers.is_empty() {\n\n log::info!(\"No peers to query.\");\n\n } else {\n\n log::info!(\"Querying {} peer/s...\", peers.len());\n\n\n\n for peer_id in peers.into_iter() {\n\n let ctx_ = ctx.clone();\n\n\n\n // TODO: introduce `UnsupervisedTask` type, that always finishes after a timeout.\n\n tokio::spawn(async move {\n", "file_path": "bee-network/bee-autopeering/src/discovery/query.rs", "rank": 38, "score": 223991.50045224995 }, { "content": "struct IncomingPacketHandler<const USE_IP_V6: bool> {\n\n incoming_socket: Arc<UdpSocket>,\n\n incoming_senders: IncomingPacketSenders,\n\n bind_addr: SocketAddr,\n\n}\n\n\n\n// Note: Invalid packets from peers are not logged as warnings because the fault is not on our side.\n\n#[async_trait::async_trait]\n\nimpl<const USE_IP_V6: bool> Runnable for IncomingPacketHandler<USE_IP_V6> {\n\n const NAME: &'static str = if USE_IP_V6 {\n\n \"IncomingIPv6PacketHandler\"\n\n } else {\n\n \"IncomingIPv4PacketHandler\"\n\n };\n\n const SHUTDOWN_PRIORITY: u8 = 2;\n\n\n\n type ShutdownSignal = ShutdownRx;\n\n\n\n async fn run(self, mut shutdown_rx: Self::ShutdownSignal) {\n\n let IncomingPacketHandler {\n", "file_path": "bee-network/bee-autopeering/src/server.rs", "rank": 39, "score": 222627.8172174011 }, { "content": "struct OutgoingPacketHandler<const USE_IP_V6: bool> {\n\n outgoing_socket: Arc<UdpSocket>,\n\n outgoing_rx: OutgoingPacketRx,\n\n local: Local,\n\n bind_addr: SocketAddr,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<const USE_IP_V6: bool> Runnable for OutgoingPacketHandler<USE_IP_V6> {\n\n const NAME: &'static str = if USE_IP_V6 {\n\n \"OutgoingIPv6PacketHandler\"\n\n } else {\n\n \"OutgoingIPv4PacketHandler\"\n\n };\n\n const SHUTDOWN_PRIORITY: u8 = 3;\n\n\n\n type ShutdownSignal = ShutdownRx;\n\n\n\n async fn run(self, mut shutdown_rx: Self::ShutdownSignal) {\n\n let OutgoingPacketHandler {\n", "file_path": "bee-network/bee-autopeering/src/server.rs", "rank": 40, "score": 222627.8172174011 }, { "content": "// Hive.go: selects the peers that should be queried.\n\nfn select_peers_to_query(active_peers: &ActivePeersList) -> Vec<PeerId> {\n\n let mut verif_peers = manager::get_verified_peers(active_peers);\n\n\n\n // If we have less than 3 verified peers, then we use those for the query.\n\n if verif_peers.len() < 3 {\n\n verif_peers.into_iter().map(|ap| *ap.peer_id()).collect::<Vec<_>>()\n\n } else {\n\n // Note: this macro is useful to remove some noise from the pattern matching rules.\n\n macro_rules! 
num {\n\n ($t:expr) => {\n\n // Panic: we made sure, that unwrap is always okay.\n\n $t.as_ref().unwrap().metrics().last_new_peers()\n\n };\n\n }\n\n\n\n let latest = *verif_peers.remove(0).peer_id();\n\n let len = verif_peers.len().min(3);\n\n\n\n // Note: This loop finds the three \"heaviest\" peers with one iteration over an unsorted vec of verified peers.\n\n let heaviest3 = verif_peers.into_iter().fold(\n", "file_path": "bee-network/bee-autopeering/src/discovery/query.rs", "rank": 41, "score": 222393.9275467879 }, { "content": "/// Generates a random payload for a message.\n\npub fn rand_payload_for_message() -> Payload {\n\n rand_indexation_payload().into()\n\n}\n", "file_path": "bee-test/src/rand/payload.rs", "rank": 42, "score": 221051.4337462165 }, { "content": "/// Produce an iterator over the [`Utrit`]s that make up a given integer.\n\npub fn unsigned_int_trits<I>(mut x: I) -> impl Iterator<Item = Utrit> + Clone\n\nwhere\n\n I: Clone + AsPrimitive<u8> + FromPrimitive + Num,\n\n{\n\n let radix = I::from_u8(3).unwrap();\n\n\n\n core::iter::from_fn(move || {\n\n if x.is_zero() {\n\n None\n\n } else {\n\n let modulus = (x % radix).as_();\n\n x = x / radix;\n\n Some(Utrit::try_from(modulus).unwrap())\n\n }\n\n })\n\n // If the integer is exactly 0, add an extra trit\n\n .chain(Some(Utrit::Zero).filter(|_| x.is_zero()))\n\n}\n", "file_path": "bee-ternary/src/convert.rs", "rank": 43, "score": 220443.38045326254 }, { "content": "/// Generates a vector of random message ids of a given length.\n\npub fn rand_message_ids(len: usize) -> Vec<MessageId> {\n\n let mut parents = (0..len).map(|_| rand_message_id()).collect::<Vec<_>>();\n\n parents.sort_by(|a, b| a.as_ref().cmp(b.as_ref()));\n\n parents\n\n}\n\n\n", "file_path": "bee-test/src/rand/message.rs", "rank": 44, "score": 220337.70459282835 }, { "content": "fn path() -> impl Filter<Extract = (Address,), Error = Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"addresses\"))\n\n .and(bech32_address())\n\n .and(warp::path(\"outputs\"))\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n consensus_worker: mpsc::UnboundedSender<ConsensusWorkerCommand>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_OUTPUTS_BECH32, public_routes, allowed_ips))\n\n .and(with_consensus_worker(consensus_worker))\n\n .and_then(|addr, consensus_worker| async move { outputs_bech32(addr, consensus_worker).await })\n\n .boxed()\n\n}\n\n\n\npub(crate) async fn outputs_bech32(\n\n addr: Address,\n\n consensus_worker: mpsc::UnboundedSender<ConsensusWorkerCommand>,\n\n) -> Result<impl Reply, Rejection> {\n\n match addr {\n\n Address::Ed25519(a) => outputs_ed25519(a, consensus_worker).await,\n\n }\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/outputs_bech32.rs", "rank": 45, "score": 220107.14347679957 }, { "content": "#[derive(Default)]\n\nstruct InMemoryPeerStoreInner {\n\n active_peers: HashMap<PeerId, ActivePeer>,\n\n replacements: HashMap<PeerId, Peer>,\n\n}\n\n\n\nimpl InMemoryPeerStore {\n\n fn read(&self) -> RwLockReadGuard<InMemoryPeerStoreInner> {\n\n self.inner.read().expect(\"error getting read access\")\n\n }\n\n\n\n fn write(&self) -> RwLockWriteGuard<InMemoryPeerStoreInner> {\n\n self.inner.write().expect(\"error getting write access\")\n\n }\n\n}\n\n\n\nimpl PeerStore for InMemoryPeerStore {\n\n type Config = ();\n\n\n\n type Error = Infallible;\n\n\n", "file_path": 
"bee-network/bee-autopeering/src/peer/stores/in_memory.rs", "rank": 46, "score": 219390.009621793 }, { "content": "fn handle_new_peering(peer: bee_autopeering::Peer, network_name: &str, gossip_command_tx: &NetworkCommandSender) {\n\n if let Some(multiaddr) = peer.service_multiaddr(network_name) {\n\n let peer_id = peer.peer_id().libp2p_peer_id();\n\n\n\n // Panic: sending commands cannot fail due to worker dependencies: because the \"Peer Manager\" depends on\n\n // the `bee-gossip` \"ServiceHost\", it is guaranteed that the receiver of this channel is not dropped\n\n // before the sender.\n\n gossip_command_tx\n\n .send(Command::AddPeer {\n\n peer_id,\n\n alias: Some(alias!(peer_id).to_string()),\n\n multiaddr,\n\n relation: PeerRelation::Discovered,\n\n })\n\n .expect(\"send command to gossip layer\");\n\n }\n\n}\n\n\n", "file_path": "bee-protocol/src/workers/peer/manager.rs", "rank": 47, "score": 219174.50367399963 }, { "content": "pub fn message_id_to_message_access<B: StorageBackend>(storage: &B) {\n\n let (message_id, message) = (rand_message_id(), rand_message());\n\n\n\n assert!(!Exist::<MessageId, Message>::exist(storage, &message_id).unwrap());\n\n assert!(\n\n Fetch::<MessageId, Message>::fetch(storage, &message_id)\n\n .unwrap()\n\n .is_none()\n\n );\n\n let results = MultiFetch::<MessageId, Message>::multi_fetch(storage, &[message_id])\n\n .unwrap()\n\n .collect::<Vec<_>>();\n\n assert_eq!(results.len(), 1);\n\n assert!(matches!(results.get(0), Some(Ok(None))));\n\n\n\n Insert::<MessageId, Message>::insert(storage, &message_id, &message).unwrap();\n\n\n\n let message = rand_message();\n\n Insert::<MessageId, Message>::insert(storage, &message_id, &message).unwrap();\n\n assert_eq!(\n", "file_path": "bee-storage/bee-storage-test/src/message_id_to_message.rs", "rank": 48, "score": 215373.6493613105 }, { "content": "pub fn start_inbound_gossip_handler(\n\n peer_id: PeerId,\n\n mut inbound_gossip_rx: BufReader<ReadHalf<Box<NegotiatedSubstream>>>,\n\n inbound_gossip_tx: GossipSender,\n\n internal_event_tx: InternalEventSender,\n\n) {\n\n tokio::spawn(async move {\n\n let mut buf = vec![0u8; MSG_BUFFER_LEN];\n\n\n\n loop {\n\n if let Some(len) = (&mut inbound_gossip_rx)\n\n .read(&mut buf)\n\n .await\n\n .ok()\n\n .filter(|len| *len > 0)\n\n {\n\n if inbound_gossip_tx.send(buf[..len].to_vec()).is_err() {\n\n debug!(\"Terminating gossip protocol with {}.\", alias!(peer_id));\n\n\n\n break;\n", "file_path": "bee-network/bee-gossip/src/swarm/protocols/iota_gossip/io.rs", "rank": 49, "score": 214902.80265461156 }, { "content": "pub fn start_outbound_gossip_handler(\n\n peer_id: PeerId,\n\n mut outbound_gossip_tx: BufWriter<WriteHalf<Box<NegotiatedSubstream>>>,\n\n outbound_gossip_rx: GossipReceiver,\n\n internal_event_tx: InternalEventSender,\n\n) {\n\n tokio::spawn(async move {\n\n let mut outbound_gossip_rx = outbound_gossip_rx.fuse();\n\n\n\n // If the gossip sender dropped we end the connection.\n\n while let Some(message) = outbound_gossip_rx.next().await {\n\n // Note: Instead of polling another shutdown channel, we use an empty message\n\n // to signal that we want to end the connection. 
We use this \"trick\" whenever the network\n\n // receives the `DisconnectPeer` command to enforce that the connection will be dropped.\n\n if message.is_empty() {\n\n debug!(\n\n \"Terminating gossip protocol with {} (received shutdown signal).\",\n\n alias!(peer_id)\n\n );\n\n\n", "file_path": "bee-network/bee-gossip/src/swarm/protocols/iota_gossip/io.rs", "rank": 50, "score": 214902.80265461156 }, { "content": "fn path() -> impl Filter<Extract = (Address,), Error = warp::Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"addresses\"))\n\n .and(bech32_address())\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n consensus_worker: mpsc::UnboundedSender<ConsensusWorkerCommand>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_BALANCE_BECH32, public_routes, allowed_ips))\n\n .and(with_consensus_worker(consensus_worker))\n\n .and_then(|addr, consensus_worker| async move { balance_bech32(addr, consensus_worker).await })\n\n .boxed()\n\n}\n\n\n\npub(crate) async fn balance_bech32(\n\n addr: Address,\n\n consensus_worker: mpsc::UnboundedSender<ConsensusWorkerCommand>,\n\n) -> Result<impl Reply, Rejection> {\n\n match addr {\n\n Address::Ed25519(a) => balance_ed25519(a, consensus_worker).await,\n\n }\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/balance_bech32.rs", "rank": 51, "score": 214797.41788454278 }, { "content": "struct PeerIdVisitor {}\n\n\n\nimpl<'de> Visitor<'de> for PeerIdVisitor {\n\n type Value = PeerId;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"'PeerId'\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n\n {\n\n let bytes = seq\n\n .next_element::<[u8; PUBLIC_KEY_LENGTH]>()?\n\n .ok_or_else(|| serde::de::Error::invalid_length(0, &self))?;\n\n\n\n let public_key = PublicKey::try_from_bytes(bytes).map_err(|_| serde::de::Error::invalid_length(0, &self))?;\n\n\n\n Ok(PeerId::from_public_key(public_key))\n", "file_path": "bee-network/bee-autopeering/src/peer/peer_id.rs", "rank": 52, "score": 214655.52158747357 }, { "content": "struct AutopeeringMultiaddrVisitor;\n\n\n\nimpl<'de> Visitor<'de> for AutopeeringMultiaddrVisitor {\n\n type Value = AutopeeringMultiaddr;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"autopeering multiaddr\")\n\n }\n\n\n\n fn visit_string<E>(self, value: String) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n value.parse().map_err(de::Error::custom)\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n", "file_path": "bee-network/bee-autopeering/src/multiaddr.rs", "rank": 53, "score": 214309.9934512939 }, { "content": "pub fn message_raw<B: StorageBackend>(\n\n message_id: MessageId,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> Result<impl Reply, Rejection> {\n\n match tangle.get(&message_id) {\n\n Some(message) => Ok(Response::builder()\n\n .header(\"Content-Type\", \"application/octet-stream\")\n\n .body(message.pack_new())),\n\n None => Err(reject::custom(CustomRejection::NotFound(\n\n \"can not find message\".to_string(),\n\n ))),\n\n }\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message_raw.rs", "rank": 54, "score": 213613.9104052977 }, { "content": "pub fn message_children<B: StorageBackend>(\n\n 
message_id: MessageId,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> Result<impl Reply, Rejection> {\n\n let mut children = Vec::from_iter(tangle.get_children(&message_id).unwrap_or_default());\n\n let count = children.len();\n\n let max_results = 1000;\n\n children.truncate(max_results);\n\n Ok(warp::reply::json(&SuccessBody::new(MessageChildrenResponse {\n\n message_id: message_id.to_string(),\n\n max_results,\n\n count,\n\n children_message_ids: children.iter().map(|id| id.to_string()).collect(),\n\n })))\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message_children.rs", "rank": 55, "score": 213613.9104052977 }, { "content": "pub fn message_id_to_message_id_access<B: StorageBackend>(storage: &B) {\n\n let (parent, child) = (rand_message_id(), rand_message_id());\n\n\n\n assert!(!Exist::<(MessageId, MessageId), ()>::exist(storage, &(parent, child)).unwrap());\n\n assert!(\n\n Fetch::<MessageId, Vec<MessageId>>::fetch(storage, &parent)\n\n .unwrap()\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n Insert::<(MessageId, MessageId), ()>::insert(storage, &(parent, child), &()).unwrap();\n\n\n\n assert!(Exist::<(MessageId, MessageId), ()>::exist(storage, &(parent, child)).unwrap());\n\n assert_eq!(\n\n Fetch::<MessageId, Vec<MessageId>>::fetch(storage, &parent)\n\n .unwrap()\n\n .unwrap(),\n\n vec![child]\n\n );\n", "file_path": "bee-storage/bee-storage-test/src/message_id_to_message_id.rs", "rank": 56, "score": 212373.3487535003 }, { "content": "#[derive(Default)]\n\nstruct PeerManagerInner {\n\n peers: Vec<(PeerId, PeerTuple)>,\n\n}\n\n\n\nimpl PeerManagerInner {\n\n fn get(&self, id: &PeerId) -> Option<&PeerTuple> {\n\n self.peers\n\n .binary_search_by_key(id, |(id, _)| *id)\n\n .ok()\n\n .map(|i| &self.peers[i].1)\n\n }\n\n\n\n fn get_mut(&mut self, id: &PeerId) -> Option<&mut PeerTuple> {\n\n self.peers\n\n .binary_search_by_key(id, |(id, _)| *id)\n\n .ok()\n\n .map(|i| &mut self.peers[i].1)\n\n }\n\n\n\n fn insert(&mut self, id: PeerId, peer: PeerTuple) {\n", "file_path": "bee-protocol/src/workers/peer/manager_res.rs", "rank": 57, "score": 210872.707944619 }, { "content": "pub fn address_to_balance_access<B: StorageBackend>(storage: &B) {\n\n let (address, balance) = (rand_address(), rand_balance());\n\n\n\n assert!(!Exist::<Address, Balance>::exist(storage, &address).unwrap());\n\n assert!(Fetch::<Address, Balance>::fetch(storage, &address).unwrap().is_none());\n\n let results = MultiFetch::<Address, Balance>::multi_fetch(storage, &[address])\n\n .unwrap()\n\n .collect::<Vec<_>>();\n\n assert_eq!(results.len(), 1);\n\n assert!(matches!(results.get(0), Some(Ok(None))));\n\n\n\n Insert::<Address, Balance>::insert(storage, &address, &balance).unwrap();\n\n\n\n assert!(Exist::<Address, Balance>::exist(storage, &address).unwrap());\n\n assert_eq!(\n\n Fetch::<Address, Balance>::fetch(storage, &address)\n\n .unwrap()\n\n .unwrap()\n\n .pack_new(),\n\n balance.pack_new()\n", "file_path": "bee-storage/bee-storage-test/src/address_to_balance.rs", "rank": 58, "score": 210645.73679943974 }, { "content": "struct BtritVisitor;\n\n\n\nimpl<'de> Visitor<'de> for BtritVisitor {\n\n type Value = Btrit;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"a value between -1 and 1 inclusive\")\n\n }\n\n\n\n fn visit_u64<E: Error>(self, trit: u64) -> Result<Self::Value, E> {\n\n i8::try_from(trit)\n\n .map_err(|_| ())\n\n .and_then(|trit| Btrit::try_from(trit).map_err(|_| ()))\n\n .map_err(|_| E::invalid_value(Unexpected::Unsigned(trit), &self))\n\n 
}\n\n\n\n fn visit_i64<E: Error>(self, trit: i64) -> Result<Self::Value, E> {\n\n i8::try_from(trit)\n\n .map_err(|_| ())\n\n .and_then(|trit| Btrit::try_from(trit).map_err(|_| ()))\n", "file_path": "bee-ternary/src/serde.rs", "rank": 59, "score": 210374.793754637 }, { "content": "struct UtritVisitor;\n\n\n\nimpl<'de> Visitor<'de> for UtritVisitor {\n\n type Value = Utrit;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"a value between 0 and 2 inclusive\")\n\n }\n\n\n\n fn visit_u64<E: Error>(self, trit: u64) -> Result<Self::Value, E> {\n\n u8::try_from(trit)\n\n .map_err(|_| ())\n\n .and_then(|trit| Utrit::try_from(trit).map_err(|_| ()))\n\n .map_err(|_| E::invalid_value(Unexpected::Unsigned(trit), &self))\n\n }\n\n\n\n fn visit_i64<E: Error>(self, trit: i64) -> Result<Self::Value, E> {\n\n i8::try_from(trit)\n\n .map_err(|_| ())\n\n .and_then(|trit| Utrit::try_from(trit).map_err(|_| ()))\n", "file_path": "bee-ternary/src/serde.rs", "rank": 60, "score": 210374.793754637 }, { "content": "pub fn command_channel() -> (CommandSender, CommandReceiver) {\n\n mpsc::unbounded_channel()\n\n}\n\n\n\n/// Describes the commands accepted by the networking layer.\n\n#[derive(Debug, Eq, PartialEq)]\n\n#[non_exhaustive]\n\npub enum Command {\n\n /// Adds a peer.\n\n AddPeer {\n\n /// The peer's id.\n\n peer_id: PeerId,\n\n /// The peer's address.\n\n multiaddr: Multiaddr,\n\n /// The peer's optional alias.\n\n alias: Option<String>,\n\n /// The relation with that peer.\n\n relation: PeerRelation,\n\n },\n\n /// Removes a peer.\n", "file_path": "bee-network/bee-gossip/src/service/command.rs", "rank": 61, "score": 209890.6138631099 }, { "content": "pub fn init_full_node<N: Node>(\n\n rest_api_config: RestApiConfig,\n\n protocol_config: ProtocolConfig,\n\n network_id: NetworkId,\n\n bech32_hrp: Bech32Hrp,\n\n node_builder: N::Builder,\n\n) -> N::Builder\n\nwhere\n\n N::Backend: StorageBackend,\n\n{\n\n node_builder.with_worker_cfg::<ApiWorkerFullNode>((rest_api_config, protocol_config, network_id, bech32_hrp))\n\n}\n\n\n\npub struct ApiWorkerFullNode;\n\n\n\n#[async_trait]\n\nimpl<N: Node> Worker<N> for ApiWorkerFullNode\n\nwhere\n\n N::Backend: StorageBackend,\n\n{\n", "file_path": "bee-api/bee-rest-api/src/endpoints/mod.rs", "rank": 62, "score": 209346.84894254565 }, { "content": "pub fn exec(tool: &Tool) -> Result<(), ToolError> {\n\n match tool {\n\n Tool::Ed25519(tool) => ed25519::exec(tool)?,\n\n #[cfg(feature = \"rocksdb\")]\n\n Tool::Rocksdb(tool) => rocksdb::exec(tool)?,\n\n #[cfg(feature = \"sled\")]\n\n Tool::Sled(tool) => sled::exec(tool)?,\n\n Tool::SnapshotInfo(tool) => snapshot_info::exec(tool)?,\n\n Tool::Password(tool) => password::exec(tool)?,\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "bee-node/src/tools/mod.rs", "rank": 63, "score": 207475.73735758985 }, { "content": "pub fn index_to_message_id_access<B: StorageBackend>(storage: &B) {\n\n let (index, message_id) = (rand_indexation_payload().padded_index(), rand_message_id());\n\n\n\n assert!(!Exist::<(PaddedIndex, MessageId), ()>::exist(storage, &(index, message_id)).unwrap());\n\n assert!(\n\n Fetch::<PaddedIndex, Vec<MessageId>>::fetch(storage, &index)\n\n .unwrap()\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n Insert::<(PaddedIndex, MessageId), ()>::insert(storage, &(index, message_id), &()).unwrap();\n\n\n\n assert!(Exist::<(PaddedIndex, MessageId), ()>::exist(storage, &(index, message_id)).unwrap());\n\n assert_eq!(\n\n Fetch::<PaddedIndex, Vec<MessageId>>::fetch(storage, &index)\n\n 
.unwrap()\n\n .unwrap(),\n\n vec![message_id]\n\n );\n", "file_path": "bee-storage/bee-storage-test/src/index_to_message_id.rs", "rank": 64, "score": 205774.43386701052 }, { "content": "pub fn message_id_to_metadata_access<B: StorageBackend>(storage: &B) {\n\n let (message_id, metadata) = (rand_message_id(), rand_message_metadata());\n\n\n\n assert!(!Exist::<MessageId, MessageMetadata>::exist(storage, &message_id).unwrap());\n\n assert!(\n\n Fetch::<MessageId, MessageMetadata>::fetch(storage, &message_id)\n\n .unwrap()\n\n .is_none()\n\n );\n\n let results = MultiFetch::<MessageId, MessageMetadata>::multi_fetch(storage, &[message_id])\n\n .unwrap()\n\n .collect::<Vec<_>>();\n\n assert_eq!(results.len(), 1);\n\n assert!(matches!(results.get(0), Some(Ok(None))));\n\n\n\n InsertStrict::<MessageId, MessageMetadata>::insert_strict(storage, &message_id, &metadata).unwrap();\n\n assert!(Exist::<MessageId, MessageMetadata>::exist(storage, &message_id).unwrap());\n\n\n\n // calling `insert_strict` with the same `MessageId` but a different `MessageMetadata` should\n\n // not overwrite the old value.\n", "file_path": "bee-storage/bee-storage-test/src/message_id_to_metadata.rs", "rank": 65, "score": 205774.43386701052 }, { "content": "pub fn channel() -> (GossipSender, GossipReceiver) {\n\n let (sender, receiver) = mpsc::unbounded_channel();\n\n (sender, UnboundedReceiverStream::new(receiver))\n\n}\n\n\n", "file_path": "bee-network/bee-gossip/src/swarm/protocols/iota_gossip/io.rs", "rank": 66, "score": 205371.60776301532 }, { "content": "#[test]\n\nfn generate_address() {\n\n match Address::from(Ed25519Address::new([1; 32])) {\n\n Address::Ed25519(a) => assert_eq!(a.len(), 32),\n\n }\n\n}\n\n\n", "file_path": "bee-message/tests/ed25519_address.rs", "rank": 67, "score": 204505.7427945892 }, { "content": "#[test]\n\nfn bech32_string_to_address() {\n\n let bytes: [u8; 32] = hex::decode(ED25519_ADDRESS).unwrap().try_into().unwrap();\n\n\n\n let address = Address::try_from_bech32(&Address::from(Ed25519Address::new(bytes)).to_bech32(\"iota\")).unwrap();\n\n let Address::Ed25519(ed) = address;\n\n\n\n assert_eq!(ed.to_string(), ED25519_ADDRESS);\n\n\n\n let address = Address::try_from_bech32(&Address::from(Ed25519Address::new(bytes)).to_bech32(\"atoi\")).unwrap();\n\n let Address::Ed25519(ed) = address;\n\n\n\n assert_eq!(ed.to_string(), ED25519_ADDRESS);\n\n}\n\n\n", "file_path": "bee-message/tests/address.rs", "rank": 68, "score": 204505.7427945892 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", MessageId::from_str(MESSAGE_ID).unwrap()),\n\n \"MessageId(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/message_id.rs", "rank": 69, "score": 204264.33513264882 }, { "content": "pub fn ed25519_address_to_output_id_access<B: StorageBackend>(storage: &B) {\n\n let (address, output_id) = (rand_ed25519_address(), rand_output_id());\n\n\n\n assert!(!Exist::<(Ed25519Address, OutputId), ()>::exist(storage, &(address, output_id)).unwrap());\n\n assert!(\n\n Fetch::<Ed25519Address, Vec<OutputId>>::fetch(storage, &address)\n\n .unwrap()\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n Insert::<(Ed25519Address, OutputId), ()>::insert(storage, &(address, output_id), &()).unwrap();\n\n\n\n assert!(Exist::<(Ed25519Address, OutputId), ()>::exist(storage, &(address, output_id)).unwrap());\n\n assert_eq!(\n\n Fetch::<Ed25519Address, Vec<OutputId>>::fetch(storage, &address)\n\n .unwrap()\n\n .unwrap(),\n\n vec![output_id]\n\n 
);\n", "file_path": "bee-storage/bee-storage-test/src/ed25519_address_to_output_id.rs", "rank": 70, "score": 203829.61227703214 }, { "content": "pub fn milestone_index_to_unreferenced_message_access<B: StorageBackend>(storage: &B) {\n\n let (index, unreferenced_message) = (rand_milestone_index(), rand_unreferenced_message());\n\n\n\n assert!(\n\n !Exist::<(MilestoneIndex, UnreferencedMessage), ()>::exist(storage, &(index, unreferenced_message)).unwrap()\n\n );\n\n assert!(\n\n Fetch::<MilestoneIndex, Vec<UnreferencedMessage>>::fetch(storage, &index)\n\n .unwrap()\n\n .unwrap()\n\n .is_empty()\n\n );\n\n\n\n Insert::<(MilestoneIndex, UnreferencedMessage), ()>::insert(storage, &(index, unreferenced_message), &()).unwrap();\n\n\n\n assert!(\n\n Exist::<(MilestoneIndex, UnreferencedMessage), ()>::exist(storage, &(index, unreferenced_message)).unwrap()\n\n );\n\n assert_eq!(\n\n Fetch::<MilestoneIndex, Vec<UnreferencedMessage>>::fetch(storage, &index)\n", "file_path": "bee-storage/bee-storage-test/src/milestone_index_to_unreferenced_message.rs", "rank": 71, "score": 202471.31014970155 }, { "content": "#[test]\n\nfn invalid_bech32_string_to_address() {\n\n let address = Address::try_from_bech32(ED25519_ADDRESS_BAD);\n\n assert!(matches!(address, Err(Error::InvalidAddress)));\n\n}\n\n\n", "file_path": "bee-message/tests/address.rs", "rank": 72, "score": 202226.70133679628 }, { "content": "// An event handler.\n\n//\n\n// This type takes care of actually receiving the events and appending them to an inner buffer so\n\n// they can be used seamlessly by the `PacketHandler`.\n\nstruct EventHandler {\n\n receiver: EventRecv,\n\n buffer: Vec<u8>,\n\n offset: usize,\n\n}\n\n\n\nimpl EventHandler {\n\n /// Create a new event handler from an event receiver.\n\n fn new(receiver: EventRecv) -> Self {\n\n Self {\n\n receiver,\n\n buffer: vec![],\n\n offset: 0,\n\n }\n\n }\n\n\n\n /// Push a new event into the buffer.\n\n ///\n\n /// This method also removes the `..self.offset` range from the buffer and sets the offset back\n\n /// to zero. Which means that this should only be called when the buffer is empty or when there\n", "file_path": "bee-protocol/src/workers/peer/packet_handler.rs", "rank": 73, "score": 201585.22624570038 }, { "content": "fn add_peers_from_store<S: PeerStore>(\n\n peer_store: &S,\n\n active_peers: &ActivePeersList,\n\n replacements: &ReplacementPeersList,\n\n) -> Result<usize, S::Error> {\n\n let mut num_added = 0;\n\n\n\n let mut write = active_peers.write();\n\n for active_peer in peer_store.fetch_all_active()? {\n\n if write.insert(active_peer) {\n\n num_added += 1;\n\n }\n\n }\n\n drop(write);\n\n\n\n let mut write = replacements.write();\n\n for replacement in peer_store.fetch_all_replacements()? 
{\n\n if write.insert(replacement) {\n\n num_added += 1;\n\n }\n", "file_path": "bee-network/bee-autopeering/src/discovery/manager.rs", "rank": 74, "score": 201052.98522290256 }, { "content": "struct CustomHasher {\n\n result: u64,\n\n}\n\n\n\nimpl CustomHasher {\n\n fn finish(&self) -> u64 {\n\n self.result\n\n }\n\n fn write(&mut self, i: u64) {\n\n self.result = i;\n\n }\n\n}\n\n\n\nimpl Default for CustomHasher {\n\n fn default() -> Self {\n\n Self {\n\n result: 17_241_709_254_077_376_921,\n\n }\n\n }\n\n}\n", "file_path": "bee-protocol/src/workers/message/hash_cache.rs", "rank": 75, "score": 200784.4335569159 }, { "content": "fn path() -> impl Filter<Extract = (MessageId,), Error = Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"messages\"))\n\n .and(message_id())\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter<B: StorageBackend>(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_MESSAGE, public_routes, allowed_ips))\n\n .and(with_tangle(tangle))\n\n .and_then(|message_id, tangle| async move { message(message_id, tangle) })\n\n .boxed()\n\n}\n\n\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message.rs", "rank": 76, "score": 200237.55972475358 }, { "content": "/// The maximum number of allowed dust outputs on an address is `dust_allowance_sum` divided by `DUST_ALLOWANCE_DIVISOR`\n\n/// and rounded down, i.e. 10 outputs for each 1 Mi deposited. `dust_allowance_sum` is the sum of all the amounts of all\n\n/// unspent `SigLockedDustAllowanceOutputs` on this address. Regardless of `dust_allowance_sum`, the number of dust\n\n/// outputs must never exceed `DUST_OUTPUTS_MAX` per address.\n\npub fn dust_outputs_max(dust_allowance_sum: u64) -> u64 {\n\n DUST_OUTPUTS_MAX.min(dust_allowance_sum / DUST_ALLOWANCE_DIVISOR)\n\n}\n\n\n\n/// A `SignatureLockedDustAllowanceOutput` functions like a `SignatureLockedSingleOutput` but as a special property it\n\n/// is used to increase the allowance/amount of dust outputs on a given address.\n\n#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]\n\n#[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub struct SignatureLockedDustAllowanceOutput {\n\n address: Address,\n\n amount: u64,\n\n}\n\n\n\nimpl SignatureLockedDustAllowanceOutput {\n\n /// The output kind of a `SignatureLockedDustAllowanceOutput`.\n\n pub const KIND: u8 = 1;\n\n\n\n /// Creates a new `SignatureLockedDustAllowanceOutput`.\n\n pub fn new(address: Address, amount: u64) -> Result<Self, Error> {\n\n if !SIGNATURE_LOCKED_DUST_ALLOWANCE_OUTPUT_AMOUNT.contains(&amount) {\n", "file_path": "bee-message/src/output/signature_locked_dust_allowance.rs", "rank": 77, "score": 199612.06662951736 }, { "content": "#[test]\n\nfn kind() {\n\n let bytes: [u8; 32] = hex::decode(ED25519_ADDRESS).unwrap().try_into().unwrap();\n\n let ed25519_address = Address::from(Ed25519Address::new(bytes));\n\n\n\n assert_eq!(ed25519_address.kind(), 0);\n\n}\n\n\n", "file_path": "bee-message/tests/address.rs", "rank": 78, "score": 198638.13685045642 }, { "content": "fn handle_rejection(err: Rejection) -> Result<impl Reply, Infallible> {\n\n let (http_code, err_code, reason) = match err.find() {\n\n // handle custom rejections\n\n Some(CustomRejection::Forbidden) => (StatusCode::FORBIDDEN, \"403\", \"access forbidden\"),\n\n Some(CustomRejection::NotFound(reason)) => 
(StatusCode::NOT_FOUND, \"404\", reason.as_str()),\n\n Some(CustomRejection::BadRequest(reason)) => (StatusCode::BAD_REQUEST, \"400\", reason.as_str()),\n\n Some(CustomRejection::ServiceUnavailable(reason)) => (StatusCode::SERVICE_UNAVAILABLE, \"503\", reason.as_str()),\n\n // handle default rejections\n\n _ => {\n\n if err.is_not_found() {\n\n (StatusCode::NOT_FOUND, \"404\", \"data not found\")\n\n } else if err.find::<warp::reject::MethodNotAllowed>().is_some() {\n\n (StatusCode::FORBIDDEN, \"403\", \"access forbidden\")\n\n } else {\n\n error!(\"unhandled rejection: {:?}\", err);\n\n (StatusCode::INTERNAL_SERVER_ERROR, \"500\", \"internal server error\")\n\n }\n\n }\n\n };\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&ErrorBody::new(DefaultErrorResponse {\n\n code: err_code.to_string(),\n\n message: reason.to_string(),\n\n })),\n\n http_code,\n\n ))\n\n}\n\n\n", "file_path": "bee-api/bee-rest-api/src/endpoints/mod.rs", "rank": 79, "score": 197812.93434121247 }, { "content": "fn resolve_dns_multiaddr(dns: Cow<'_, str>) -> Result<Protocol, Error> {\n\n use std::net::{IpAddr, ToSocketAddrs};\n\n\n\n match dns\n\n .to_socket_addrs()\n\n .map_err(|_| Error::UnresolvableDomain(dns.to_string()))?\n\n .next()\n\n .ok_or_else(|| Error::UnresolvableDomain(dns.to_string()))?\n\n .ip()\n\n {\n\n IpAddr::V4(ip4) => return Ok(Protocol::Ip4(ip4)),\n\n IpAddr::V6(ip6) => return Ok(Protocol::Ip6(ip6)),\n\n }\n\n}\n\n\n\nimpl Default for NetworkConfig {\n\n fn default() -> Self {\n\n Self {\n\n // Panic:\n\n // Unwrapping is fine, because we made sure that the default is parsable.\n", "file_path": "bee-network/bee-gossip/src/config.rs", "rank": 80, "score": 197620.65884124418 }, { "content": "#[test]\n\nfn serialize() {\n\n serialize_generic::<T1B1Buf<Btrit>>();\n\n serialize_generic_unbalanced::<T1B1Buf<Utrit>>();\n\n serialize_generic::<T2B1Buf>();\n\n serialize_generic::<T3B1Buf>();\n\n serialize_generic::<T4B1Buf>();\n\n serialize_generic::<T5B1Buf>();\n\n}\n\n\n", "file_path": "bee-test/tests/ternary/serde.rs", "rank": 81, "score": 197438.2191942129 }, { "content": "#[test]\n\nfn deserialize() {\n\n deserialize_generic::<T1B1Buf<Btrit>>();\n\n deserialize_generic_unbalanced::<T1B1Buf<Utrit>>();\n\n deserialize_generic::<T2B1Buf>();\n\n deserialize_generic::<T3B1Buf>();\n\n deserialize_generic::<T4B1Buf>();\n\n deserialize_generic::<T5B1Buf>();\n\n}\n", "file_path": "bee-test/tests/ternary/serde.rs", "rank": 82, "score": 197436.37863833015 }, { "content": "#[test]\n\nfn debug_impl() {\n\n let milestone = Milestone::new(MessageId::from_str(MESSAGE_ID).unwrap(), 0);\n\n\n\n assert_eq!(\n\n format!(\"{:?}\", milestone),\n\n \"Milestone { message_id: MessageId(9e23e9fccb816af4ad355c27d904b6a6e88618e0bed1b640df3d4c19f4579bc9), timestamp: 0 }\",\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/milestone.rs", "rank": 83, "score": 195986.34465573382 }, { "content": "#[test]\n\nfn kind() {\n\n assert_eq!(Ed25519Address::KIND, 0);\n\n}\n\n\n", "file_path": "bee-message/tests/ed25519_address.rs", "rank": 84, "score": 195964.64695702607 }, { "content": "#[test]\n\nfn from_to_str() {\n\n assert_eq!(\n\n ED25519_ADDRESS,\n\n Ed25519Address::from_str(ED25519_ADDRESS).unwrap().to_string()\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/ed25519_address.rs", "rank": 85, "score": 195964.64695702607 }, { "content": "fn hang_up(swarm: &mut Swarm<SwarmBehaviour>, peer_id: PeerId) {\n\n debug!(\"Hanging up on: {}.\", alias!(peer_id));\n\n\n\n let _ = Swarm::disconnect_peer_id(swarm, peer_id);\n\n}\n", 
"file_path": "bee-network/bee-gossip/src/network/host.rs", "rank": 86, "score": 195286.8476315161 }, { "content": "fn path() -> impl Filter<Extract = (MessageId,), Error = warp::Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"messages\"))\n\n .and(message_id())\n\n .and(warp::path(\"metadata\"))\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter<B: StorageBackend>(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_MESSAGE_METADATA, public_routes, allowed_ips))\n\n .and(with_tangle(tangle))\n\n .and_then(|message_id, tangle| async move { message_metadata(message_id, tangle) })\n\n .boxed()\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message_metadata.rs", "rank": 87, "score": 194741.8784080723 }, { "content": "fn path() -> impl Filter<Extract = (MessageId,), Error = warp::Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"messages\"))\n\n .and(message_id())\n\n .and(warp::path(\"raw\"))\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter<B: StorageBackend>(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_MESSAGE_RAW, public_routes, allowed_ips))\n\n .and(with_tangle(tangle))\n\n .and_then(|message_id, tangle| async move { message_raw(message_id, tangle) })\n\n .boxed()\n\n}\n\n\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message_raw.rs", "rank": 88, "score": 194741.8784080723 }, { "content": "fn path() -> impl Filter<Extract = (MessageId,), Error = warp::Rejection> + Clone {\n\n super::path()\n\n .and(warp::path(\"messages\"))\n\n .and(message_id())\n\n .and(warp::path(\"children\"))\n\n .and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter<B: StorageBackend>(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n tangle: ResourceHandle<Tangle<B>>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_MESSAGE_CHILDREN, public_routes, allowed_ips))\n\n .and(with_tangle(tangle))\n\n .and_then(|message_id, tangle| async move { message_children(message_id, tangle) })\n\n .boxed()\n\n}\n\n\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/message_children.rs", "rank": 89, "score": 194741.8784080723 }, { "content": "fn path() -> impl Filter<Extract = (), Error = Rejection> + Clone {\n\n super::path().and(warp::path(\"peers\")).and(warp::path::end())\n\n}\n\n\n\npub(crate) fn filter(\n\n public_routes: Box<[String]>,\n\n allowed_ips: Box<[IpAddr]>,\n\n peer_manager: ResourceHandle<PeerManager>,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n self::path()\n\n .and(warp::get())\n\n .and(has_permission(ROUTE_PEERS, public_routes, allowed_ips))\n\n .and(with_peer_manager(peer_manager))\n\n .and_then(|peer_manager| async move { peers(peer_manager) })\n\n .boxed()\n\n}\n\n\n\npub(crate) fn peers(peer_manager: ResourceHandle<PeerManager>) -> Result<impl Reply, Infallible> {\n\n let mut peers_dtos = Vec::new();\n\n for peer in peer_manager.get_all() {\n\n peers_dtos.push(PeerDto::from(peer.as_ref()));\n\n }\n\n Ok(warp::reply::json(&SuccessBody::new(PeersResponse(peers_dtos))))\n\n}\n", "file_path": "bee-api/bee-rest-api/src/endpoints/routes/api/v1/peers.rs", "rank": 90, "score": 194131.91351777845 }, { "content": 
"#[test]\n\nfn debug_impl() {\n\n assert_eq!(format!(\"{:?}\", MilestoneIndex::new(0)), \"MilestoneIndex(0)\",);\n\n}\n\n\n", "file_path": "bee-message/tests/milestone_index.rs", "rank": 91, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n let id_bytes: [u8; 32] = hex::decode(MILESTONE_ID).unwrap().try_into().unwrap();\n\n\n\n assert_eq!(\n\n format!(\"{:?}\", MilestoneId::new(id_bytes)),\n\n \"MilestoneId(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/milestone_id.rs", "rank": 92, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", TreasuryInput::from_str(MILESTONE_ID_VALID).unwrap()),\n\n \"TreasuryInput(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/treasury_input.rs", "rank": 93, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", TransactionId::from_str(TRANSACTION_ID).unwrap()),\n\n \"TransactionId(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/transaction_id.rs", "rank": 94, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", UtxoInput::from_str(OUTPUT_ID).unwrap()),\n\n \"UtxoInput(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c6492a00)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/utxo_input.rs", "rank": 95, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\"{:?}\", OutputId::from_str(OUTPUT_ID).unwrap()),\n\n \"OutputId(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c6492a00)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/output_id.rs", "rank": 96, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn display_impl() {\n\n assert_eq!(format!(\"{}\", MilestoneIndex::new(0)), \"0\");\n\n}\n\n\n", "file_path": "bee-message/tests/milestone_index.rs", "rank": 97, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn debug_impl() {\n\n assert_eq!(\n\n format!(\n\n \"{:?}\",\n\n PaddedIndex::new(hex::decode(PADDED_INDEX).unwrap().try_into().unwrap())\n\n ),\n\n \"PaddedIndex(52fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c64952fdfc072182654f163f5f0f9a621d729566c74d10037c4d7bbb0407d1e2c649)\"\n\n );\n\n}\n\n\n", "file_path": "bee-message/tests/indexation_payload.rs", "rank": 98, "score": 193397.68214467494 }, { "content": "#[test]\n\nfn from_str_valid() {\n\n Ed25519Address::from_str(ED25519_ADDRESS).unwrap();\n\n}\n\n\n", "file_path": "bee-message/tests/ed25519_address.rs", "rank": 99, "score": 193376.33007253765 } ]
Rust
src/body.rs
ocornoc/gravity
1263ddc73c29a70a0fb254d41c2cb28dbc3eae63
use bevy::prelude::*; use ultraviolet::DVec3; pub struct TransformScale(pub f64); impl Default for TransformScale { fn default() -> Self { TransformScale(1e-8) } } #[derive(Clone, Copy, PartialEq, Debug, Default, PartialOrd)] pub struct Mass(pub f64); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Position { pub current: DVec3, pub previous: DVec3, } impl Position { pub const fn new(x: f64, y: f64, z: f64) -> Self { let pos = DVec3::new(x, y, z); Position { current: pos, previous: pos } } } #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Velocity(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct LinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct NewLinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug)] pub struct Timestep { pub current: f64, pub substeps: usize, current_frame_time: f64, pub paused: bool, } impl Default for Timestep { fn default() -> Self { Self::new(Timestep::DAY_PER_SECOND, true) } } impl Timestep { pub const REALTIME: f64 = 1.0; pub const MINUTE_PER_SECOND: f64 = 60.0 * Timestep::REALTIME; pub const HOUR_PER_SECOND: f64 = 60.0 * Timestep::MINUTE_PER_SECOND; pub const DAY_PER_SECOND: f64 = Timestep::HOUR_PER_SECOND * 24.0; pub fn new(rate: f64, paused: bool) -> Self { Self::new_with_substeps(rate, paused, 1) } pub fn new_with_substeps(rate: f64, paused: bool, substeps: usize) -> Self { assert_ne!(substeps, 0, "must be a positive amount of substeps"); Timestep { current: rate, substeps, current_frame_time: 0.00001, paused, } } #[allow(dead_code)] pub const fn real_time(paused: bool) -> Self { Timestep { current: Self::REALTIME, substeps: 5, current_frame_time: 0.00001, paused, } } } const SPEED_OF_LIGHT: f64 = 299792458.0; const SQUARE_SOL: f64 = SPEED_OF_LIGHT * SPEED_OF_LIGHT; const SQUARE_SOL_RECIP: f64 = 1.0 / SQUARE_SOL; const GRAV: f64 = 6.6743015e-11; pub struct GravityPlugin; impl Plugin for GravityPlugin { fn build(&self, app: &mut AppBuilder) { app .add_resource(Timestep::default()) .add_resource(TransformScale::default()) .add_system(eih_integrate_position.system()); } } type Q<'a, 'b> = Query<'b, ( Entity, &'a mut NewLinAccel, &'a Mass, &'a mut Position, &'a mut Velocity, &'a mut LinAccel, &'a mut Transform, )>; fn eih_integrate_position( mut timestep: ResMut<Timestep>, mut time: ResMut<Time>, tfs: Res<TransformScale>, mut q: Q, ) { if timestep.paused { return; } let substeps = timestep.substeps; let time = &mut *time; let timestep = &mut *timestep; let real_rate = timestep.current; timestep.current /= substeps as f64; center_and_set_positions(tfs.0, &mut q); for _ in 0..substeps { calculate_newaccel_eih(&q); integrate_accel(&mut q, time, timestep); center_and_set_positions(tfs.0, &mut q); } timestep.current = real_rate; } fn calculate_newaccel_eih(q: &Q) { for ( id0, mut newaccel, _, pos0, vel0, _, _, ) in unsafe { q.iter_unsafe() } { let [mut sum0, mut sum1, mut sum2, mut sum3] = [DVec3::new(0.0, 0.0, 0.0); 4]; let (pos0, vel0) = (pos0.current, vel0.0); for ( id1, _, &Mass(mass1), pos1, vel1, accel1, _, ) in unsafe { q.iter_unsafe() } { if id0 == id1 { continue; } let (pos1, vel1, accel1) = (pos1.current, vel1.0, accel1.0); let pos0spos1 = pos0 - pos1; let distsq01 = pos0spos1.mag_sq(); let distsq01rec = distsq01.recip(); let dist01 = distsq01.sqrt(); let dist01rec = dist01.recip(); let norm01 = pos0spos1 * dist01rec; let norm10: DVec3 = -norm01; let grav_mass1 = GRAV * mass1; let grm1divdistsq01 = grav_mass1 * distsq01rec; let grm1divdist01 = grav_mass1 * dist01rec; sum0 
= norm10.mul_add(DVec3::broadcast(grm1divdistsq01), sum0); sum2 = (vel0 - vel1).mul_add(DVec3::broadcast( grm1divdistsq01 * norm01.dot(vel0.mul_add(DVec3::broadcast(4.0), -3.0 * vel1))), sum2, ); sum3 = accel1.mul_add(DVec3::broadcast(grm1divdist01), sum3); let mut temp_sum1_0: f64 = 0.0; let mut temp_sum1_1: f64 = 0.0; for (id2, _, &Mass(mass2), pos2, _, _, _) in unsafe { q.iter_unsafe() } { let pos2 = pos2.current; if id2 != id0 { temp_sum1_0 = mass2.mul_add((pos2 - pos0).mag().recip(), temp_sum1_0); } if id2 != id1 { temp_sum1_1 = mass2.mul_add((pos2 - pos1).mag().recip(), temp_sum1_1); } } sum1 = norm10.mul_add( DVec3::broadcast(grm1divdistsq01 * (-pos0spos1).dot(accel1).mul_add( 0.5, temp_sum1_1.mul_add(-GRAV, temp_sum1_0.mul_add( -4.0 * GRAV, norm01.dot(vel0).powi(2).mul_add(-1.5, vel0.dot(vel1).mul_add( -4.0, vel1.dot(vel1).mul_add(2.0, vel0.dot(vel0)), )) )) )), sum1, ); } newaccel.0 = sum0 + sum1 * SQUARE_SOL_RECIP + sum2 * SQUARE_SOL_RECIP + sum3 * (3.5 * SQUARE_SOL_RECIP); } } fn integrate_accel(q: &mut Q, time: &Time, timestep: &mut Timestep) { let old_time = timestep.current_frame_time; timestep.current_frame_time = time.delta_seconds_f64(); let dt = timestep.current * timestep.current_frame_time; let br0 = DVec3::broadcast(dt); let br1 = DVec3::broadcast(dt * dt); let br2 = (timestep.current * old_time).recip(); for (_, newaccel, _, mut pos, mut vel, mut accel, _) in q.iter_mut() { if pos.current == pos.previous && (vel.0 != DVec3::zero() || accel.0 != DVec3::zero()) { vel.0 = accel.0.mul_add(br0, vel.0); pos.previous = pos.current; pos.current = vel.0.mul_add(br0, pos.current); } else { let diff = pos.current - pos.previous; vel.0 = diff * br2; pos.previous = pos.current; pos.current = vel.0.mul_add(br0, accel.0.mul_add(br1, pos.current)); } accel.0 = newaccel.0; } } fn center_and_set_positions(tfs: f64, q: &mut Q) { #[cold] fn fail() { println!("Warning: failed to center and set positions within {} iters", MAX_INIT); } let mut delta; let mut total_mass = 0.0; let max = q .iter_mut() .max_by(|l, r| l.2.partial_cmp(&r.2).unwrap_or(std::cmp::Ordering::Equal)); if let Some((_, _, _, base_pos, ..)) = max { delta = base_pos.current; } else { return; } for (_, _, &Mass(mass), mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); total_mass += mass; } const MAX_INIT: u32 = 500; let mut iters_left = MAX_INIT; while iters_left > 0 { for (_, _, &Mass(mass), pos, ..) in q.iter_mut() { delta = pos.current.mul_add(DVec3::broadcast(mass), delta); } delta /= total_mass; if delta.mag_sq() >= 1e-3 { for (_, _, _, mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); } iters_left -= 1; } else { return; } } fail() }
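The `calculate_newaccel_eih` routine in the file_code above accumulates a Newtonian leading term (`sum0`) plus 1/c² corrections (`sum1`, `sum2`, `sum3`). A minimal sketch of just that leading term follows, assuming the same `ultraviolet::DVec3` API and `GRAV` constant used in the file; the helper name and slice-based layout are hypothetical and not part of the record:

use ultraviolet::DVec3;

const GRAV: f64 = 6.6743015e-11;

// Newtonian part only: a_i = sum_j G * m_j * (p_j - p_i) / |p_j - p_i|^3,
// i.e. the quantity `sum0` gathers before the 1/c^2 corrections are added.
fn newtonian_accel(i: usize, positions: &[DVec3], masses: &[f64]) -> DVec3 {
    let mut accel = DVec3::new(0.0, 0.0, 0.0);
    for (j, (&p_j, &m_j)) in positions.iter().zip(masses.iter()).enumerate() {
        if j == i {
            continue;
        }
        let r = p_j - positions[i]; // vector from body i towards body j
        let dist = r.mag();
        accel += r * (GRAV * m_j / (dist * dist * dist));
    }
    accel
}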
use bevy::prelude::*; use ultraviolet::DVec3; pub struct TransformScale(pub f64); impl Default for TransformScale { fn default() -> Self { TransformScale(1e-8) } } #[derive(Clone, Copy, PartialEq, Debug, Default, PartialOrd)] pub struct Mass(pub f64); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Position { pub current: DVec3, pub previous: DVec3, } impl Position { pub const fn new(x: f64, y: f64, z: f64) -> Self { let pos = DVec3::new(x, y, z); Position { current: pos, previous: pos } } } #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct Velocity(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct LinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug, Default)] pub struct NewLinAccel(pub DVec3); #[derive(Clone, Copy, PartialEq, Debug)] pub struct Timestep { pub current: f64, pub substeps: usize, current_frame_time: f64, pub paused: bool, } impl Default for Timestep { fn default() -> Self { Self::new(Timestep::DAY_PER_SECOND, true) } } impl Timestep { pub const REALTIME: f64 = 1.0; pub const MINUTE_PER_SECOND: f64 = 60.0 * Timestep::REALTIME; pub const HOUR_PER_SECOND: f64 = 60.0 * Timestep::MINUTE_PER_SECOND; pub const DAY_PER_SECOND: f64 = Timestep::HOUR_PER_SECOND * 24.0; pub fn new(rate: f64, paused: bool) -> Self { Self::new_with_substeps(rate, paused, 1) } pub fn new_with_substeps(rate: f64, paused: bool, substep
, paused, } } #[allow(dead_code)] pub const fn real_time(paused: bool) -> Self { Timestep { current: Self::REALTIME, substeps: 5, current_frame_time: 0.00001, paused, } } } const SPEED_OF_LIGHT: f64 = 299792458.0; const SQUARE_SOL: f64 = SPEED_OF_LIGHT * SPEED_OF_LIGHT; const SQUARE_SOL_RECIP: f64 = 1.0 / SQUARE_SOL; const GRAV: f64 = 6.6743015e-11; pub struct GravityPlugin; impl Plugin for GravityPlugin { fn build(&self, app: &mut AppBuilder) { app .add_resource(Timestep::default()) .add_resource(TransformScale::default()) .add_system(eih_integrate_position.system()); } } type Q<'a, 'b> = Query<'b, ( Entity, &'a mut NewLinAccel, &'a Mass, &'a mut Position, &'a mut Velocity, &'a mut LinAccel, &'a mut Transform, )>; fn eih_integrate_position( mut timestep: ResMut<Timestep>, mut time: ResMut<Time>, tfs: Res<TransformScale>, mut q: Q, ) { if timestep.paused { return; } let substeps = timestep.substeps; let time = &mut *time; let timestep = &mut *timestep; let real_rate = timestep.current; timestep.current /= substeps as f64; center_and_set_positions(tfs.0, &mut q); for _ in 0..substeps { calculate_newaccel_eih(&q); integrate_accel(&mut q, time, timestep); center_and_set_positions(tfs.0, &mut q); } timestep.current = real_rate; } fn calculate_newaccel_eih(q: &Q) { for ( id0, mut newaccel, _, pos0, vel0, _, _, ) in unsafe { q.iter_unsafe() } { let [mut sum0, mut sum1, mut sum2, mut sum3] = [DVec3::new(0.0, 0.0, 0.0); 4]; let (pos0, vel0) = (pos0.current, vel0.0); for ( id1, _, &Mass(mass1), pos1, vel1, accel1, _, ) in unsafe { q.iter_unsafe() } { if id0 == id1 { continue; } let (pos1, vel1, accel1) = (pos1.current, vel1.0, accel1.0); let pos0spos1 = pos0 - pos1; let distsq01 = pos0spos1.mag_sq(); let distsq01rec = distsq01.recip(); let dist01 = distsq01.sqrt(); let dist01rec = dist01.recip(); let norm01 = pos0spos1 * dist01rec; let norm10: DVec3 = -norm01; let grav_mass1 = GRAV * mass1; let grm1divdistsq01 = grav_mass1 * distsq01rec; let grm1divdist01 = grav_mass1 * dist01rec; sum0 = norm10.mul_add(DVec3::broadcast(grm1divdistsq01), sum0); sum2 = (vel0 - vel1).mul_add(DVec3::broadcast( grm1divdistsq01 * norm01.dot(vel0.mul_add(DVec3::broadcast(4.0), -3.0 * vel1))), sum2, ); sum3 = accel1.mul_add(DVec3::broadcast(grm1divdist01), sum3); let mut temp_sum1_0: f64 = 0.0; let mut temp_sum1_1: f64 = 0.0; for (id2, _, &Mass(mass2), pos2, _, _, _) in unsafe { q.iter_unsafe() } { let pos2 = pos2.current; if id2 != id0 { temp_sum1_0 = mass2.mul_add((pos2 - pos0).mag().recip(), temp_sum1_0); } if id2 != id1 { temp_sum1_1 = mass2.mul_add((pos2 - pos1).mag().recip(), temp_sum1_1); } } sum1 = norm10.mul_add( DVec3::broadcast(grm1divdistsq01 * (-pos0spos1).dot(accel1).mul_add( 0.5, temp_sum1_1.mul_add(-GRAV, temp_sum1_0.mul_add( -4.0 * GRAV, norm01.dot(vel0).powi(2).mul_add(-1.5, vel0.dot(vel1).mul_add( -4.0, vel1.dot(vel1).mul_add(2.0, vel0.dot(vel0)), )) )) )), sum1, ); } newaccel.0 = sum0 + sum1 * SQUARE_SOL_RECIP + sum2 * SQUARE_SOL_RECIP + sum3 * (3.5 * SQUARE_SOL_RECIP); } } fn integrate_accel(q: &mut Q, time: &Time, timestep: &mut Timestep) { let old_time = timestep.current_frame_time; timestep.current_frame_time = time.delta_seconds_f64(); let dt = timestep.current * timestep.current_frame_time; let br0 = DVec3::broadcast(dt); let br1 = DVec3::broadcast(dt * dt); let br2 = (timestep.current * old_time).recip(); for (_, newaccel, _, mut pos, mut vel, mut accel, _) in q.iter_mut() { if pos.current == pos.previous && (vel.0 != DVec3::zero() || accel.0 != DVec3::zero()) { vel.0 = accel.0.mul_add(br0, 
vel.0); pos.previous = pos.current; pos.current = vel.0.mul_add(br0, pos.current); } else { let diff = pos.current - pos.previous; vel.0 = diff * br2; pos.previous = pos.current; pos.current = vel.0.mul_add(br0, accel.0.mul_add(br1, pos.current)); } accel.0 = newaccel.0; } } fn center_and_set_positions(tfs: f64, q: &mut Q) { #[cold] fn fail() { println!("Warning: failed to center and set positions within {} iters", MAX_INIT); } let mut delta; let mut total_mass = 0.0; let max = q .iter_mut() .max_by(|l, r| l.2.partial_cmp(&r.2).unwrap_or(std::cmp::Ordering::Equal)); if let Some((_, _, _, base_pos, ..)) = max { delta = base_pos.current; } else { return; } for (_, _, &Mass(mass), mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); total_mass += mass; } const MAX_INIT: u32 = 500; let mut iters_left = MAX_INIT; while iters_left > 0 { for (_, _, &Mass(mass), pos, ..) in q.iter_mut() { delta = pos.current.mul_add(DVec3::broadcast(mass), delta); } delta /= total_mass; if delta.mag_sq() >= 1e-3 { for (_, _, _, mut pos, .., mut tf) in q.iter_mut() { pos.previous -= delta; pos.current -= delta; tf.translation = Vec3::new( (pos.current.x * tfs) as f32, (pos.current.y * tfs) as f32, (pos.current.z * tfs) as f32, ); } iters_left -= 1; } else { return; } } fail() }
s: usize) -> Self { assert_ne!(substeps, 0, "must be a positive amount of substeps"); Timestep { current: rate, substeps, current_frame_time: 0.00001
function_block-random_span
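For this record the `function_block-random_span` split lands inside `Timestep::new_with_substeps`: the prefix ends mid-identifier ("substep"), the middle carries the assertion and most of the struct literal, and the suffix supplies the trailing fields. Joining prefix + middle + suffix reproduces the constructor below (re-indented here for readability only; no content differs from the record):

pub fn new_with_substeps(rate: f64, paused: bool, substeps: usize) -> Self {
    assert_ne!(substeps, 0, "must be a positive amount of substeps");
    Timestep {
        current: rate,
        substeps,
        current_frame_time: 0.00001,
        paused,
    }
}

The context items that follow include a call site in `add_bodies` — `Timestep::new_with_substeps(Timestep::DAY_PER_SECOND, false, 500)` — which serves as a ground-truth usage of the completed span.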
[ { "content": "/// Get a new unique color, specified by RGB components.\n\npub fn new_color() -> [f32; 3] {\n\n COLORS[COUNTER.fetch_add(1, Ordering::Relaxed) % COLORS.len()]\n\n}\n", "file_path": "src/scene/newcolor.rs", "rank": 1, "score": 51750.93467058338 }, { "content": "fn unpause_after_3s(time: Res<Time>, mut step: ResMut<Timestep>) {\n\n if step.paused && time.seconds_since_startup() >= 3.0 {\n\n println!(\"Unpaused!\");\n\n step.paused = false;\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 4, "score": 40692.874042412615 }, { "content": "struct FpsText;\n\n\n", "file_path": "src/fps.rs", "rank": 5, "score": 34363.71936544726 }, { "content": "#[derive(Default)]\n\nstruct SceneLoader;\n\n\n\nimpl AssetLoader for SceneLoader {\n\n fn load<'a>(&'a self, bytes: &'a [u8], load_context: &'a mut LoadContext)\n\n -> BoxedFuture<'a, Result<(), anyhow::Error>>\n\n {\n\n Box::pin(async move {\n\n load_context.set_default_asset(LoadedAsset::new(\n\n ron::from_str::<'_, Scene>(std::str::from_utf8(bytes)?)?\n\n ));\n\n Ok(())\n\n })\n\n }\n\n\n\n fn extensions(&self) -> &[&str] {\n\n &[\"grav\"]\n\n }\n\n}\n\n\n", "file_path": "src/scene/mod.rs", "rank": 6, "score": 33263.095292403224 }, { "content": "/// Stage to automatically `.grav` scene assets.\n\nstruct ImportScenes;\n\n\n\nimpl Stage for ImportScenes {\n\n fn initialize(&mut self, world: &mut World, resources: &mut Resources) {\n\n // get the meshes, for the body spheres\n\n let mut meshes = resources.get_mut::<Assets<Mesh>>().unwrap();\n\n // get the materials resources, for colors\n\n let mut materials = resources.get_mut::<Assets<StandardMaterial>>().unwrap();\n\n // get the transform scale\n\n let scale = resources.get::<TransformScale>().unwrap().0;\n\n // load all scenes from the asset server\n\n let scenes = resources.get::<Assets<Scene>>().unwrap();\n\n // an iterator over all decomposed bodies in all scenes\n\n let iter = scenes\n\n .iter()\n\n .flat_map(|(_, scene)| scene.0.iter())\n\n .map(|b| Body::decompose(b, scale));\n\n \n\n for (body_data, (radius, color, transform)) in iter {\n\n // add a new color material\n", "file_path": "src/scene/mod.rs", "rank": 7, "score": 33260.53831539894 }, { "content": "#[bevy_main]\n\nfn main() {\n\n App::build()\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(GravityPlugin)\n\n .add_plugin(FPSPlugin)\n\n .add_plugin(GravScenePlugin)\n\n //.add_resource(Msaa { samples: 4 })\n\n .add_startup_system(add_bodies.system())\n\n .add_system(unpause_after_3s.system())\n\n //.add_system(print_positions.system())\n\n .run()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 30769.984709660013 }, { "content": "fn add_bodies(\n\n commands: &mut Commands,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut materials: ResMut<Assets<StandardMaterial>>,\n\n mut time: ResMut<Timestep>,\n\n scale: ResMut<TransformScale>,\n\n asset_server: Res<AssetServer>,\n\n) {\n\n const EARTH_MASS: f64 = 5.972e24;\n\n const EARTH_RADIUS: f64 = 6_371_009.0;\n\n const LUNAR_MASS: f64 = 7.3459e22;\n\n const LUNAR_RADIUS: f64 = 1_737_400.0;\n\n const LUNAR_APOGEE: f64 = 405_700_000.0;\n\n\n\n asset_server.load::<GravScene, _>(\"scenes/many.grav\");\n\n\n\n *time = Timestep::new_with_substeps(Timestep::DAY_PER_SECOND, false, 500);\n\n let scale = scale.0;\n\n commands\n\n .spawn((\n", "file_path": "src/main.rs", "rank": 9, "score": 29569.991390476076 }, { "content": "fn setup(commands: &mut Commands, asset_server: Res<AssetServer>) {\n\n commands\n\n // UI camera\n\n .spawn(CameraUiBundle::default())\n\n // texture\n\n 
.spawn(TextBundle {\n\n style: Style {\n\n align_self: AlignSelf::FlexEnd,\n\n ..Default::default()\n\n },\n\n text: Text {\n\n value: \"FPS:\".to_string(),\n\n font: asset_server.load(\"fonts/firasans/FiraSans-Bold.ttf\"),\n\n style: TextStyle {\n\n font_size: 60.0,\n\n color: Color::WHITE,\n\n ..Default::default()\n\n },\n\n },\n\n ..Default::default()\n\n })\n\n .with(FpsText);\n\n}\n", "file_path": "src/fps.rs", "rank": 11, "score": 19276.324590545108 }, { "content": "fn text_update_system(diagnostics: Res<Diagnostics>, mut query: Query<&mut Text, With<FpsText>>) {\n\n for mut text in query.iter_mut() {\n\n if let Some(fps) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) {\n\n if let Some(average) = fps.average() {\n\n text.value = format!(\"FPS: {:.2}\", average);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/fps.rs", "rank": 12, "score": 15963.399260382117 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub struct Position {\n\n pub x: Distance,\n\n pub y: Distance,\n\n pub z: Distance,\n\n}\n\n\n\nimpl Default for Position {\n\n fn default() -> Self {\n\n Position {\n\n x: Distance::m(0.0),\n\n y: Distance::m(0.0),\n\n z: Distance::m(0.0),\n\n }\n\n }\n\n}\n", "file_path": "src/scene/units.rs", "rank": 18, "score": 11.646795173695642 }, { "content": "}\n\n\n\nimpl Default for Time {\n\n fn default() -> Self {\n\n Time::seconds(1.0)\n\n }\n\n}\n\n\n\nimpl Time {\n\n pub fn to_seconds(self) -> f64 {\n\n use Time::*;\n\n\n\n match self {\n\n ms(x) => x * 0.001,\n\n s(x) | seconds(x) => x,\n\n min(x) | minutes(x) => x * 60.0,\n\n hr(x) | hours(x) => x * 360.0,\n\n days(x) => x * 86400.0,\n\n weeks(x) => x * 604800.0,\n\n years(x) => x * 3.154e7,\n", "file_path": "src/scene/units.rs", "rank": 19, "score": 9.833238575428014 }, { "content": "}\n\n\n\nimpl Default for Velocity {\n\n fn default() -> Self {\n\n Velocity {\n\n change: Position::nonzero(),\n\n per: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Velocity {\n\n pub fn to_mps(self) -> DVec3 {\n\n self.change.to_meters() / self.per.to_seconds()\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub struct Body {\n\n #[serde(default)]\n\n pub position: Position,\n\n pub mass: Mass,\n\n #[serde(default)]\n\n pub velocity: Velocity,\n\n pub radius: Distance,\n\n #[serde(default = \"super::new_color\")]\n\n pub color: [f32; 3],\n\n}\n\n\n", "file_path": "src/scene/units.rs", "rank": 20, "score": 9.734813384915606 }, { "content": "\n\nimpl Position {\n\n pub fn to_meters(self) -> DVec3 {\n\n DVec3::new(self.x.to_meters(), self.y.to_meters(), self.z.to_meters())\n\n }\n\n\n\n const fn nonzero() -> Position {\n\n Position {\n\n x: Distance::mm(1.0),\n\n y: Distance::mm(1.0),\n\n z: Distance::mm(1.0),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub struct Velocity {\n\n pub change: Position,\n\n #[serde(default)]\n\n pub per: Time,\n", "file_path": "src/scene/units.rs", "rank": 21, "score": 7.337466418178282 }, { "content": "use bevy::prelude::{Transform, Color, Vec3};\n\nuse bevy::reflect::TypeUuid;\n\nuse serde::{Deserialize, Serialize};\n\nuse ultraviolet::DVec3;\n\nuse crate::body;\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub enum Distance {\n\n mm(f64),\n\n cm(f64),\n\n r#in(f64),\n\n inches(f64),\n\n ft(f64),\n\n feet(f64),\n\n yd(f64),\n\n yards(f64),\n\n m(f64),\n\n meters(f64),\n\n km(f64),\n", "file_path": "src/scene/units.rs", "rank": 22, "score": 7.260219930046377 }, { "content": " 
earth_masses(f64),\n\n jupiter_masses(f64),\n\n jovian_masses(f64),\n\n solar_masses(f64),\n\n}\n\n\n\nimpl Mass {\n\n pub fn to_kg(self) -> f64 {\n\n use Mass::*;\n\n\n\n match self {\n\n g(x) | grams(x) => x * 0.001,\n\n oz(x) | ounces(x) => x * 0.0283495,\n\n lb(x) | pounds(x) => x * 0.453592,\n\n kg(x) | kilograms(x) => x,\n\n t(x) | ton(x) | tons(x) | metric_tons(x) => x * 1000.0,\n\n lunar_masses(x) => x * 7.342e22,\n\n earth_masses(x) => x * 5.9722e24,\n\n jupiter_masses(x) | jovian_masses(x) => x * 1.89813e27,\n\n solar_masses(x) => x * 1.98847e30,\n", "file_path": "src/scene/units.rs", "rank": 25, "score": 5.859379750808837 }, { "content": " }\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub enum Mass {\n\n g(f64),\n\n grams(f64),\n\n oz(f64),\n\n ounces(f64),\n\n lb(f64),\n\n pounds(f64),\n\n kg(f64),\n\n kilograms(f64),\n\n t(f64),\n\n ton(f64),\n\n tons(f64),\n\n metric_tons(f64),\n\n lunar_masses(f64),\n", "file_path": "src/scene/units.rs", "rank": 26, "score": 5.556865612326973 }, { "content": " light_days(x) => x * 2.59020683712e13,\n\n light_years(x) => x * 9.4607304725808e15,\n\n pc(x) | parsecs(x) => x * 3.085677581491367278913937957796472e16,\n\n }\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Serialize, Deserialize, Clone, Copy)]\n\npub enum Time {\n\n ms(f64),\n\n s(f64),\n\n seconds(f64),\n\n min(f64),\n\n minutes(f64),\n\n hr(f64),\n\n hours(f64),\n\n days(f64),\n\n weeks(f64),\n\n years(f64),\n", "file_path": "src/scene/units.rs", "rank": 27, "score": 5.359615950367138 }, { "content": " miles(f64),\n\n lunar_radii(f64),\n\n earth_radii(f64),\n\n jupiter_radii(f64),\n\n jovian_radii(f64),\n\n light_seconds(f64),\n\n ld(f64),\n\n lunar_distances(f64),\n\n solar_radii(f64),\n\n light_minutes(f64),\n\n au(f64),\n\n light_hours(f64),\n\n light_days(f64),\n\n light_years(f64),\n\n pc(f64),\n\n parsecs(f64),\n\n}\n\n\n\nimpl Distance {\n\n pub fn to_meters(self) -> f64 {\n", "file_path": "src/scene/units.rs", "rank": 28, "score": 5.26881895825113 }, { "content": "use bevy::prelude::*;\n\nuse bevy::ecs::Stage;\n\nuse bevy::app::startup_stage::POST_STARTUP;\n\nuse bevy::asset::{LoadContext, AssetLoader, LoadedAsset, Assets};\n\nuse bevy::utils::BoxedFuture;\n\n\n\nmod units;\n\nmod newcolor;\n\nuse newcolor::new_color;\n\nuse units::{*, Scene};\n\nuse crate::body::TransformScale;\n\npub use units::Scene as GravScene;\n\n\n\n/// Asset loader for `.grav` scenes.\n\n#[derive(Default)]\n", "file_path": "src/scene/mod.rs", "rank": 29, "score": 4.668076000431325 }, { "content": "use bevy::prelude::*;\n\nuse bevy::diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin};\n\n\n\npub struct FPSPlugin;\n\n\n\nimpl Plugin for FPSPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app\n\n .add_plugin(FrameTimeDiagnosticsPlugin::default())\n\n .add_startup_system(setup.system())\n\n .add_system(text_update_system.system());\n\n }\n\n}\n\n\n", "file_path": "src/fps.rs", "rank": 31, "score": 4.096397109302247 }, { "content": "use std::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nstatic COUNTER: AtomicUsize = AtomicUsize::new(0);\n\nstatic COLORS: [[f32; 3]; 64] = include!(\"newcolor_colors\");\n\n\n\n/// Get a new unique color, specified by RGB components.\n", "file_path": "src/scene/newcolor.rs", "rank": 32, "score": 4.083831016851249 }, { "content": " body::Velocity(self.velocity.to_mps()),\n\n ), (\n\n self.radius.to_meters(),\n\n Color::rgb(self.color[0], self.color[1], self.color[2]),\n\n 
Transform::from_translation(Vec3::new(\n\n (position.x * tfs) as f32,\n\n (position.y * tfs) as f32,\n\n (position.z * tfs) as f32,\n\n ))\n\n ))\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, TypeUuid)]\n\n#[uuid = \"99652da1-11d6-44fe-94cb-91b4271b5cc8\"]\n\npub struct Scene(pub Vec<Body>);\n", "file_path": "src/scene/units.rs", "rank": 33, "score": 3.8203483422071867 }, { "content": "\n\n/// Plugin that loads and imports all `.grav` assets into the universe.\n\n#[derive(Default)]\n\npub struct GravScenePlugin;\n\n\n\nimpl Plugin for GravScenePlugin {\n\n fn build(&self, app: &mut bevy::prelude::AppBuilder) {\n\n app\n\n .add_asset::<Scene>()\n\n .add_asset_loader(SceneLoader)\n\n .add_startup_stage_before(POST_STARTUP, IMPORT_SCENES_STAGE, ImportScenes);\n\n }\n\n}\n\n\n\npub const IMPORT_SCENES_STAGE: &str = \"import scene stage\";\n", "file_path": "src/scene/mod.rs", "rank": 34, "score": 3.668605338781455 }, { "content": "use bevy::prelude::*;\n\nuse ultraviolet::DVec3;\n\n\n\nmod body;\n\nmod fps;\n\nmod scene;\n\nuse body::*;\n\nuse fps::*;\n\nuse scene::*;\n\n\n\n#[bevy_main]\n", "file_path": "src/main.rs", "rank": 38, "score": 2.874782497895442 }, { "content": " Mass(EARTH_MASS),\n\n Position::new(0.0, 0.0, 0.0),\n\n Velocity::default(),\n\n LinAccel::default(),\n\n NewLinAccel::default(),\n\n ))\n\n .with_bundle(PbrBundle {\n\n mesh: meshes.add(Mesh::from(shape::Icosphere { radius: (EARTH_RADIUS * scale) as f32, subdivisions: 30 })),\n\n material: materials.add(Color::BLUE.into()),\n\n transform: Transform::from_translation(Vec3::zero()),\n\n ..Default::default()\n\n })\n\n .with_children(|cb| { cb\n\n .spawn(Camera3dBundle {\n\n transform: Transform::from_translation(Vec3::new(6.0, 2.0, 6.0))\n\n .looking_at(Vec3::zero(), Vec3::unit_y()),\n\n ..Default::default()\n\n });\n\n })\n\n .spawn((\n", "file_path": "src/main.rs", "rank": 40, "score": 2.4373256321193817 }, { "content": " Mass(LUNAR_MASS),\n\n Position::new(LUNAR_APOGEE, 0.0, 0.0),\n\n Velocity(DVec3::new(0.0, 0.0, 970.0)),\n\n LinAccel::default(),\n\n NewLinAccel::default(),\n\n ))\n\n .with_bundle(PbrBundle {\n\n mesh: meshes.add(Mesh::from(shape::Icosphere { radius: (LUNAR_RADIUS * scale) as f32, subdivisions: 30 })),\n\n material: materials.add(Color::WHITE.into()),\n\n transform: Transform::from_translation(Vec3::new((LUNAR_APOGEE * scale) as f32, 0.0, 0.0)),\n\n ..Default::default()\n\n });\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 2.1431362411674466 }, { "content": " use Distance::*;\n\n\n\n match self {\n\n mm(x) => x * 0.001,\n\n cm(x) => x * 0.01,\n\n r#in(x) | inches(x) => x * 0.0254,\n\n ft(x) | feet(x) => x * 0.3048,\n\n yd(x) | yards(x) => x * 0.9144,\n\n m(x) | meters(x) => x,\n\n km(x) => x * 1000.0,\n\n miles(x) => x * 1609.344,\n\n lunar_radii(x) => x * 1_737_400.0,\n\n earth_radii(x) => x * 6.371e6,\n\n jupiter_radii(x) | jovian_radii(x) => x * 7.1492e7,\n\n light_seconds(x) => x * 2.99792458e8,\n\n ld(x) | lunar_distances(x) => x * 397126e3,\n\n solar_radii(x) => x * 6.957e8,\n\n light_minutes(x) => x * 1.798754748e10,\n\n au(x) => x * 1.495978707e11,\n\n light_hours(x) => x * 1.0792528488e12,\n", "file_path": "src/scene/units.rs", "rank": 43, "score": 1.2036826550830186 }, { "content": " let material = materials.add(color.into());\n\n world\n\n .build()\n\n // add the body bundle\n\n .spawn(body_data)\n\n // add a mesh bundle for the sphere\n\n .with_bundle(PbrBundle {\n\n mesh: meshes.add(Mesh::from(shape::Icosphere {\n\n radius: (radius * scale) as f32,\n\n subdivisions: 
10,\n\n })),\n\n material,\n\n transform,\n\n ..Default::default()\n\n });\n\n }\n\n }\n\n\n\n fn run(&mut self, _world: &mut World, _resources: &mut Resources) {}\n\n}\n", "file_path": "src/scene/mod.rs", "rank": 44, "score": 1.187566597413332 }, { "content": "# gravity\n\n\n\nAn n-body gravity simulator using a first order approximation of general relativity.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0\n\n ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license\n\n ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n", "file_path": "README.md", "rank": 45, "score": 0.9780462580534728 } ]
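The unit enums quoted in the retrieved snippets above (`Distance`, `Time`, `Mass` from src/scene/units.rs) all normalize to SI base units via `to_meters`, `to_seconds` and `to_kg`. A minimal sketch of how these conversions compose, assuming the enums shown above are in scope; this is an illustration only, not part of the quoted repository:

// Sketch: derive a speed in m/s from the unit enums shown in the snippets above.
fn average_speed_mps() -> f64 {
    let distance = Distance::km(1.5);   // to_meters() -> 1500.0
    let duration = Time::minutes(2.0);  // to_seconds() -> 120.0
    distance.to_meters() / duration.to_seconds() // 12.5 m/s
}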
Rust
src/ir/operator.rs
dan-zheng/telamon
de463284fdcea70ce29cf43a9c62f3aa2da14276
use self::Operator::*; use crate::ir::{self, AccessPattern, LoweringMap, Operand, Type}; use fxhash::FxHashSet; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::{self, fmt}; #[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)] #[repr(C)] pub enum Rounding { Exact, Nearest, Zero, Positive, Negative, } impl std::fmt::Display for Rounding { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let name = match self { Rounding::Exact => "exact", Rounding::Nearest => "toward nearest", Rounding::Zero => "toward zero", Rounding::Positive => "toward +inf", Rounding::Negative => "toward -inf", }; write!(f, "{}", name) } } impl Rounding { fn check(self, t: ir::Type) -> Result<(), ir::TypeError> { if t.is_float() ^ (self == Rounding::Exact) { Ok(()) } else { Err(ir::TypeError::InvalidRounding { rounding: self, t }) } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum BinOp { Add, Sub, Div, And, Or, Lt, Leq, Equals, Max, } impl fmt::Display for BinOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(self.name()) } } impl BinOp { fn name(self) -> &'static str { match self { BinOp::Add => "add", BinOp::Sub => "sub", BinOp::Div => "div", BinOp::And => "and", BinOp::Or => "or", BinOp::Lt => "lt", BinOp::Leq => "leq", BinOp::Equals => "equals", BinOp::Max => "max", } } pub fn t(self, operand_type: ir::Type) -> ir::Type { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals => ir::Type::I(1), _ => operand_type, } } fn requires_rounding(self) -> bool { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals | BinOp::Max => false, _ => true, } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum UnaryOp { Mov, Cast(ir::Type), Exp(ir::Type), } impl fmt::Display for UnaryOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { UnaryOp::Exp(..) => fmt.write_str("exp"), UnaryOp::Mov => fmt.write_str("mov"), UnaryOp::Cast(t) => write!(fmt, "cast({})", t), } } } impl UnaryOp { fn t(self, op_type: ir::Type) -> ir::Type { match self { UnaryOp::Mov | UnaryOp::Exp(..) 
=> op_type, UnaryOp::Cast(t) => t, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Operator<L = LoweringMap> { BinOp(BinOp, Operand<L>, Operand<L>, Rounding), UnaryOp(UnaryOp, Operand<L>), Mul(Operand<L>, Operand<L>, Rounding, Type), Mad(Operand<L>, Operand<L>, Operand<L>, Rounding), Ld(Type, Operand<L>, AccessPattern), St(Operand<L>, Operand<L>, bool, AccessPattern), TmpLd(Type, ir::MemId), TmpSt(Operand<L>, ir::MemId), } impl<L> Operator<L> { pub fn check( &self, iter_dims: &FxHashSet<ir::DimId>, fun: &ir::Function<L>, ) -> Result<(), ir::Error> { self.t() .map(|t| fun.device().check_type(t)) .unwrap_or(Ok(()))?; for operand in self.operands() { fun.device().check_type(operand.t())?; if let Some(dim_map) = operand.mapped_dims() { for &(lhs, rhs) in dim_map { if fun.find_mapping(lhs, rhs).is_none() { Err(ir::Error::MissingDimMapping { lhs, rhs })?; } } } } match *self { BinOp(operator, ref lhs, ref rhs, rounding) => { if operator.requires_rounding() { rounding.check(lhs.t())?; } else if rounding != Rounding::Exact { Err(ir::TypeError::InvalidRounding { rounding, t: lhs.t(), })?; } ir::TypeError::check_equals(lhs.t(), rhs.t())?; } Mul(ref lhs, ref rhs, rounding, res_type) => { rounding.check(lhs.t())?; ir::TypeError::check_equals(lhs.t(), rhs.t())?; match (lhs.t(), res_type) { (x, z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Mad(ref mul_lhs, ref mul_rhs, ref add_rhs, rounding) => { rounding.check(mul_lhs.t())?; ir::TypeError::check_equals(mul_lhs.t(), mul_rhs.t())?; match (mul_lhs.t(), add_rhs.t()) { (ref x, ref z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Ld(_, ref addr, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } St(ref addr, _, _, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } TmpLd(..) | UnaryOp(..) | TmpSt(..) => (), } Ok(()) } pub fn t(&self) -> Option<Type> { match self { Mad(_, _, op, _) => Some(op.t()), Ld(t, ..) | TmpLd(t, _) | Mul(.., t) => Some(*t), BinOp(operator, lhs, ..) => Some(operator.t(lhs.t())), UnaryOp(operator, operand) => Some(operator.t(operand.t())), St(..) | TmpSt(..) => None, } } pub fn operands(&self) -> Vec<&Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op) | Ld(_, op, _) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn operands_mut<'b>(&'b mut self) -> Vec<&'b mut Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op, ..) | Ld(_, op, ..) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn has_side_effects(&self) -> bool { match self { St(_, _, b, _) => *b, BinOp(..) | UnaryOp(..) | Mul(..) | Mad(..) | Ld(..) | TmpLd(..) | TmpSt(..) => false, } } pub fn is_mem_access(&self) -> bool { match self { St(..) | Ld(..) | TmpSt(..) | TmpLd(..) 
=> true, _ => false, } } pub fn merge_dims(&mut self, lhs: ir::DimId, rhs: ir::DimId) { self.operands_mut() .iter_mut() .for_each(|x| x.merge_dims(lhs, rhs)); } pub fn mem_access_pattern(&self) -> Option<Cow<AccessPattern>> { match *self { Ld(_, _, ref pattern) | St(_, _, _, ref pattern) => { Some(Cow::Borrowed(pattern)) } TmpLd(_, mem_id) | TmpSt(_, mem_id) => { Some(Cow::Owned(AccessPattern::Unknown(Some(mem_id)))) } _ => None, } } pub fn mem_used(&self) -> Option<ir::MemId> { self.mem_access_pattern().and_then(|p| p.mem_block()) } pub fn map_operands<T, F>(self, mut f: F) -> Operator<T> where F: FnMut(Operand<L>) -> Operand<T>, { match self { BinOp(op, oper1, oper2, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); BinOp(op, oper1, oper2, rounding) } UnaryOp(operator, operand) => UnaryOp(operator, f(operand)), Mul(oper1, oper2, rounding, t) => { let oper1 = f(oper1); let oper2 = f(oper2); Mul(oper1, oper2, rounding, t) } Mad(oper1, oper2, oper3, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); let oper3 = f(oper3); Mad(oper1, oper2, oper3, rounding) } Ld(t, oper1, ap) => { let oper1 = f(oper1); Ld(t, oper1, ap) } St(oper1, oper2, side_effects, ap) => { let oper1 = f(oper1); let oper2 = f(oper2); St(oper1, oper2, side_effects, ap) } TmpLd(t, id) => TmpLd(t, id), TmpSt(oper1, id) => { let oper1 = f(oper1); TmpSt(oper1, id) } } } } impl<L> ir::IrDisplay<L> for Operator<L> { fn fmt(&self, fmt: &mut fmt::Formatter, function: &ir::Function<L>) -> fmt::Result { match self { BinOp(op, lhs, rhs, _rnd) => write!( fmt, "{}({}, {})", op, lhs.display(function), rhs.display(function) ), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg.display(function)), Mul(lhs, rhs, _rnd, _t) => write!( fmt, "mul({}, {})", lhs.display(function), rhs.display(function) ), Mad(arg0, arg1, arg2, _rnd) => write!( fmt, "mad({}, {}, {})", arg0.display(function), arg1.display(function), arg2.display(function) ), Ld(_t, arg, _ap) => write!(fmt, "load({})", arg.display(function)), St(dst, src, _side_effects, _ap) => write!( fmt, "store({}, {})", dst.display(function), src.display(function) ), TmpLd(_t, mem) => write!(fmt, "load({})", mem), TmpSt(src, mem) => write!(fmt, "store({}, {})", mem, src.display(function)), } } } impl Operator<()> { pub fn freeze(self, cnt: &mut ir::Counter) -> Operator { self.map_operands(|oper| oper.freeze(cnt)) } } impl<L> std::fmt::Display for Operator<L> { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { match self { BinOp(op, lhs, rhs, rnd) => write!(fmt, "{}[{}]({}, {})", op, rnd, lhs, rhs), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg), Mul(lhs, rhs, rnd, t) => write!(fmt, "Mul<{}>[{}]({}, {})", t, rnd, lhs, rhs), Mad(arg0, arg1, arg2, rnd) => { write!(fmt, "Mad[{}]({}, {}, {})", rnd, arg0, arg1, arg2) } Ld(_t, arg, _ap) => write!(fmt, "Load({})", arg), St(dst, src, _side_effects, _ap) => write!(fmt, "Store({}, {})", dst, src), TmpLd(_t, mem) => write!(fmt, "TempLoad({})", mem), TmpSt(src, mem) => write!(fmt, "TempStore({}, {})", mem, src), } } }
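The `BinOp::t` helper in the file above encodes the operator typing rule: comparison operators (`Lt`, `Leq`, `Equals`) produce a 1-bit integer, while every other binary operator keeps its operand type. A minimal sketch of that rule, assuming `BinOp` and `Type` from this file are in scope; it is an illustration, not part of the original source:

// Sketch: comparisons are typed as i1, arithmetic results keep the operand type.
fn binop_typing_example() {
    let cmp_t = BinOp::Lt.t(Type::I(32));  // yields Type::I(1)
    let add_t = BinOp::Add.t(Type::I(64)); // yields Type::I(64)
    match (cmp_t, add_t) {
        (Type::I(1), Type::I(64)) => (),
        _ => unreachable!("unexpected result from BinOp::t"),
    }
}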
use self::Operator::*; use crate::ir::{self, AccessPattern, LoweringMap, Operand, Type}; use fxhash::FxHashSet; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::{self, fmt}; #[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)] #[repr(C)] pub enum Rounding { Exact, Nearest, Zero, Positive, Negative, } impl std::fmt::Display for Rounding { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let name = match self { Rounding::Exact => "exact", Rounding::Nearest => "toward nearest", Rounding::Zero => "toward zero", Rounding::Positive => "toward +inf", Rounding::Negative => "toward -inf", }; write!(f, "{}", name) } } impl Rounding { fn check(self, t: ir::Type) -> Result<(), ir::TypeError> { if t.is_float() ^ (self == Rounding::Exact) { Ok(()) } else { Err(ir::TypeError::InvalidRounding { rounding: self, t }) } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum BinOp { Add, Sub, Div, And, Or, Lt, Leq, Equals, Max, } impl fmt::Display for BinOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(self.name()) } } impl BinOp { fn name(self) -> &'static str { match self { BinOp::Add => "add", Bi
pub fn t(self, operand_type: ir::Type) -> ir::Type { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals => ir::Type::I(1), _ => operand_type, } } fn requires_rounding(self) -> bool { match self { BinOp::Lt | BinOp::Leq | BinOp::Equals | BinOp::Max => false, _ => true, } } } #[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[repr(C)] pub enum UnaryOp { Mov, Cast(ir::Type), Exp(ir::Type), } impl fmt::Display for UnaryOp { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { UnaryOp::Exp(..) => fmt.write_str("exp"), UnaryOp::Mov => fmt.write_str("mov"), UnaryOp::Cast(t) => write!(fmt, "cast({})", t), } } } impl UnaryOp { fn t(self, op_type: ir::Type) -> ir::Type { match self { UnaryOp::Mov | UnaryOp::Exp(..) => op_type, UnaryOp::Cast(t) => t, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Operator<L = LoweringMap> { BinOp(BinOp, Operand<L>, Operand<L>, Rounding), UnaryOp(UnaryOp, Operand<L>), Mul(Operand<L>, Operand<L>, Rounding, Type), Mad(Operand<L>, Operand<L>, Operand<L>, Rounding), Ld(Type, Operand<L>, AccessPattern), St(Operand<L>, Operand<L>, bool, AccessPattern), TmpLd(Type, ir::MemId), TmpSt(Operand<L>, ir::MemId), } impl<L> Operator<L> { pub fn check( &self, iter_dims: &FxHashSet<ir::DimId>, fun: &ir::Function<L>, ) -> Result<(), ir::Error> { self.t() .map(|t| fun.device().check_type(t)) .unwrap_or(Ok(()))?; for operand in self.operands() { fun.device().check_type(operand.t())?; if let Some(dim_map) = operand.mapped_dims() { for &(lhs, rhs) in dim_map { if fun.find_mapping(lhs, rhs).is_none() { Err(ir::Error::MissingDimMapping { lhs, rhs })?; } } } } match *self { BinOp(operator, ref lhs, ref rhs, rounding) => { if operator.requires_rounding() { rounding.check(lhs.t())?; } else if rounding != Rounding::Exact { Err(ir::TypeError::InvalidRounding { rounding, t: lhs.t(), })?; } ir::TypeError::check_equals(lhs.t(), rhs.t())?; } Mul(ref lhs, ref rhs, rounding, res_type) => { rounding.check(lhs.t())?; ir::TypeError::check_equals(lhs.t(), rhs.t())?; match (lhs.t(), res_type) { (x, z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Mad(ref mul_lhs, ref mul_rhs, ref add_rhs, rounding) => { rounding.check(mul_lhs.t())?; ir::TypeError::check_equals(mul_lhs.t(), mul_rhs.t())?; match (mul_lhs.t(), add_rhs.t()) { (ref x, ref z) if x == z => (), (Type::I(32), Type::I(64)) | (Type::I(32), Type::PtrTo(_)) => (), (_, t) => Err(ir::TypeError::UnexpectedType { t })?, } } Ld(_, ref addr, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } St(ref addr, _, _, ref pattern) => { pattern.check(iter_dims)?; let pointer_type = pattern.pointer_type(fun.device()); ir::TypeError::check_equals(addr.t(), pointer_type)?; } TmpLd(..) | UnaryOp(..) | TmpSt(..) => (), } Ok(()) } pub fn t(&self) -> Option<Type> { match self { Mad(_, _, op, _) => Some(op.t()), Ld(t, ..) | TmpLd(t, _) | Mul(.., t) => Some(*t), BinOp(operator, lhs, ..) => Some(operator.t(lhs.t())), UnaryOp(operator, operand) => Some(operator.t(operand.t())), St(..) | TmpSt(..) => None, } } pub fn operands(&self) -> Vec<&Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op) | Ld(_, op, _) | TmpSt(op, _) => vec![op], TmpLd(..) 
=> vec![], } } pub fn operands_mut<'b>(&'b mut self) -> Vec<&'b mut Operand<L>> { match self { BinOp(_, lhs, rhs, _) | Mul(lhs, rhs, _, _) | St(lhs, rhs, _, _) => { vec![lhs, rhs] } Mad(mul_lhs, mul_rhs, add_rhs, _) => vec![mul_lhs, mul_rhs, add_rhs], UnaryOp(_, op, ..) | Ld(_, op, ..) | TmpSt(op, _) => vec![op], TmpLd(..) => vec![], } } pub fn has_side_effects(&self) -> bool { match self { St(_, _, b, _) => *b, BinOp(..) | UnaryOp(..) | Mul(..) | Mad(..) | Ld(..) | TmpLd(..) | TmpSt(..) => false, } } pub fn is_mem_access(&self) -> bool { match self { St(..) | Ld(..) | TmpSt(..) | TmpLd(..) => true, _ => false, } } pub fn merge_dims(&mut self, lhs: ir::DimId, rhs: ir::DimId) { self.operands_mut() .iter_mut() .for_each(|x| x.merge_dims(lhs, rhs)); } pub fn mem_access_pattern(&self) -> Option<Cow<AccessPattern>> { match *self { Ld(_, _, ref pattern) | St(_, _, _, ref pattern) => { Some(Cow::Borrowed(pattern)) } TmpLd(_, mem_id) | TmpSt(_, mem_id) => { Some(Cow::Owned(AccessPattern::Unknown(Some(mem_id)))) } _ => None, } } pub fn mem_used(&self) -> Option<ir::MemId> { self.mem_access_pattern().and_then(|p| p.mem_block()) } pub fn map_operands<T, F>(self, mut f: F) -> Operator<T> where F: FnMut(Operand<L>) -> Operand<T>, { match self { BinOp(op, oper1, oper2, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); BinOp(op, oper1, oper2, rounding) } UnaryOp(operator, operand) => UnaryOp(operator, f(operand)), Mul(oper1, oper2, rounding, t) => { let oper1 = f(oper1); let oper2 = f(oper2); Mul(oper1, oper2, rounding, t) } Mad(oper1, oper2, oper3, rounding) => { let oper1 = f(oper1); let oper2 = f(oper2); let oper3 = f(oper3); Mad(oper1, oper2, oper3, rounding) } Ld(t, oper1, ap) => { let oper1 = f(oper1); Ld(t, oper1, ap) } St(oper1, oper2, side_effects, ap) => { let oper1 = f(oper1); let oper2 = f(oper2); St(oper1, oper2, side_effects, ap) } TmpLd(t, id) => TmpLd(t, id), TmpSt(oper1, id) => { let oper1 = f(oper1); TmpSt(oper1, id) } } } } impl<L> ir::IrDisplay<L> for Operator<L> { fn fmt(&self, fmt: &mut fmt::Formatter, function: &ir::Function<L>) -> fmt::Result { match self { BinOp(op, lhs, rhs, _rnd) => write!( fmt, "{}({}, {})", op, lhs.display(function), rhs.display(function) ), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg.display(function)), Mul(lhs, rhs, _rnd, _t) => write!( fmt, "mul({}, {})", lhs.display(function), rhs.display(function) ), Mad(arg0, arg1, arg2, _rnd) => write!( fmt, "mad({}, {}, {})", arg0.display(function), arg1.display(function), arg2.display(function) ), Ld(_t, arg, _ap) => write!(fmt, "load({})", arg.display(function)), St(dst, src, _side_effects, _ap) => write!( fmt, "store({}, {})", dst.display(function), src.display(function) ), TmpLd(_t, mem) => write!(fmt, "load({})", mem), TmpSt(src, mem) => write!(fmt, "store({}, {})", mem, src.display(function)), } } } impl Operator<()> { pub fn freeze(self, cnt: &mut ir::Counter) -> Operator { self.map_operands(|oper| oper.freeze(cnt)) } } impl<L> std::fmt::Display for Operator<L> { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { match self { BinOp(op, lhs, rhs, rnd) => write!(fmt, "{}[{}]({}, {})", op, rnd, lhs, rhs), UnaryOp(op, arg) => write!(fmt, "{}({})", op, arg), Mul(lhs, rhs, rnd, t) => write!(fmt, "Mul<{}>[{}]({}, {})", t, rnd, lhs, rhs), Mad(arg0, arg1, arg2, rnd) => { write!(fmt, "Mad[{}]({}, {}, {})", rnd, arg0, arg1, arg2) } Ld(_t, arg, _ap) => write!(fmt, "Load({})", arg), St(dst, src, _side_effects, _ap) => write!(fmt, "Store({}, {})", dst, src), TmpLd(_t, mem) => write!(fmt, 
"TempLoad({})", mem), TmpSt(src, mem) => write!(fmt, "TempStore({}, {})", mem, src), } } }
nOp::Sub => "sub", BinOp::Div => "div", BinOp::And => "and", BinOp::Or => "or", BinOp::Lt => "lt", BinOp::Leq => "leq", BinOp::Equals => "equals", BinOp::Max => "max", } }
function_block-function_prefixed
[ { "content": "#[allow(unused_mut)]\n\npub fn set_{{name}}(&mut self{{>args_decl}}, mut value: {{>value_type.name value_type}}) {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n {{~#if is_antisymmetric}}value = value.inverse();{{/if}}\n\n }\n\n {{~/if}}\n\n debug!(\"set {{name}}{:?} to {:?}\", {{>args}}, value);\n\n *unwrap!(Arc::make_mut(&mut self.{{name}}).get_mut(&{{>args}})) = value;\n\n}\n\n\n\n/// Restricts the domain of {name} for the given arguments. Put the old value in `diff`\n\n/// and indicates if the new domain is failed.\n", "file_path": "telamon-gen/src/print/template/getter.rs", "rank": 0, "score": 391374.00533305743 }, { "content": "#[allow(unused_mut)]\n\npub fn restrict_{{name}}(&mut self{{>args_decl}}, mut value: {{>value_type.name value_type}},\n\n diff: &mut DomainDiff) -> Result<(), ()> {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n {{~#if is_antisymmetric}}value = value.inverse();{{/if}}\n\n }\n\n {{~/if}}\n\n let mut ptr = unwrap!(Arc::make_mut(&mut self.{{name}}).get_mut(&{{>args}}));\n\n let old = *ptr;\n\n ptr.{{>restrict_op}}(value);\n\n if old != *ptr {\n\n debug!(\"restrict {{name}}{:?} to {:?}\", {{>args}}, *ptr);\n\n diff.{{name}}.entry({{>args}}).or_insert((old, *ptr)).1 = *ptr;\n\n }\n\n if ptr.is_failed() { Err(()) } else { Ok(()) }\n\n}\n\n\n\n{{#if compute_counter~}}\n\n/// Updates a counter by changing the value of an increment.\n", "file_path": "telamon-gen/src/print/template/getter.rs", "rank": 1, "score": 391374.00533305743 }, { "content": "/// Prints the inverse function if needed.\n\nfn inverse(enum_: &ir::Enum, f: &mut Formatter) -> fmt::Result {\n\n if let Some(mapping) = enum_.inverse_mapping() {\n\n let mut values: FxHashSet<_> = enum_.values().keys().collect();\n\n let mut low = Vec::new();\n\n let mut high = Vec::new();\n\n for &(ref lhs, ref rhs) in mapping {\n\n values.remove(lhs);\n\n values.remove(rhs);\n\n low.push(lhs);\n\n high.push(rhs);\n\n }\n\n let n = enum_.name();\n\n let same_bits = if values.is_empty() {\n\n \"0\".to_string()\n\n } else {\n\n values\n\n .iter()\n\n .map(|v| format!(\"{}::{}.bits\", n, v))\n\n .format(\" | \")\n\n .to_string()\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 2, "score": 373584.5580543304 }, { "content": "#[allow(unused_mut)]\n\npub fn get_{{name}}(&self{{>args_decl}}) -> {{>value_type.name value_type}} {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n self.{{name}}[&{{>args}}]{{#if is_antisymmetric}}.inverse(){{/if}}\n\n } else {\n\n {{~/if}} self.{{name}}[&{{>args}}] {{#if is_symmetric}} } {{/if}}\n\n}\n\n\n\n/// Returns the domain of {name} for the given arguments. 
If the domain has been restricted\n\n/// but the change not yet applied, returns the old value.\n", "file_path": "telamon-gen/src/print/template/getter.rs", "rank": 3, "score": 331962.2936328981 }, { "content": "#[allow(unused_mut)]\n\nfn update_{{name}}(&mut self{{>args_decl}}, old_incr: {{>value_type.name value_type}},\n\n new_incr: {{>value_type.name value_type}}, diff: &mut DomainDiff) -> Result<(), ()> {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n }\n\n {{~/if}}\n\n let mut ptr = unwrap!(Arc::make_mut(&mut self.{{name}}).get_mut(&{{>args}}));\n\n let old = *ptr;\n\n {{#*inline \"op_name\"~}}\n\n {{#ifeq compute_counter.op \"+\"}}add{{else}}mul{{/ifeq~}}\n\n {{/inline~}}\n\n ptr.sub_{{>op_name}}(old_incr);\n\n ptr.add_{{>op_name}}(new_incr);\n\n if old != *ptr {\n\n debug!(\"update {{name}}{:?} to {:?}\", {{>args}}, *ptr);\n\n diff.{{name}}.entry({{>args}}).or_insert((old, *ptr)).1 = *ptr;\n\n }\n\n if ptr.is_failed() { Err(()) } else { Ok(()) }\n\n}\n\n{{/if}}\n", "file_path": "telamon-gen/src/print/template/getter.rs", "rank": 4, "score": 319891.5880069436 }, { "content": "/// Displays a bound. Only prints the origin if it is a chain.\n\nfn display_inline_chain(bound: &Bound, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Origin::Chain { .. } = bound.origin {\n\n write!(f, \"{}\", bound.origin)\n\n } else {\n\n write!(f, \"{:.2e}ns: {}\", bound.value, bound.origin)\n\n }\n\n}\n\n\n\n/// The pressure on the hardware induced by a computation.\n\n#[derive(Clone, Debug)]\n\npub struct HwPressure {\n\n latency: f64,\n\n bottlenecks: Vec<f64>,\n\n}\n\n\n\nimpl HwPressure {\n\n /// Creates a new `Pressure`\n\n pub fn new(latency: f64, bottlenecks: Vec<f64>) -> Self {\n\n HwPressure {\n\n latency,\n", "file_path": "src/model/hw_pressure.rs", "rank": 5, "score": 314334.03017213766 }, { "content": "/// Indicates if the filter should be run.\n\npub fn is_enabled(name: &str) -> bool {\n\n ::bencher::FILTER.as_ref().map(|filter| name.contains(filter)).unwrap_or(true)\n\n}\n\n\n\n/// Runs a benchmark.\n\nmacro_rules! 
benchmark {\n\n ($name:ident, $samples:expr$(,$args:expr)*) => {\n\n let str_name = stringify!($name);\n\n if is_enabled(str_name) {\n\n let bencher = Bencher::new(str_name, $samples);\n\n $name(&bencher, $($args),*)\n\n }\n\n }\n\n}\n", "file_path": "old_benches/bencher.rs", "rank": 6, "score": 312602.5665787604 }, { "content": "#[allow(unused_mut)]\n\npub fn get_old_{{name}}(&self{{>args_decl}},diff: &DomainDiff) -> {{>value_type.name value_type}} {\n\n {{#if is_symmetric~}}\n\n if {{arguments.[0].[0]}} > {{arguments.[1].[0]}} {\n\n std::mem::swap(&mut {{arguments.[0].[0]}}, &mut {{arguments.[1].[0]}});\n\n diff.{{name}}.get(&{{>args}}).map(|&(old, _)| old)\n\n .unwrap_or_else(|| self.{{name}}[&{{>args}}])\n\n {{#if is_antisymmetric}}.inverse(){{/if}}\n\n } else {\n\n {{~/if~}}\n\n diff.{{name}}.get(&{{>args}}).map(|&(old, _)| old)\n\n .unwrap_or_else(|| self.{{name}}[&{{>args}}])\n\n {{~#if is_symmetric}} } {{/if}}\n\n}\n\n\n\n/// Sets the domain of {name} for the given arguments.\n", "file_path": "telamon-gen/src/print/template/getter.rs", "rank": 7, "score": 306953.10087416804 }, { "content": "/// Binds a parameter to a value in the given context.\n\npub fn bind_scalar<T: ScalarArgument>(name: &str, val: T, context: &mut Context) {\n\n let p = ir::Parameter {\n\n t: T::t(),\n\n name: name.to_string(),\n\n elem_t: None,\n\n };\n\n context.bind_scalar(&p, val);\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 8, "score": 302838.2215645777 }, { "content": "/// Binds a parameter to a value in the given context.\n\npub fn bind_array<'a, T: 'a>(name: &str, len: usize, context: &mut Context<'a>) {\n\n let array = std::sync::Arc::new(context.executor().allocate_array::<T>(len));\n\n context.bind_param(name.to_string(), array.clone());\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 9, "score": 300853.50172315864 }, { "content": "/// Converts a choice name to a rust type name.\n\nfn to_type_name(name: &str) -> String {\n\n let mut result = \"\".to_string();\n\n let mut is_new_word = true;\n\n let mut last_is_sep = true;\n\n for c in name.chars() {\n\n if c != '_' || last_is_sep {\n\n if is_new_word {\n\n result.extend(c.to_uppercase());\n\n } else {\n\n result.push(c);\n\n }\n\n }\n\n last_is_sep = c == '_';\n\n is_new_word = last_is_sep || c.is_numeric();\n\n }\n\n result\n\n}\n\n\n", "file_path": "telamon-gen/src/lib.rs", "rank": 10, "score": 297201.54991779587 }, { "content": "/// Checks that all the types in `types` are identical, and returns that type.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if `types` contains several different types.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `types` is empty.\n\nfn unify_type(types: impl IntoIterator<Item = ir::Type>) -> Result<ir::Type, TypeError> {\n\n let mut types = types.into_iter();\n\n let first = types.next().unwrap_or_else(|| panic!(\"no types provided\"));\n\n if let Some(other) = types.find(|&other| other != first) {\n\n Err(TypeError::mismatch(first, other))\n\n } else {\n\n Ok(first)\n\n }\n\n}\n\n\n", "file_path": "src/codegen/llir.rs", "rank": 11, "score": 287044.4356423846 }, { "content": "/// Checks that all the types inf `types` are the same floating-point type, and returns it.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if `types` contains different types, or non floating-point types.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `types` is empty.\n\nfn unify_ftype(types: impl IntoIterator<Item = ir::Type>) -> Result<ir::Type, TypeError> {\n\n unify_type(types).and_then(|t| 
{\n\n if t.is_float() {\n\n Ok(t)\n\n } else {\n\n Err(TypeError::not_float(t))\n\n }\n\n })\n\n}\n\n\n\n/// A named register.\n\n///\n\n/// Registers are typed, and should only be used in instructions expecting the appropriate type.\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Register<'a> {\n\n name: &'a str,\n\n t: ir::Type,\n\n}\n\n\n\nimpl fmt::Display for Register<'_> {\n", "file_path": "src/codegen/llir.rs", "rank": 12, "score": 284348.5027531494 }, { "content": "/// Checks that all the types in `types` are the same integer type, and returns it.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if `types` contains different types, or non-integer types\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `types` is empty.\n\nfn unify_itype(types: impl IntoIterator<Item = ir::Type>) -> Result<ir::Type, TypeError> {\n\n unify_type(types).and_then(|t| {\n\n if t.is_integer() {\n\n Ok(t)\n\n } else {\n\n Err(TypeError::not_integer(t))\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/codegen/llir.rs", "rank": 13, "score": 284343.30309683364 }, { "content": "/// Checks the result of all valid candidates.\n\npub fn check_candidates<F>(space: SearchSpace, ctx: &Context, mut check: F)\n\nwhere\n\n F: FnMut(),\n\n{\n\n explorer::gen_space(\n\n ctx,\n\n space,\n\n |_| (),\n\n |candidate| {\n\n debug!(\"testing candidate with actions {:?}\", candidate.actions);\n\n let fun = codegen::Function::build(&candidate.space);\n\n ctx.evaluate(&fun, EvalMode::FindBest).unwrap();\n\n check();\n\n },\n\n );\n\n}\n\n\n\n/// Tests the printing of unrolled dimensions.\n", "file_path": "backend/cuda/tests/lib.rs", "rank": 14, "score": 277106.3663417711 }, { "content": "/// A closure around a printing function.\n\nstruct Printer<T: Fn(&mut Formatter) -> fmt::Result>(T);\n\n\n\nimpl<T: Fn(&mut Formatter) -> fmt::Result> Display for Printer<T> {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n self.0(f)\n\n }\n\n}\n\n\n\nmod ast;\n\nmod choice;\n\nmod filter;\n\nmod partial_init;\n\nmod store;\n\nmod value_set;\n\n\n\nuse self::store::PartialIterator;\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 15, "score": 273504.8040267535 }, { "content": "/// Analyses the regularity of execution times and returns the median and the relative\n\n/// difference between the fastest and the slowest evaluation. Fails if the relative\n\n/// difference is too big.\n\nfn analyse_runtimes(mut runtimes: Vec<f64>, name: &str, bound: &str) -> (f64, f64) {\n\n runtimes.sort_by(|&x, &y| cmp_f64(x, y));\n\n let median = runtimes[runtimes.len() / 2];\n\n let diff = (runtimes[runtimes.len() - 1] - runtimes[0]) / median;\n\n if diff > MAX_RELATIVE_DIFF {\n\n let min = 1. 
- runtimes[0] / median;\n\n let max = runtimes[runtimes.len() - 1] / median - 1.;\n\n println!(\n\n \"noisy {}-bound evaluations {:.2e}ns (-{:.2e}x, +{:.2e}x) in {}\",\n\n bound, median, min, max, name\n\n );\n\n }\n\n (median, diff)\n\n}\n\n\n", "file_path": "kernels/benches/cuda_variance.rs", "rank": 16, "score": 273488.0574534255 }, { "content": "/// Generates a function base with the given arguments.\n\npub fn base(params: &[(&str, ir::Type)], arrays: &[&str], gpu: &Gpu) -> Signature {\n\n let mut signature = Signature::new(\"bench\".to_owned());\n\n for &(name, t) in params {\n\n signature.add_scalar(name.to_owned(), t);\n\n }\n\n for &name in arrays {\n\n signature.add_array(gpu, name.to_owned(), ir::Type::I(8));\n\n }\n\n signature\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 17, "score": 272475.9555718568 }, { "content": "/// Returns the type to use to implement a bitfiled.\n\nfn bits_type(num_values: usize) -> &'static str {\n\n if num_values <= 8 {\n\n \"u8\"\n\n } else if num_values <= 16 {\n\n \"u16\"\n\n } else if num_values <= 32 {\n\n \"u32\"\n\n } else if num_values <= 64 {\n\n \"u64\"\n\n } else {\n\n panic!(\"too many variants\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n/// Printing for test structures.\n\nmod test {\n\n use super::ENGINE;\n\n use crate::ir::test::{EvalContext, StaticCond};\n\n use crate::print;\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 18, "score": 266342.14162185014 }, { "content": "pub fn compile(mut source_file: File, lib_path: &str) -> ExitStatus {\n\n unwrap!(source_file.seek(SeekFrom::Start(0)));\n\n Command::new(\"gcc\")\n\n .stdin(source_file)\n\n .arg(\"-shared\")\n\n .arg(\"-fPIC\")\n\n .arg(\"-o\")\n\n .arg(lib_path)\n\n .arg(\"-xc\")\n\n .arg(\"-\")\n\n .arg(\"-lpthread\")\n\n .status()\n\n .expect(\"Could not execute gcc\")\n\n}\n\n\n", "file_path": "backend/x86/src/compile.rs", "rank": 19, "score": 265421.2634609544 }, { "content": "#[allow(clippy::let_and_return)]\n\npub fn descend_without_copies(mut space: SearchSpace) {\n\n while let Some(mut choice) = {\n\n let choice = explorer::choice::default_list(&space).next();\n\n choice\n\n } {\n\n let id = rand::thread_rng().gen_range(0, choice.len());\n\n let res = match choice.swap_remove(id) {\n\n ActionEx::Action(action) => space.apply_decisions(vec![action]),\n\n ActionEx::LowerLayout {\n\n mem,\n\n ref st_dims,\n\n ref ld_dims,\n\n } => space.lower_layout(mem, st_dims, ld_dims),\n\n };\n\n if res.is_err() {\n\n return;\n\n }\n\n }\n\n}\n\n\n\n/// Descends in the search tree and returns the candidates encountered.\n", "file_path": "backend/cuda/benches/common.rs", "rank": 20, "score": 264920.5530888918 }, { "content": "pub fn inverse(self) -> Self {{\n\n let high_bits = (self.bits & ({low_bits})) << 1;\n\n let low_bits = (self.bits & ({high_bits})) >> 1;\n\n let same_bits = self.bits & ({same_bits});\n\n {type_name} {{ bits: low_bits | high_bits | same_bits}}\n\n}}\n", "file_path": "telamon-gen/src/print/template/inverse.rs", "rank": 21, "score": 257829.05128927238 }, { "content": "/// Converts a choice name to a rust type name.\n\nfn to_type_name(h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> RenderResult {\n\n if h.is_block() {\n\n return Err(RenderError::new(\"to_type_name is not valid on blocks\"));\n\n }\n\n let string = match h.param(0).map(|p| p.value()) {\n\n None => return Err(RenderError::new(\"missing argument for to_type_name\")),\n\n Some(&JsonValue::String(ref string)) => string.clone(),\n\n Some(x) => {\n\n debug!(\"replace 
argument = {}\", x);\n\n return Err(RenderError::new(\"to_type_name expects a string argument\"));\n\n }\n\n };\n\n rc.writer\n\n .write_all(crate::to_type_name(&string).into_bytes().as_ref())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 22, "score": 252670.74700882268 }, { "content": "/// Creates a variable containing the list of newly created objects of the given set.\n\npub fn new_objs_list(set: &ir::SetDef, new_objs: &str) -> Variable<'static> {\n\n let name = render!(set/new_objs, <'a>,\n\n def: SetDef<'a> = SetDef::new(set),\n\n objs: &'a str = new_objs);\n\n Variable::with_string(name)\n\n}\n\n\n\n/// AST to print the reference to a set.\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize)]\n\npub struct Set<'a> {\n\n def: SetDef<'a>,\n\n var: Option<Variable<'a>>,\n\n constraints: Vec<Set<'a>>,\n\n}\n\n\n\nimpl<'a> Set<'a> {\n\n pub fn new<S: ir::SetRef<'a>>(set: S, ctx: &Context<'a>) -> Self {\n\n if let Some(rev_set) = set.reverse_constraint() {\n\n let mut rev_set_ast = Set::new(rev_set.clone(), ctx);\n\n rev_set_ast.constraints = set\n", "file_path": "telamon-gen/src/print/ast.rs", "rank": 23, "score": 251021.6411117369 }, { "content": "/// Returns the name of the getter method for `choice`. If `get_old` is true, the method\n\n/// will only take into account decisions that have been propagated.\n\npub fn getter_name(choice: &str, get_old: bool) -> Ident {\n\n let name = if get_old {\n\n format!(\"get_old_{}\", choice)\n\n } else {\n\n format!(\"get_{}\", choice)\n\n };\n\n Ident::new(&name, Span::call_site())\n\n}\n\n\n\n//TODO(cleanup): use TokenStream instead of templates\n\nuse crate::ir;\n\nuse crate::ir::SetRef;\n\nuse crate::print::ast::{self, LoopNest, Variable};\n\nuse crate::print::choice::Ast as ChoiceAst;\n\nuse crate::print::value_set;\n\nuse itertools::Itertools;\n\nuse std::iter;\n\nuse utils::*;\n\n\n", "file_path": "telamon-gen/src/print/store.rs", "rank": 24, "score": 247200.90983571994 }, { "content": "#[allow(clippy::let_and_return)]\n\npub fn descend_with_copies(mut space: SearchSpace) -> Vec<SearchSpace> {\n\n let mut spaces = vec![];\n\n while let Some(mut choice) = {\n\n let choice = explorer::choice::default_list(&space).next();\n\n choice\n\n } {\n\n let id = rand::thread_rng().gen_range(0, choice.len());\n\n let res = match choice.swap_remove(id) {\n\n ActionEx::Action(action) => space.apply_decisions(vec![action]),\n\n ActionEx::LowerLayout {\n\n mem,\n\n ref st_dims,\n\n ref ld_dims,\n\n } => space.lower_layout(mem, st_dims, ld_dims),\n\n };\n\n if res.is_err() {\n\n return spaces;\n\n }\n\n spaces.push(space.clone());\n\n }\n\n spaces\n\n}\n", "file_path": "backend/cuda/benches/common.rs", "rank": 25, "score": 247109.34271430605 }, { "content": "/// Explores the full search space.\n\npub fn gen_space<F, G>(\n\n context: &dyn Context,\n\n space: SearchSpace,\n\n mut on_node: F,\n\n mut on_leaf: G,\n\n) where\n\n F: FnMut(&Candidate),\n\n G: FnMut(&Candidate),\n\n{\n\n let perf_bound = bound(&space, context);\n\n let mut stack = vec![Candidate::new(space, perf_bound)];\n\n let mut total = 0;\n\n\n\n info!(\"Beginning exploration\");\n\n while let Some(candidate) = stack.pop() {\n\n total += 1;\n\n if total % 10 == 0 {\n\n warn!(\"{} candidates\", total);\n\n }\n\n let choice_opt = choice::default_list(&candidate.space).next();\n", "file_path": "src/explorer/mod.rs", "rank": 26, "score": 239961.09874975716 }, { "content": "/// Prints a variable of the context.\n\nfn debug(h: &Helper, _: &Handlebars, rc: &mut 
RenderContext) -> RenderResult {\n\n if h.is_block() {\n\n return Err(RenderError::new(\"debug is not valid on blocks\"));\n\n }\n\n match h.param(0) {\n\n None => debug!(\"context {:?}\", rc.context()),\n\n Some(x) => debug!(\"value {:?}\", x.value()),\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 27, "score": 239833.55600073317 }, { "content": "/// Iterates over a linked list while removing some items.\n\npub fn filter_list<'a, T, F>(\n\n list: &'a mut linked_list::LinkedList<T>,\n\n filter: F,\n\n) -> FilterList<'a, T, F>\n\nwhere\n\n T: 'a,\n\n F: FnMut(&mut T) -> bool,\n\n{\n\n FilterList {\n\n cursor: list.cursor(),\n\n filter,\n\n }\n\n}\n\n\n\n/// Iterates over a linked list while removing some items.\n\npub struct FilterList<'a, T, F>\n\nwhere\n\n T: 'a,\n\n F: FnMut(&mut T) -> bool,\n\n{\n", "file_path": "telamon-utils/src/iterator.rs", "rank": 28, "score": 230201.6236609996 }, { "content": "fn type_check_enum_values(enum_: &ir::Enum, values: Vec<RcStr>) -> BTreeSet<RcStr> {\n\n let values = enum_.expand(values).into_iter().collect();\n\n for value in &values {\n\n assert!(enum_.values().contains_key(value));\n\n }\n\n values\n\n}\n\n\n\n/// The value of a counter increment.\n\n#[derive(Clone, Debug)]\n\npub enum CounterVal {\n\n Code(String),\n\n Choice(ChoiceInstance),\n\n}\n\n\n\n/// A statement in an enum definition.\n\n#[derive(Clone, Debug)]\n\npub enum EnumStatement {\n\n /// Defines a possible decision for th enum.\n\n Value(Spanned<String>, Option<String>, Vec<Constraint>),\n", "file_path": "telamon-gen/src/ast/mod.rs", "rank": 29, "score": 228813.5958688147 }, { "content": "/// Adds two tensors `lhs` and `rhs` of the same shape\n\npub fn tensor_add(\n\n builder: &mut Builder,\n\n lhs: &VirtualTensor,\n\n rhs: &VirtualTensor,\n\n) -> VirtualTensor {\n\n assert!(lhs.same_shape(rhs, builder.function()));\n\n\n\n let dims = lhs\n\n .iter()\n\n .map(|dim| builder.open_mapped_dim(dim))\n\n .collect_vec();\n\n\n\n let a_operand = lhs.dim_map(\n\n &dims.iter().collect_vec(),\n\n ir::DimMapScope::Global(()),\n\n builder,\n\n );\n\n\n\n let b_operand = rhs.dim_map(\n\n &dims.iter().collect_vec(),\n", "file_path": "kernels/src/compose.rs", "rank": 30, "score": 223833.93105890308 }, { "content": "/// Orders to Cargo to link a library.\n\nfn add_lib(lib: &str) {\n\n println!(\"cargo:rustc-link-lib={}\", lib);\n\n}\n\n\n", "file_path": "build.rs", "rank": 31, "score": 223335.8250724451 }, { "content": "/// Applies the `max` function to all elements of a virtual tensor\n\n/// `lhs` with `rhs` as the second argument to `max`\n\npub fn tensor_elementwise_max(\n\n builder: &mut Builder,\n\n lhs: &VirtualTensor,\n\n rhs: &dyn AutoOperand,\n\n) -> VirtualTensor {\n\n tensor_map(builder, lhs, |tensor_operand, builder| {\n\n builder.max(tensor_operand, rhs)\n\n })\n\n}\n\n\n\n#[derive(Clone, Deserialize, Serialize)]\n\npub enum ActivationFunction {\n\n /// Linear rectifier (i.e., max(0, v))\n\n ReLU,\n\n\n\n /// Sigmoid activation function (i.e., 1 / (1 + exp(v))\n\n Sigmoid,\n\n}\n\n\n\nimpl ActivationFunction {\n", "file_path": "kernels/src/compose.rs", "rank": 32, "score": 220315.74645843348 }, { "content": "/// Given a function string and its arguments as ThunkArg, compile to a binary, executes it and\n\n/// returns the time elapsed. 
Converts ThunkArgs to HoldTHunk as we want to allocate memory for\n\n/// temporary arrays at the last possible moment\n\nfn function_evaluate(fun_str: &str, args: &[ThunkArg]) -> Result<f64, ()> {\n\n debug!(\"running code {}\", fun_str);\n\n let temp_dir = unwrap!(tempfile::tempdir());\n\n let templib_name = temp_dir\n\n .path()\n\n .join(\"lib_compute.so\")\n\n .to_string_lossy()\n\n .into_owned();\n\n let mut source_file = unwrap!(tempfile::tempfile());\n\n unwrap!(source_file.write_all(fun_str.as_bytes()));\n\n let compile_status = compile::compile(source_file, &templib_name);\n\n if !compile_status.success() {\n\n panic!(\"Could not compile file:\\n{}\", fun_str);\n\n }\n\n // Lock the arguments and allocate temporary arrays\n\n //\n\n // `thunks` owns the array values\n\n let mut thunks = args\n\n .iter()\n\n .map(|arg| match arg {\n", "file_path": "backend/x86/src/context.rs", "rank": 33, "score": 219758.74151735165 }, { "content": "/// Orders to Cargo to link a library.\n\nfn add_lib(lib: &str) {\n\n println!(\"cargo:rustc-link-lib={}\", lib);\n\n}\n\n\n", "file_path": "telajax/build.rs", "rank": 34, "score": 219602.04150334952 }, { "content": "#[allow(unused_variables, unused_mut)]\n\npub fn init_domain(store: &mut DomainStore,\n\n ir_instance: &mut ir::Function) -> Result<Vec<Action>, ()> {\n\n trace!(\"called init_domain from file {}\", file!());\n\n // Run all the filters once.\n\n let ref mut diff = DomainDiff::default(); // Pass an empty diff to propagate and triggers.\n\n let mut unused_diff = DomainDiff::default();\n\n {{#each choices~}}\n\n {{#>loop_nest iteration_space~}}\n\n {{>run_filters this}}\n\n {{/loop_nest~}}\n\n {{/each~}}\n\n {{store.filter_all}}\n\n // Propagate the filters where necessary.\n\n let mut actions: Vec<Action> = Vec::new();\n\n {{#each triggers~}}\n\n let mut trigger_{{id}} = Vec::new();\n\n {{#>loop_nest loop_nest}}\n\n if check_trigger_{{id}}({{>choice.arg_names}}ir_instance, store, diff) {\n\n trigger_{{id}}.push(({{>choice.arg_ids}}));\n\n }\n", "file_path": "telamon-gen/src/print/template/main.rs", "rank": 35, "score": 218368.96931959473 }, { "content": "/// Prints an analysis of the bounds computed by the lower bound model.\n\npub fn analyze_bounds(mut bounds: Vec<BoundSample>) {\n\n const NUM_QUANTILES: usize = 5;\n\n bounds.sort_by(|x, y| cmp_f64(x.ratio(), y.ratio()));\n\n let num_errors = bounds.iter().take_while(|b| b.ratio() < 1.).count();\n\n if num_errors > 0 {\n\n let error_ratio = num_errors as f64 / bounds.len() as f64;\n\n let error_ratio = statistics::estimate_ratio(error_ratio, bounds.len());\n\n println!(\"ratio of errors {}, for example: \", error_ratio);\n\n let num_printed = std::cmp::min(NUM_QUANTILES, num_errors);\n\n for i in 0..num_printed {\n\n let index = i * num_errors / num_printed;\n\n println!(\"{}% worst error: {}\", i * 100 / num_printed, bounds[index]);\n\n }\n\n }\n\n if num_errors < bounds.len() {\n\n let num_bounds = bounds.len() - num_errors;\n\n let num_quantiles = std::cmp::min(NUM_QUANTILES, num_bounds);\n\n for i in 0..num_quantiles {\n\n let index = (i + 1) * (num_bounds / num_quantiles) - 1;\n\n println!(\n\n \"{}% worst: {}\",\n\n (i + 1) * 100 / num_quantiles,\n\n bounds[num_errors + index]\n\n );\n\n }\n\n }\n\n}\n", "file_path": "kernels/src/kernel.rs", "rank": 36, "score": 218205.78312620264 }, { "content": "/// Generate the trigger code to add a representant to a quotient set.\n\npub fn add_to_quotient(\n\n set: &ir::SetDef,\n\n repr_name: &str,\n\n counter_name: &str,\n\n item: &str,\n\n var: 
&Option<RcStr>,\n\n) -> String {\n\n let mut add_to_set = set.attributes()[&ir::SetDefKey::AddToSet]\n\n .replace(\"$item\", &format!(\"${}\", item));\n\n if let Some(ref var) = *var {\n\n add_to_set = add_to_set.replace(\"$var\", &format!(\"${}\", var));\n\n }\n\n render!(add_to_quotient, <'a>,\n\n repr_name: &'a str = repr_name,\n\n counter_name: &'a str = counter_name,\n\n add_to_set: &'a str = &add_to_set,\n\n item: &'a str = item,\n\n var: &'a Option<RcStr> = var)\n\n}\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 37, "score": 216901.85206543037 }, { "content": "/// Adds a dependency to the build script.\n\nfn add_dependency(dep: &str) {\n\n println!(\"cargo:rerun-if-changed={}\", dep);\n\n}\n\n\n", "file_path": "telamon-gen/build.rs", "rank": 38, "score": 216051.34881040006 }, { "content": "/// Orders to Cargo to link a library.\n\nfn add_lib(lib: &str) {\n\n println!(\"cargo:rustc-link-lib={}\", lib);\n\n}\n\n\n", "file_path": "backend/cuda/build.rs", "rank": 39, "score": 216045.45490414512 }, { "content": "#[allow(unused_variables, unused_mut)]\n\npub fn init_domain_partial(store: &mut DomainStore,\n\n ir_instance: &mut ir::Function,\n\n new_objs: &ir::NewObjs,\n\n diff: &mut DomainDiff) -> Result<Vec<Action>, ()> {\n\n let mut unused_diff = DomainDiff::default();\n\n // Disable new increments of existing counters.\n\n {{#each incr_iterators~}}\n\n {{#>iter_new_objects iter~}}\n\n {{>disable_increment}}\n\n {{/iter_new_objects~}}\n\n {{/each~}}\n\n // Call filters.\n\n {{#each partial_iterators~}}\n\n {{#>iter_new_objects this.[0]~}}\n\n {{>run_filters this.[1].choice arg_names=this.[1].arg_names}}\n\n {{/iter_new_objects~}}\n\n {{/each~}}\n\n // Propagate decisions that are not already propagted.\n\n let mut actions: Vec<Action> = Vec::new();\n\n {{#each partial_iterators~}}\n", "file_path": "telamon-gen/src/print/template/main.rs", "rank": 40, "score": 215769.11782304972 }, { "content": "/// Returns the default rounding for a given operand type.\n\nfn default_rounding(t: Type) -> op::Rounding {\n\n if t.is_integer() {\n\n op::Rounding::Exact\n\n } else {\n\n op::Rounding::Nearest\n\n }\n\n}\n", "file_path": "src/helper/builder.rs", "rank": 41, "score": 215508.5026239425 }, { "content": "/// Chooses an order between instructions and dimensions when multiple are possible.\n\n/// The function assumes the order between dimensions is already fixed.\n\n// TODO(search_space): fix order has currently no effect. Should we remove it ?\n\n// It is unused because inst-dim and dim-dim decisions are fixed by the explorer. 
We\n\n// cannot make them free as we might end-up in a dead-end.\n\npub fn fix_order(mut space: SearchSpace) -> SearchSpace {\n\n // TODO(search_space): make fix_order useless with a differential model\n\n trace!(\"adding arbitrary constraints to the order\");\n\n // Fix the order between instructions and dimensions.\n\n let pairs = space\n\n .ir_instance()\n\n .statements()\n\n .cartesian_product(space.ir_instance().dims())\n\n .map(|(lhs, rhs)| (lhs.stmt_id(), rhs.stmt_id()))\n\n .filter(|&(lhs, rhs)| lhs != rhs)\n\n .filter(|&(lhs, rhs)| !space.domain().get_order(lhs, rhs).is_constrained())\n\n .collect_vec();\n\n for (lhs, rhs) in pairs {\n\n let order = space.domain().get_order(lhs, rhs);\n\n if order.is_constrained() {\n\n continue;\n\n }\n\n let new_order = if order.intersects(Order::BEFORE) {\n\n Order::BEFORE\n\n } else if order.intersects(Order::AFTER) {\n", "file_path": "src/explorer/choice.rs", "rank": 42, "score": 215378.1666271286 }, { "content": "/// Randomize an array of `f32`.\n\npub fn randomize_f32(array: &mut Array<f32>) {\n\n unsafe {\n\n randomize_float_array(array.context, array.array, array.len as u64, 0.0, 1.0);\n\n }\n\n}\n\n\n", "file_path": "backend/cuda/src/api/array.rs", "rank": 43, "score": 215378.1666271286 }, { "content": "/// Generates a kernel with chained adds in a loop.\n\npub fn loop_chained_adds(\n\n base: Arc<Signature>,\n\n device: Arc<dyn Device>,\n\n loop_size: &DimSize,\n\n chained: u32,\n\n out: &str,\n\n) -> SearchSpace {\n\n let mut builder = Builder::new(base, device);\n\n let init = builder.mov(&0f32);\n\n let loop_size = loop_size.to_ir_size(&builder);\n\n let unroll_size = builder.cst_size(chained);\n\n let d0 = builder.open_dim_ex(loop_size, DimKind::LOOP);\n\n let d1 = builder.open_dim_ex(unroll_size, DimKind::UNROLL);\n\n let acc = builder.add(&Reduce(init), &2f32);\n\n builder.close_dim(&d0);\n\n builder.close_dim(&d1);\n\n let pattern = ir::AccessPattern::Unknown(None);\n\n builder.st_ex(&out, &acc, true, pattern, InstFlag::CACHE_GLOBAL);\n\n builder.get()\n\n}\n\n\n\n/// A function that produce a single instruction using the first argument on one of its\n\n/// operands. 
The second argument may be used for other operands.\n\npub type InstGenerator = dyn Fn(&dyn AutoOperand, &&str, &mut Builder) -> ir::InstId;\n\n\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 44, "score": 213671.47819640316 }, { "content": "// Wrapper around Read::read which retries when receiving an Interrupted error\n\nfn retry_read<R: Read + ?Sized>(read: &mut R, mut buf: &mut [u8]) -> io::Result<usize> {\n\n let mut nread = 0;\n\n while !buf.is_empty() {\n\n match read.read(buf) {\n\n Ok(0) => break,\n\n Ok(n) => {\n\n let tmp = buf;\n\n buf = &mut tmp[n..];\n\n nread += n;\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n\n\n Ok(nread)\n\n}\n\n\n\n/// A tfrecord reader.\n\n///\n", "file_path": "telamon-utils/src/tfrecord.rs", "rank": 45, "score": 209488.69548265985 }, { "content": "/// Performs an integer divison rounded to the upper number.\n\npub fn div_ceil<T: Integer + Copy>(lhs: T, rhs: T) -> T {\n\n let (quo, rem) = lhs.div_rem(&rhs);\n\n if rem == T::zero() {\n\n quo\n\n } else {\n\n quo + T::one()\n\n }\n\n}\n\n\n", "file_path": "telamon-utils/src/lib.rs", "rank": 46, "score": 208390.89799564096 }, { "content": "/// Updates the gpu description with performance numbers.\n\npub fn performance_desc(executor: &Executor, gpu: &mut Gpu) {\n\n // TODO(model): l1 and l2 lines rates may not be correct on non-kepler architectures\n\n // Compute the processing.\n\n gpu.smx_rates = smx_rates(gpu, executor);\n\n gpu.thread_rates = thread_rates(gpu, &gpu.smx_rates);\n\n gpu.gpu_rates = gpu_rates(gpu, &gpu.smx_rates);\n\n // Compute instruction overhead.\n\n gpu.add_f32_inst = instruction::add_f32(gpu, executor);\n\n gpu.add_f64_inst = instruction::add_f64(gpu, executor);\n\n gpu.add_i32_inst = instruction::add_i32(gpu, executor);\n\n gpu.add_i64_inst = instruction::add_i64(gpu, executor);\n\n gpu.mul_f32_inst = instruction::mul_f32(gpu, executor);\n\n gpu.mul_f64_inst = instruction::mul_f64(gpu, executor);\n\n gpu.mul_i32_inst = instruction::mul_i32(gpu, executor);\n\n gpu.mul_i64_inst = instruction::mul_i64(gpu, executor);\n\n gpu.mad_f32_inst = instruction::mad_f32(gpu, executor);\n\n gpu.mad_f64_inst = instruction::mad_f64(gpu, executor);\n\n gpu.mad_i32_inst = instruction::mad_i32(gpu, executor);\n\n gpu.mad_i64_inst = instruction::mad_i64(gpu, executor);\n\n gpu.mad_wide_inst = instruction::mad_wide(gpu, executor);\n", "file_path": "backend/cuda/src/characterize/gpu.rs", "rank": 47, "score": 207646.37963494685 }, { "content": "/// Propagate the changes stored in `diff`.\n\npub fn propagate_changes(diff: &mut DomainDiff, ir_instance: &mut Arc<ir::Function>,\n\n store: &mut DomainStore) -> Result<(), ()> {\n\n {{~#each choices}}\n\n while let Some((({{#each arguments}}{{this.[0]}}, {{/each}}), old, new)) =\n\n diff.pop_{{name}}_diff() {\n\n debug!(\"propagating {{name}}{:?} {:?} -> {:?}\",\n\n ({{>choice.arg_names this}}), old, new);\n\n {{name}}::on_change(old, new,\n\n {{~>choice.arg_names this}}ir_instance, store, diff)?;\n\n }\n\n {{~/each}}\n\n Ok(())\n\n}\n\n\n", "file_path": "telamon-gen/src/print/template/main.rs", "rank": 48, "score": 206583.79777867478 }, { "content": "/// Applies an action to the domain.\n\npub fn apply_action(action: Action, store: &mut DomainStore, diff: &mut DomainDiff)\n\n -> Result<(), ()> {\n\n debug!(\"applying action {:?}\", action);\n\n match action {\n\n {{~#each choices}}\n\n Action::{{to_type_name name}}({{#each arguments}}{{this.[0]}}, {{/each}}value) =>\n\n 
store.restrict_{{name}}({{#each arguments}}{{this.[0]}}, {{/each}}value, diff),\n\n {{~/each}}\n\n }\n\n}\n", "file_path": "telamon-gen/src/print/template/actions.rs", "rank": 49, "score": 206583.79777867478 }, { "content": "/// Benchmarks full descents in the search tree, with a copy at each level.\n\nfn mm_descent_copy(c: &mut Criterion) {\n\n let _ = env_logger::try_init();\n\n c.bench_function(\"mm descent with copy\", |b| {\n\n b.iter(|| {\n\n common::descend_with_copies(common::MM.clone());\n\n })\n\n });\n\n}\n\n\n\ncriterion_group! {\n\n name = benches;\n\n config = config_criterion();\n\n targets = mm_descent, mm_descent_copy\n\n}\n\n\n\ncriterion_main!(benches);\n", "file_path": "backend/cuda/benches/descent.rs", "rank": 50, "score": 204255.52898045123 }, { "content": "/// Ensures an iterator has at most one element.\n\npub fn at_most_one<IT: Iterator>(mut it: IT) -> Option<IT::Item> {\n\n let out = it.next();\n\n assert!(it.next().is_none());\n\n out\n\n}\n\n\n", "file_path": "telamon-utils/src/iterator.rs", "rank": 51, "score": 204099.07812382127 }, { "content": " pub trait Domain: Copy + Eq {\n\n /// Indicates if the domain is empty.\n\n fn is_failed(&self) -> bool;\n\n /// Indicates if the domain contains a single alternative.\n\n fn is_constrained(&self) -> bool;\n\n /// Indicates if the domain contains another.\n\n fn contains(&self, other: Self) -> bool;\n\n /// Restricts the domain to the intersection with `other`.\n\n fn restrict(&mut self, other: Self);\n\n\n\n /// Indicates if the domain has an alternatve in common with `other`.\n\n fn intersects(&self, mut other: Self) -> bool where Self: Sized {\n\n other.restrict(*self);\n\n !other.is_failed()\n\n }\n\n\n\n /// Indicates if the domain is equal to another domain.\n\n fn is(&self, mut other: Self) -> Trivalent where Self: Sized {\n\n other.restrict(*self);\n\n if other.is_failed() {\n", "file_path": "telamon-gen/src/print/runtime/mod.rs", "rank": 52, "score": 203306.5065887422 }, { "content": "/// Creates a `ValueSet` from the list of enum values.\n\npub fn normalized_enum_set<'a, IT>(\n\n values: IT,\n\n negate: bool,\n\n inverse: bool,\n\n choice: &'a ir::Enum,\n\n) -> ValueSet\n\nwhere\n\n IT: IntoIterator<Item = &'a RcStr>,\n\n{\n\n let values = normalize_values(values, negate, inverse, choice)\n\n .into_iter()\n\n .cloned()\n\n .collect();\n\n ValueSet::enum_values(choice.name().clone(), values)\n\n}\n\n\n\n/// Represents a set of values a choice can take.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum ValueSet {\n\n // TODO(cc_perf): detect when an input and its negation are included.\n", "file_path": "telamon-gen/src/ir/filter.rs", "rank": 53, "score": 202897.5116492847 }, { "content": "pub fn log<E: Send + Serialize>(\n\n config: &Config,\n\n recv: mpsc::Receiver<LogMessage<E>>,\n\n) -> Result<(), LogError> {\n\n let mut record_writer = config.create_eventlog()?;\n\n let mut write_buffer = config.create_log()?;\n\n while let Ok(message) = recv.recv() {\n\n match message {\n\n LogMessage::Event(event) => {\n\n if let Some(writer) = &mut record_writer {\n\n writer.write_record(&bincode::serialize(&event)?)?;\n\n }\n\n }\n\n LogMessage::NewBest {\n\n score,\n\n cpt,\n\n timestamp,\n\n } => {\n\n log_monitor(score, cpt, timestamp, &mut write_buffer);\n\n }\n", "file_path": "src/explorer/logger.rs", "rank": 54, "score": 201992.07096650873 }, { "content": "/// This function is to be either removed or reimplemented eventually. 
It is just a replacement for\n\n/// the previous list implementation (exposes the choices in the same order). Default should\n\n/// preferably be handled in config file\n\npub fn default_list<'a>(space: &'a SearchSpace) -> impl Iterator<Item = Choice> + 'a {\n\n list(&config::DEFAULT_ORDERING, space)\n\n}\n\n\n", "file_path": "src/explorer/choice.rs", "rank": 55, "score": 192565.72946741618 }, { "content": "/// Generates actions to enforce operands invariants.\n\npub fn invariants(fun: &ir::Function, op: &ir::Operand, user: ir::StmtId) -> Vec<Action> {\n\n match *op {\n\n Int(..) | Float(..) | Param(..) | Addr(..) | Variable(..) => vec![],\n\n Inst(src, _, ref dim_map, ref scope) => {\n\n // Order dimensions in the dim map.\n\n let order = Order::BEFORE | Order::MERGED;\n\n let mut actions = Vec::new();\n\n for &(lhs, rhs) in dim_map.iter() {\n\n actions.push(Action::Order(lhs.into(), rhs.into(), order));\n\n let mapping = match scope {\n\n DimMapScope::Local => DimMapping::UNROLL_MAP,\n\n DimMapScope::Thread => DimMapping::MAPPED,\n\n DimMapScope::Global(..) => DimMapping::ALL,\n\n };\n\n actions.push(Action::DimMapping(lhs, rhs, mapping));\n\n // FIXME: allow tmp mem with dynamic size when the scope is global.\n\n if fun.dim(lhs).possible_sizes().is_none() {\n\n actions.push(Action::Order(lhs.into(), rhs.into(), Order::MERGED));\n\n }\n\n }\n", "file_path": "src/search_space/operand.rs", "rank": 56, "score": 188513.86709082988 }, { "content": "fn accuracy<'a, K>(params: &K::Parameters, name: &'a str, executor: &'a cuda::Executor)\n\nwhere\n\n K: Kernel<'a>,\n\n{\n\n let mut context = cuda::Context::new(executor);\n\n info!(\"Generating {} candidates\", NUM_TESTS);\n\n let (signature, kernel, context) = KernelBuilder::default()\n\n .name(name)\n\n .build::<K, cuda::Context>(params.clone(), &mut context);\n\n let candidates = kernel.build_body(signature.into(), context);\n\n let candidates = std::iter::repeat(())\n\n .flat_map(|()| {\n\n let order = explorer::config::NewNodeOrder::WeightedRandom;\n\n let candidate_idx = order.pick_candidate(&candidates, CUT);\n\n let candidate = candidates[unwrap!(candidate_idx)].clone();\n\n local_selection::descend(&Default::default(), order, context, candidate, CUT)\n\n })\n\n .take(NUM_TESTS)\n\n .collect_vec();\n\n info!(\"Evaluating candidates, simulating a GPU-bound exploration\");\n", "file_path": "kernels/benches/cuda_variance.rs", "rank": 57, "score": 185181.752704384 }, { "content": "type Callback = unsafe extern \"C\" fn(*const libc::c_void, i32, *mut libc::c_void);\n\n\n", "file_path": "telajax/src/lib.rs", "rank": 58, "score": 185146.85418070666 }, { "content": "/// Applies a set of decisions to the domain and propagate the changes.\n\npub fn apply_decisions(actions: Vec<Action>, ir_instance: &mut Arc<ir::Function>,\n\n domain: &mut DomainStore) -> Result<(), ()> {\n\n let mut diff = DomainDiff::default();\n\n for action in actions { apply_action(action, domain, &mut diff)?; }\n\n while !diff.is_empty() { propagate_changes(&mut diff, ir_instance, domain)?; }\n\n Ok(())\n\n}\n\n\n\n/// Update the domain after a lowering.\n", "file_path": "telamon-gen/src/print/template/main.rs", "rank": 59, "score": 183587.6054623417 }, { "content": "#[test]\n\nfn conflict() {\n\n assert_eq!(\n\n parser::parse_ast(Lexer::new(\n\n b\"set BasicBlock:\n\n item_type = \\\"ir::basic_block::Obj\\\"\n\n id_type = \\\"ir::basic_block::Id\\\"\n\n item_getter = \\\"ir::basic_block::get($fun, $id)\\\"\n\n id_getter = \\\"ir::basic_block::Obj::id($item)\\\"\n\n iterator = 
\\\"ir::basic_block::iter($fun)\\\"\n\n var_prefix = \\\"bb\\\"\n\n new_objs = \\\"$objs.basic_block\\\"\n\n end\n\n define enum foo($lhs in BasicBlock, $rhs in BasicBlock):\n\n symmetric\n\n antisymmetric:\n\n A -> B\n\n value A:\n\n value B:\n\n end\"\n\n .to_vec()\n", "file_path": "telamon-gen/tests/typing/choice_defs/enums.rs", "rank": 60, "score": 182264.3667675376 }, { "content": "/// Prints the universe of a `ValueType`.\n\npub fn universe(value_type: &ir::ValueType, ctx: &print::Context) -> Value {\n\n match value_type {\n\n ir::ValueType::Enum(..) => panic!(\"only integer domains have a universe\"),\n\n ir::ValueType::Range { .. } | ir::ValueType::Constant => {\n\n Value::new(quote::quote!(&()), ir::ValueType::Constant)\n\n }\n\n ir::ValueType::NumericSet(universe) => Value::new_const(universe, ctx),\n\n }\n\n}\n\n\n", "file_path": "telamon-gen/src/print/value.rs", "rank": 61, "score": 180414.06335636546 }, { "content": "/// Implements a truth table as a filter.\n\nfn truth_table_to_filter(table: &mut TableView) -> ir::SubFilter {\n\n if table.num_inputs() == 0 {\n\n let cell = table.into_iter().next().unwrap();\n\n let rules = cell.extract_rules();\n\n return ir::SubFilter::Rules(rules);\n\n }\n\n match table_min_split(table).unwrap() {\n\n TableSplit::Forward { mut sub_view } => truth_table_to_filter(&mut sub_view),\n\n TableSplit::Switch { input, cases } => {\n\n let sub_filters = cases\n\n .into_iter()\n\n .map(|(values, mut view)| (values, truth_table_to_filter(&mut view)))\n\n .collect();\n\n ir::SubFilter::Switch {\n\n switch: input,\n\n cases: sub_filters,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "telamon-gen/src/truth_table.rs", "rank": 62, "score": 179432.03464391216 }, { "content": "pub fn div_f64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Div f64\");\n\n inst::<f64>(gpu, executor, &|init, arg, b| b.div(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 63, "score": 177549.30019445185 }, { "content": "pub fn div_f32(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Div f32\");\n\n inst::<f32>(gpu, executor, &|init, arg, b| b.div(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 64, "score": 177549.30019445185 }, { "content": "pub fn div_i32(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Div i32\");\n\n inst::<i32>(gpu, executor, &|init, arg, b| b.div(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 65, "score": 177549.30019445185 }, { "content": "pub fn div_i64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Div i64\");\n\n inst::<i64>(gpu, executor, &|init, arg, b| b.div(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 66, "score": 177549.30019445185 }, { "content": "pub fn max_i32(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Max i32\");\n\n inst::<i32>(gpu, executor, &|init, arg, b| b.max(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 67, "score": 177539.84782458955 }, { "content": "pub fn max_f64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Max f64\");\n\n inst::<f64>(gpu, executor, &|init, arg, b| b.max(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 68, "score": 177539.84782458955 }, { "content": "pub fn max_f32(gpu: &Gpu, executor: &Executor) -> 
InstDesc {\n\n info!(\"Instruction: Max f32\");\n\n inst::<f32>(gpu, executor, &|init, arg, b| b.max(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 69, "score": 177539.84782458955 }, { "content": "pub fn max_i64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Max i64\");\n\n inst::<i64>(gpu, executor, &|init, arg, b| b.max(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 70, "score": 177539.84782458955 }, { "content": "pub fn add_i32(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Add i32\");\n\n inst::<i32>(gpu, executor, &|init, arg, b| b.add(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 71, "score": 177514.7160535389 }, { "content": "pub fn add_i64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Add i64\");\n\n inst::<i64>(gpu, executor, &|init, arg, b| b.add(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 72, "score": 177514.7160535389 }, { "content": "pub fn add_f32(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Add f32\");\n\n inst::<f32>(gpu, executor, &|init, arg, b| b.add(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 73, "score": 177514.7160535389 }, { "content": "pub fn add_f64(gpu: &Gpu, executor: &Executor) -> InstDesc {\n\n info!(\"Instruction: Add f64\");\n\n inst::<f64>(gpu, executor, &|init, arg, b| b.add(init, arg))\n\n}\n\n\n", "file_path": "backend/cuda/src/characterize/instruction.rs", "rank": 74, "score": 177514.7160535389 }, { "content": "type RenderResult = Result<(), RenderError>;\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 75, "score": 176653.39828576232 }, { "content": "/// Merge flat filters that can be merged in the given list.\n\npub fn merge(mut filters: Vec<FlatFilter>, ir_desc: &ir::IrDesc) -> Vec<FlatFilter> {\n\n // Filters can only be merged into filters with more inputs. 
Thus we only try to\n\n // merge them with filter with more inputs.\n\n filters.sort_by_key(|x| x.inputs.len());\n\n let mut merged_filters: Vec<FlatFilter> = Vec::new();\n\n for filter in filters.into_iter().rev() {\n\n let mut merged = false;\n\n for other_filter in &mut merged_filters {\n\n if other_filter.try_merge(&filter, ir_desc) {\n\n merged = true;\n\n }\n\n }\n\n if !merged {\n\n merged_filters.push(filter);\n\n }\n\n }\n\n merged_filters\n\n}\n\n\n\n/// A filter with only negative rules.\n", "file_path": "telamon-gen/src/flat_filter.rs", "rank": 76, "score": 176144.70122851105 }, { "content": "/// Returns a factor and a multiple of `size`.\n\npub fn factors(\n\n size: &ir::PartialSize,\n\n space: &SearchSpace,\n\n ctx: &dyn Context,\n\n) -> FactorRange {\n\n let (factor, param_factors, dim_size_factors) = size.factors();\n\n let divisors = size.divisors();\n\n let factor = param_factors\n\n .iter()\n\n .map(|p| u64::from(ctx.param_as_size(&p.name).unwrap()))\n\n .product::<u64>()\n\n * u64::from(factor);\n\n let mut total_gcd = factor.to_biguint().unwrap();\n\n let mut total_lcm = total_gcd.clone();\n\n for &dim in dim_size_factors {\n\n let size = dim_factors(dim, space);\n\n total_gcd *= size.gcd;\n\n total_lcm *= size.lcm;\n\n }\n\n for &dim in divisors {\n\n let size = dim_factors(dim, space);\n\n total_gcd /= size.lcm.to_biguint().unwrap().gcd(&total_gcd);\n\n total_lcm /= size.gcd;\n\n }\n\n FactorRange {\n\n gcd: total_gcd.to_u64().unwrap(),\n\n lcm: total_lcm.to_u64().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/model/size.rs", "rank": 77, "score": 176099.86105172845 }, { "content": "/// Generates the list of levels to consider. The root level is the first one.\n\n///\n\n/// The idea is to ensure that each instruction is considered the right number of times\n\n/// and that inner loops are applied before outer ones. For this, we build the list of\n\n/// outer dimensions of each instruction or loops. For loops, we include both the nesting\n\n/// with and without the loop. 
We then build the minimal dag for the order defined such as\n\n/// X < Y iff:\n\n/// - nesting(X) < nesting(Y)\n\n/// - forall z in Y\\X, forall y in Y, z inner y\n\n/// Each edge of the dag represents a level, appling the dimensions in the difference\n\n/// between the nestings at each end of the edge.\n\npub fn generate(\n\n space: &SearchSpace,\n\n ctx: &dyn Context,\n\n local_info: &LocalInfo,\n\n) -> (Vec<Level>, Vec<DimMap>) {\n\n // Build the list of nestings, exclude block and vector dimensions.\n\n let mut nestings = local_info\n\n .nesting\n\n .iter()\n\n .flat_map(|(&stmt, nesting)| {\n\n let outer_dims = nesting.outer_dims.filter(|&d| must_consider_dim(space, d));\n\n if let ir::StmtId::Dim(dim) = stmt {\n\n if must_consider_dim(space, dim) {\n\n let mut outer_with_self = outer_dims.clone();\n\n outer_with_self.insert(dim);\n\n vec![outer_dims, outer_with_self]\n\n } else {\n\n vec![]\n\n }\n\n } else {\n", "file_path": "src/model/level.rs", "rank": 78, "score": 176099.86105172845 }, { "content": "/// Multiplies all elements of `lhs_mul` with `rhs_mul_operand` and\n\n/// adds the result to the tensor `rhs_add`\n\npub fn tensor_mad(\n\n builder: &mut Builder,\n\n lhs_mul: &VirtualTensor,\n\n rhs_mul_operand: &dyn AutoOperand,\n\n rhs_add: &VirtualTensor,\n\n) -> VirtualTensor {\n\n assert!(lhs_mul.same_shape(rhs_add, builder.function()));\n\n\n\n let dims = lhs_mul\n\n .iter()\n\n .map(|dim| builder.open_mapped_dim(&dim))\n\n .collect_vec();\n\n\n\n let lhs_mul_operand = lhs_mul.dim_map(\n\n &dims.iter().collect_vec(),\n\n ir::DimMapScope::Global(()),\n\n builder,\n\n );\n\n\n\n let rhs_add_operand = rhs_add.dim_map(\n", "file_path": "kernels/src/compose.rs", "rank": 79, "score": 173643.02780625442 }, { "content": "/// Opens dimensions mapped to the entire set of dimensions of a\n\n/// virtual tensor `a` and calls a function `f` with an operand\n\n/// representing a tensor's element and the builder. All further\n\n/// instructions created by `f` using the builder will be placed in a\n\n/// set of dimensions mapped to the dimensions of the virtual input\n\n/// tensor `a`.\n\npub fn tensor_map(\n\n builder: &mut Builder,\n\n a: &VirtualTensor,\n\n f: impl FnOnce(&ir::Operand<()>, &mut Builder) -> ir::InstId,\n\n) -> VirtualTensor {\n\n let dims = a\n\n .iter()\n\n .map(|dim| builder.open_mapped_dim(&dim))\n\n .collect_vec();\n\n\n\n let operand = a.dim_map(\n\n &dims.iter().map(|dim| dim).collect_vec()[..],\n\n ir::DimMapScope::Global(()),\n\n builder,\n\n );\n\n\n\n let res_instr = f(&operand, builder);\n\n\n\n for dim in &dims {\n\n builder.close_dim(&dim);\n\n }\n\n\n\n VirtualTensor::new(res_instr, dims)\n\n}\n\n\n", "file_path": "kernels/src/compose.rs", "rank": 80, "score": 173635.32041460264 }, { "content": "/// Computes the mean and the confidence interval of the data set. The requested degree\n\n/// of confidence must be between 0 and 1.\n\npub fn estimate_mean(\n\n mut data: Vec<f64>,\n\n confidence: f64,\n\n unit: &'static str,\n\n) -> Estimate {\n\n assert!(0. 
<= confidence && confidence <= 1.);\n\n let mean = mean(&data);\n\n for item in &mut data {\n\n *item = (*item - mean).abs();\n\n }\n\n data.sort_by(|&x, &y| cmp_f64(x, y));\n\n let idx = std::cmp::min(\n\n ((data.len() - 1) as f64 * confidence).ceil() as usize,\n\n data.len() - 1,\n\n );\n\n Estimate {\n\n value: mean,\n\n unit,\n\n interval: data[idx],\n\n confidence,\n\n }\n\n}\n\n\n", "file_path": "kernels/src/statistics.rs", "rank": 81, "score": 173625.421211963 }, { "content": "/// Computes the `HwPressure` caused by the intersection of the bodies of the given loops.\n\npub fn sum_pressure(\n\n ctx: &dyn Context,\n\n space: &SearchSpace,\n\n local_info: &LocalInfo,\n\n bound_level: BottleneckLevel,\n\n nest: &[ir::DimId],\n\n repeat: &ir::PartialSize,\n\n) -> HwPressure {\n\n // Compute the pressure induced by the dimensions overhead.\n\n let mut pressure =\n\n HwPressure::min(nest.iter().map(|d| &local_info.dim_overhead[d].0))\n\n .unwrap_or_else(|| HwPressure::zero(&*ctx.device()));\n\n if nest.is_empty() {\n\n let min_num_threads = match bound_level {\n\n BottleneckLevel::Global => local_info.parallelism.min_num_threads,\n\n BottleneckLevel::Block => local_info.parallelism.min_num_threads_per_blocks,\n\n BottleneckLevel::Thread => 1,\n\n };\n\n let mut init_pressure = local_info.thread_overhead.clone();\n\n if bound_level <= BottleneckLevel::Block {\n", "file_path": "src/model/level.rs", "rank": 82, "score": 173625.421211963 }, { "content": "/// Entry point of the exploration. This function returns the best candidate that it has found in\n\n/// the given time (or at whatever point we decided to stop the search - potentially after an\n\n/// exhaustive search)\n\npub fn find_best(\n\n config: &Config,\n\n context: &dyn Context,\n\n search_space: Vec<SearchSpace>,\n\n check_result_fn: Option<&CheckResultFn<'_>>,\n\n) -> Option<SearchSpace> {\n\n find_best_ex(\n\n config,\n\n context,\n\n search_space\n\n .into_iter()\n\n .map(|s| {\n\n let bound = bound(&s, context);\n\n Candidate::new(s, bound)\n\n })\n\n .collect(),\n\n check_result_fn,\n\n )\n\n .map(|c| c.space)\n\n}\n\n\n", "file_path": "src/explorer/mod.rs", "rank": 83, "score": 173625.421211963 }, { "content": "/// A recursive function that takes a candidate and expands it until we have a completely specified\n\n/// candidate that we can pass to the evaluator, or we find a dead-end\n\npub fn descend(\n\n choice_order: &ChoiceOrdering,\n\n node_order: NewNodeOrder,\n\n context: &dyn Context,\n\n candidate: Candidate,\n\n cut: f64,\n\n) -> Option<Candidate> {\n\n Rollout {\n\n choice_order,\n\n node_order: &node_order,\n\n context,\n\n cut,\n\n }\n\n .descend(candidate)\n\n}\n\n\n\nimpl NewNodeOrder {\n\n /// Called in montecarlo_descend, dispatch the choice of the next candidate according to our\n\n /// configuration\n\n pub fn pick_candidate(self, new_nodes: &[Candidate], cut: f64) -> Option<usize> {\n", "file_path": "src/explorer/local_selection.rs", "rank": 84, "score": 173625.421211963 }, { "content": "/// Prints the template if the a value is equal to another.\n\nfn ifeq(h: &Helper, r: &Handlebars, rc: &mut RenderContext) -> RenderResult {\n\n let param = h\n\n .param(0)\n\n .ok_or_else(|| RenderError::new(\"Param 0 not found for helper \\\"ifeq\\\"\"))?;\n\n let value = h\n\n .param(1)\n\n .ok_or_else(|| RenderError::new(\"Param 1 not found for helper \\\"ifeq\\\"\"))?;\n\n let template = if param.value() == value.value() {\n\n h.template()\n\n } else {\n\n h.inverse()\n\n };\n\n template.map(|t| t.render(r, 
rc)).unwrap_or(Ok(()))\n\n}\n\n\n\n/// Creates a printer form an iterator.\n\n// TODO(cleanup): remove printing macros\n\nmacro_rules! iter_printer {\n\n ($iter: expr, $item: pat, $($format_args: tt)*) => {\n\n crate::print::Printer(move |f: &mut Formatter| {\n\n #[allow(clippy::for_loop_over_option)]\n\n for $item in $iter { write!(f, $($format_args)*)?; }\n\n Ok(())\n\n })\n\n };\n\n}\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 85, "score": 172963.59614245454 }, { "content": "/// Performs string substitution.\n\nfn replace(h: &Helper, _: &Handlebars, rc: &mut RenderContext) -> RenderResult {\n\n if h.is_block() {\n\n return Err(RenderError::new(\"replace is not valid on blocks\"));\n\n }\n\n let mut string = match h.param(0).map(|p| p.value()) {\n\n None => return Err(RenderError::new(\"missing argument for replace\")),\n\n Some(&JsonValue::String(ref string)) => string.clone(),\n\n Some(x) => {\n\n debug!(\"replace argument = {}\", x);\n\n debug!(\"in context {:?}\", rc.context());\n\n return Err(RenderError::new(\"replace expects string arguments\"));\n\n }\n\n };\n\n for (key, value) in h.hash() {\n\n let value = value.value().as_str().ok_or_else(|| {\n\n let err = format!(\n\n \"replace maps strings to strings (got {:?}), in context {:?}\",\n\n value,\n\n rc.context()\n\n );\n\n RenderError::new(err)\n\n })?;\n\n string = string.replace(key, value);\n\n }\n\n rc.writer.write_all(string.into_bytes().as_ref())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 86, "score": 172957.69495340006 }, { "content": "/// Process a file and stores the result in an other file. This is meant to be used from build.rs\n\n/// files and may print output to be interpreted by cargo on stdout.\n\npub fn process_file(\n\n input_path: &path::Path,\n\n output_path: &path::Path,\n\n format: bool,\n\n) -> Result<(), error::Error> {\n\n let mut output = fs::File::create(path::Path::new(output_path)).unwrap();\n\n info!(\n\n \"compiling {} to {}\",\n\n input_path.display(),\n\n output_path.display()\n\n );\n\n process(None, &mut output, input_path)?;\n\n\n\n if format {\n\n match process::Command::new(\"rustfmt\")\n\n .arg(output_path.as_os_str())\n\n .status()\n\n {\n\n Ok(status) => {\n\n if !status.success() {\n", "file_path": "telamon-gen/src/lib.rs", "rank": 87, "score": 171279.0982457215 }, { "content": "/// Instruments a kernel and stores the results in a `Table`.\n\n///\n\n/// * `args_range`: the arguments that must vary, with their range.\n\n/// * `perf_counters`: the CUDA performance counters to monitor.\n\n/// * `result`: the table in which to store the results.\n\npub fn run(\n\n context: &mut Context,\n\n space: &SearchSpace,\n\n args_range: &[(&str, &[i32])],\n\n counters: &PerfCounterSet,\n\n result_prefix: &[u64],\n\n result: &mut Table<u64>,\n\n) {\n\n if let Some(choice) = explorer::choice::default_list(space).next() {\n\n panic!(\"The benchmark is not completely scheduled: {:?}\", choice);\n\n }\n\n let dev_fun = codegen::Function::build(space);\n\n let kernel = Kernel::compile(&dev_fun, context.gpu(), context.executor(), 1);\n\n for &(arg, range) in args_range {\n\n bind_scalar(arg, range[0], context);\n\n }\n\n kernel.instrument(context, counters);\n\n let args_range_len = args_range.iter().map(|&(_, x)| x.len()).collect_vec();\n\n for index in NDRange::new(&args_range_len) {\n\n let mut entry = result_prefix.iter().cloned().collect_vec();\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 88, "score": 171275.11109309315 }, { 
"content": "/// Multiplies each element of a virtual tensor `rhs` with a scalar\n\n/// operand `lhs`\n\npub fn tensor_elementwise_mul(\n\n builder: &mut Builder,\n\n lhs: &dyn AutoOperand,\n\n rhs: &VirtualTensor,\n\n) -> VirtualTensor {\n\n tensor_map(builder, rhs, |tensor_operand, builder| {\n\n builder.mul(tensor_operand, lhs)\n\n })\n\n}\n\n\n", "file_path": "kernels/src/compose.rs", "rank": 89, "score": 171274.15059328135 }, { "content": "/// Multiplies a matrix `lhs` with a vector `rhs`\n\npub fn matrix_vector_multiply(\n\n builder: &mut Builder,\n\n lhs: &VirtualTensor,\n\n rhs: &VirtualTensor,\n\n) -> VirtualTensor {\n\n assert!(lhs.num_dims() == 2 && rhs.num_dims() == 1);\n\n assert!(lhs[lhs.num_dims() - 1].size_eq(&rhs[0], builder.function()));\n\n\n\n // Assume (m x n) . (n) multiplication -> Result: (m)\n\n let m = &lhs[0];\n\n let n = &lhs[1];\n\n\n\n // Initialize accumulator\n\n let accu_init_m = builder.open_mapped_dim(&m);\n\n let accu_init_instr = builder.mov(&0f32);\n\n builder.close_dim(&accu_init_m);\n\n\n\n // Map operands and assign accumulator\n\n let acc_dim_m = builder.open_mapped_dim(&accu_init_m);\n\n let acc_dim_n = builder.open_mapped_dim(&n);\n", "file_path": "kernels/src/compose.rs", "rank": 90, "score": 171268.41272130236 }, { "content": "#[cfg(test)]\n\n#[doc(hidden)]\n\npub fn reset() {\n\n value::reset_ident_counter();\n\n ast::Variable::reset_prefix();\n\n}\n\n\n\n// TODO(cleanup): rewrite the fllowing with token streams instead of string templates\n\nuse crate::ir;\n\nuse handlebars::{self, Handlebars, Helper, RenderContext, RenderError, Renderable};\n\nuse itertools::Itertools;\n\nuse serde_json::value::Value as JsonValue;\n\nuse std::cmp::Ordering;\n\nuse std::collections::BinaryHeap;\n\nuse std::fmt::{self, Display, Formatter};\n\nuse std::hash::Hash;\n\nuse std::iter::FromIterator;\n\n//use std::io::prelude::*;\n\nuse indexmap::IndexMap;\n\nuse utils::*;\n\n\n\n// TODO(cleanup): use handlebars instead of printer macros and static templates\n", "file_path": "telamon-gen/src/print/mod.rs", "rank": 91, "score": 171268.41272130236 }, { "content": "/// Multiplies two matrices `lhs` and `rhs`\n\npub fn matrix_matrix_multiply(\n\n builder: &mut Builder,\n\n lhs: &VirtualTensor,\n\n rhs: &VirtualTensor,\n\n) -> VirtualTensor {\n\n assert!(lhs.num_dims() == 2 && rhs.num_dims() == 2);\n\n assert!(lhs[lhs.num_dims() - 1].size_eq(&rhs[0], builder.function()));\n\n\n\n // Assume (m x k) . 
(k x n) multiplication -> Result: (m x n)\n\n let m = &lhs[0];\n\n let n = &rhs[1];\n\n let k = &lhs[1];\n\n\n\n // Initialize accumulator\n\n let accu_init_m = builder.open_mapped_dim(&m);\n\n let accu_init_n = builder.open_mapped_dim(&n);\n\n\n\n let accu_init_instr = builder.mov(&0f32);\n\n\n\n builder.close_dim(&accu_init_m);\n", "file_path": "kernels/src/compose.rs", "rank": 92, "score": 171268.41272130236 }, { "content": "pub fn link_and_exec(\n\n lib_path: &str,\n\n fun_name: &str,\n\n mut args: Vec<*mut libc::c_void>,\n\n) -> f64 {\n\n let lib = libloading::Library::new(lib_path).expect(\"Library not found\");\n\n unsafe {\n\n let func: libloading::Symbol<unsafe extern \"C\" fn(*mut *mut libc::c_void)> = lib\n\n .get(fun_name.as_bytes())\n\n .expect(\"Could not find symbol in library\");\n\n let t0 = Instant::now();\n\n func(args.as_mut_ptr());\n\n let t = Instant::now() - t0;\n\n f64::from(t.subsec_nanos())\n\n }\n\n}\n", "file_path": "backend/x86/src/compile.rs", "rank": 93, "score": 171268.41272130236 }, { "content": "/// Normalizes a list of inputs.\n\npub fn dedup_inputs(\n\n mut inputs: Vec<ir::ChoiceInstance>,\n\n ir_desc: &ir::IrDesc,\n\n) -> (Vec<ir::ChoiceInstance>, ir::Adaptator) {\n\n let mut adaptator = ir::Adaptator::default();\n\n // Normalize inputs.\n\n for (pos, input) in inputs.iter_mut().enumerate() {\n\n if input.normalize(ir_desc) {\n\n adaptator.set_inversed(pos);\n\n }\n\n }\n\n let mut new_input_defs;\n\n // Assign new positions.\n\n {\n\n let mut input_map = FxHashMap::default();\n\n for (old_pos, input) in inputs.iter().enumerate() {\n\n let next_pos = input_map.len();\n\n let new_pos = *input_map.entry(input).or_insert(next_pos);\n\n adaptator.set_input(old_pos, new_pos);\n\n }\n", "file_path": "telamon-gen/src/constraint.rs", "rank": 94, "score": 171268.41272130236 }, { "content": "/// Restricts a choice to `value`. 
If `delayed` is true, actions are put in the\n\n/// `actions` vector instead of being directly applied.\n\npub fn restrict(\n\n choice_instance: &ir::ChoiceInstance,\n\n value: &print::Value,\n\n delayed: bool,\n\n ctx: &print::Context,\n\n) -> TokenStream {\n\n assert_eq!(\n\n &choice_instance.value_type(ctx.ir_desc).full_type(),\n\n value.value_type()\n\n );\n\n // TODO(span): keep the real span.\n\n let name = Ident::new(&choice_instance.choice, Span::call_site());\n\n let ids = ids(choice_instance, ctx);\n\n if delayed {\n\n quote!(actions.extend(#name::restrict_delayed(#ids ir_instance, store, #value)?);)\n\n } else {\n\n quote!(#name::restrict(#ids ir_instance, store, #value, diff)?;)\n\n }\n\n}\n\n\n", "file_path": "telamon-gen/src/print/choice.rs", "rank": 95, "score": 171268.41272130236 }, { "content": "/// Generates a wrap of syncthreads separated by a single instruction.\n\npub fn syncthread(\n\n signature: Arc<Signature>,\n\n device: Arc<dyn Device>,\n\n n_iter: &DimSize,\n\n n_chained: u32,\n\n wrap_size: u32,\n\n) -> SearchSpace {\n\n let mut builder = Builder::new(signature, device);\n\n let loop_size = n_iter.to_ir_size(&builder);\n\n let unroll_size = builder.cst_size(n_chained);\n\n let thread_size = builder.cst_size(wrap_size);\n\n\n\n let d0 = builder.open_dim_ex(loop_size, DimKind::LOOP);\n\n let d1 = builder.open_dim_ex(unroll_size, DimKind::UNROLL);\n\n let d2 = builder.open_dim_ex(thread_size, DimKind::THREAD);\n\n let _ = builder.mov(&0i32);\n\n\n\n builder.order(&d0, &d1, Order::OUTER);\n\n builder.order(&d0, &d2, Order::OUTER);\n\n\n\n let mut kernel = builder.get();\n\n kernel\n\n .domain_mut()\n\n .set_order(d1[0].into(), d2[0].into(), Order::OUTER);\n\n kernel\n\n}\n\n\n\n/// Generates a wrap of syncthreads separated by a single instruction.\n", "file_path": "backend/cuda/src/characterize/gen.rs", "rank": 96, "score": 171268.41272130236 }, { "content": "/// Runs the memory analysis.\n\npub fn analyse(\n\n space: &SearchSpace,\n\n gpu: &Gpu,\n\n inst: &ir::Instruction,\n\n sizes: &FxHashMap<ir::DimId, size::Range>,\n\n ctx: &dyn Context,\n\n) -> MemInfo {\n\n let flag = space.domain().get_inst_flag(inst.id());\n\n let info = match *inst.operator() {\n\n ir::Operator::Ld(_, _, ref pattern) | ir::Operator::St(_, _, _, ref pattern) => {\n\n let mem_space = access_pattern_space(pattern, space);\n\n let is_shared = mem_space.is(MemSpace::SHARED);\n\n match pattern {\n\n _ if flag.intersects(InstFlag::CACHE_READ_ONLY) => {\n\n unknown_info(inst, is_shared, gpu)\n\n }\n\n ir::AccessPattern::Unknown { .. } => unknown_info(inst, is_shared, gpu),\n\n ir::AccessPattern::Tensor { ref dims, .. 
} => {\n\n info(space, inst, dims, is_shared, gpu, sizes, ctx)\n\n }\n", "file_path": "backend/cuda/src/mem_model.rs", "rank": 97, "score": 171268.41272130236 }, { "content": "/// Same as `find_best`, but allows to specify pre-existing actions and also returns the\n\n/// actions for the best candidate.\n\npub fn find_best_ex(\n\n config: &Config,\n\n context: &dyn Context,\n\n candidates: Vec<Candidate>,\n\n check_result_fn: Option<&CheckResultFn<'_>>,\n\n) -> Option<Candidate> {\n\n match config.algorithm {\n\n config::SearchAlgorithm::Mcts(ref bandit_config) => {\n\n assert!(candidates.len() == 1);\n\n\n\n let builder = MctsBuilder {\n\n space: candidates.into_iter().next().unwrap().space,\n\n config,\n\n bandit_config,\n\n context,\n\n check_result_fn,\n\n };\n\n\n\n let default_policy = Box::new(bandit_config.new_nodes_order);\n\n\n", "file_path": "src/explorer/mod.rs", "rank": 98, "score": 171268.41272130236 }, { "content": "/// Zip copies of an object with an iterator.\n\npub fn zip_copy<I: IntoIterator, T: Clone>(it: I, object: T) -> ZipCopy<I::IntoIter, T> {\n\n let mut peek_it = it.into_iter().peekable();\n\n let object_option = peek_it.peek().map(|_| object);\n\n ZipCopy {\n\n it: peek_it,\n\n object: object_option,\n\n }\n\n}\n\n\n", "file_path": "telamon-utils/src/iterator.rs", "rank": 99, "score": 170855.3049341262 } ]
Rust
src/cluster/session.rs
a1ph/cdrs-tokio
99a536e705ff1fd95be36d57d0832ddc21d7478f
use async_trait::async_trait;
use bb8;
use fnv::FnvHashMap;
use std::iter::Iterator;
use std::sync::Arc;
use tokio::{io::AsyncWriteExt, sync::Mutex};

#[cfg(feature = "unstable-dynamic-cluster")]
use crate::cluster::NodeTcpConfig;
#[cfg(feature = "rust-tls")]
use crate::cluster::{new_rustls_pool, ClusterRustlsConfig, RustlsConnectionPool};
use crate::cluster::{new_tcp_pool, startup, CDRSSession, ClusterTcpConfig, ConnectionPool,
                     GetCompressor, GetConnection, TcpConnectionPool, ResponseCache};
use crate::error;
use crate::load_balancing::LoadBalancingStrategy;
use crate::transport::{CDRSTransport, TransportTcp};

use crate::authenticators::Authenticator;
use crate::cluster::SessionPager;
use crate::compression::Compression;
use crate::events::{new_listener, EventStream, EventStreamNonBlocking, Listener};
use crate::frame::events::{ServerEvent, SimpleServerEvent, StatusChange, StatusChangeType};
use crate::frame::parser::parse_frame;
use crate::frame::{Frame, IntoBytes, StreamId};
use crate::query::{BatchExecutor, ExecExecutor, PrepareExecutor, QueryExecutor};

#[derive(Debug)]
pub struct Session<LB> {
    load_balancing: Mutex<LB>,
    event_stream: Option<Mutex<EventStreamNonBlocking>>,
    responses: Mutex<FnvHashMap<StreamId, Frame>>,
    #[allow(dead_code)]
    pub compression: Compression,
}

impl<'a, LB> GetCompressor<'a> for Session<LB> {
    fn get_compressor(&self) -> Compression {
        self.compression.clone()
    }
}

impl<'a, LB: Sized> Session<LB> {
    pub fn paged<
        T: CDRSTransport + Unpin + 'static,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error>,
    >(
        &'a mut self,
        page_size: i32,
    ) -> SessionPager<'a, M, Session<LB>, T>
    where
        Session<LB>: CDRSSession<'static, T, M>,
    {
        return SessionPager::new(self, page_size);
    }
}

#[async_trait]
impl<
        T: CDRSTransport + Send + Sync + 'static,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
    > GetConnection<T, M> for Session<LB>
{
    async fn get_connection(&self) -> Option<Arc<ConnectionPool<M>>> {
        if cfg!(feature = "unstable-dynamic-cluster") {
            if let Some(ref event_stream_mx) = self.event_stream {
                if let Ok(ref mut event_stream) = event_stream_mx.try_lock() {
                    loop {
                        let next_event = event_stream.next();
                        match next_event {
                            None => break,
                            Some(ServerEvent::StatusChange(StatusChange {
                                addr,
                                change_type: StatusChangeType::Down,
                            })) => {
                                self.load_balancing
                                    .lock()
                                    .await
                                    .remove_node(|pool| pool.get_addr() == addr.addr);
                            }
                            Some(_) => continue,
                        }
                    }
                }
            }
        }

        self.load_balancing
            .lock()
            .await
            .next()
    }
}

#[async_trait]
impl<
        'a,
        T: CDRSTransport + Unpin + 'static,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
    > QueryExecutor<T, M> for Session<LB>
{
}

#[async_trait]
impl<
        'a,
        T: CDRSTransport + Unpin + 'static,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
    > PrepareExecutor<T, M> for Session<LB>
{
}

#[async_trait]
impl<
        'a,
        T: CDRSTransport + Unpin + 'static,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
    > ExecExecutor<T, M> for Session<LB>
{
}

#[async_trait]
impl<
        'a,
        T: CDRSTransport + Unpin + 'static,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
    > BatchExecutor<T, M> for Session<LB>
{
}

impl<
        'a,
        T: CDRSTransport + Unpin + 'static,
        M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,
        LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync,
    > CDRSSession<'a, T, M> for Session<LB>
{
}

#[async_trait]
impl <LB> ResponseCache for Session<LB> where LB: Send {
    async fn match_or_cache_response(&self, stream_id: i16, frame: Frame) -> Option<Frame> {
        if frame.stream == stream_id {
            return Some(frame);
        }

        let mut responses = self.responses.lock().await;
        responses.insert(frame.stream, frame);
        responses.remove(&stream_id)
    }
}

#[cfg(feature = "rust-tls")]
async fn connect_tls_static<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    mut load_balancing: LB,
    compression: Compression,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len());

    for node_config in &node_configs.0 {
        let node_connection_pool = new_rustls_pool(node_config.clone()).await?;
        nodes.push(Arc::new(node_connection_pool));
    }

    load_balancing.init(nodes);

    Ok(Session {
        load_balancing: Mutex::new(load_balancing),
        event_stream: None,
        responses: Mutex::new(FnvHashMap::default()),
        compression,
    })
}

#[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))]
async fn connect_tls_dynamic<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    mut load_balancing: LB,
    compression: Compression,
    event_src: NodeTcpConfig<'_, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len());

    for node_config in &node_configs.0 {
        let node_connection_pool = new_rustls_pool(node_config.clone()).await?;
        nodes.push(Arc::new(node_connection_pool));
    }

    load_balancing.init(nodes);

    let mut session = Session {
        load_balancing: Mutex::new(load_balancing),
        event_stream: None,
        responses: Mutex::new(FnvHashMap::default()),
        compression,
    };

    let (listener, event_stream) = session.listen_non_blocking(
        event_src.addr,
        event_src.authenticator,
        vec![SimpleServerEvent::StatusChange],
    ).await?;

    tokio::spawn(listener.start(&Compression::None));

    session.event_stream = Some(Mutex::new(event_stream));

    Ok(session)
}

async fn connect_static<A, LB>(
    node_configs: &ClusterTcpConfig<'_, A>,
    mut load_balancing: LB,
    compression: Compression,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len());

    for node_config in &node_configs.0 {
        let node_connection_pool = new_tcp_pool(node_config.clone()).await?;
        nodes.push(Arc::new(node_connection_pool));
    }

    load_balancing.init(nodes);

    Ok(Session {
        load_balancing: Mutex::new(load_balancing),
        event_stream: None,
        responses: Mutex::new(FnvHashMap::default()),
        compression,
    })
}

#[cfg(feature = "unstable-dynamic-cluster")]
async fn connect_dynamic<'a, A, LB>(
    node_configs: &ClusterTcpConfig<'a, A>,
    mut load_balancing: LB,
    compression: Compression,
    event_src: NodeTcpConfig<'a, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len());

    for node_config in &node_configs.0 {
        let node_connection_pool = new_tcp_pool(node_config.clone()).await?;
        nodes.push(Arc::new(node_connection_pool));
    }

    load_balancing.init(nodes);

    let mut session = Session {
        load_balancing: Mutex::new(load_balancing),
        event_stream: None,
        responses: Mutex::new(FnvHashMap::default()),
        compression,
    };

    let (listener, event_stream) = session.listen_non_blocking(
        event_src.addr,
        event_src.authenticator,
        vec![SimpleServerEvent::StatusChange],
    ).await?;

    tokio::spawn(listener.start(&Compression::None));

    session.event_stream = Some(Mutex::new(event_stream));

    Ok(session)
}

pub async fn new<A, LB>(
    node_configs: &ClusterTcpConfig<'_, A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_static(node_configs, load_balancing, Compression::None).await
}

#[cfg(feature = "unstable-dynamic-cluster")]
pub async fn new_dynamic<'a, A, LB>(
    node_configs: &ClusterTcpConfig<'a, A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'a, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_dynamic(node_configs, load_balancing, Compression::None, event_src).await
}

pub async fn new_snappy<A, LB>(
    node_configs: &ClusterTcpConfig<'_, A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_static(node_configs, load_balancing, Compression::Snappy).await
}

#[cfg(feature = "unstable-dynamic-cluster")]
pub async fn new_snappy_dynamic<'a, A, LB>(
    node_configs: &ClusterTcpConfig<'a, A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'a, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await
}

pub async fn new_lz4<A, LB>(
    node_configs: &ClusterTcpConfig<'_, A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_static(node_configs, load_balancing, Compression::Lz4).await
}

#[cfg(feature = "unstable-dynamic-cluster")]
pub async fn new_lz4_dynamic<'a, A, LB>(
    node_configs: &ClusterTcpConfig<'_, A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'a, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized,
{
    connect_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await
}

#[cfg(feature = "rust-tls")]
pub async fn new_tls<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_static(node_configs, load_balancing, Compression::None).await
}

#[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))]
pub async fn new_tls_dynamic<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'_, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_dynamic(node_configs, load_balancing, Compression::None, event_src).await
}

#[cfg(feature = "rust-tls")]
pub async fn new_snappy_tls<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_static(node_configs, load_balancing, Compression::Snappy).await
}

#[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))]
pub async fn new_snappy_tls_dynamic<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'_, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await
}

#[cfg(feature = "rust-tls")]
pub async fn new_lz4_tls<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_static(node_configs, load_balancing, Compression::Lz4).await
}

#[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))]
pub async fn new_lz4_tls_dynamic<A, LB>(
    node_configs: &ClusterRustlsConfig<A>,
    load_balancing: LB,
    event_src: NodeTcpConfig<'_, A>,
) -> error::Result<Session<LB>>
where
    A: Authenticator + 'static + Sized,
    LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized,
{
    connect_tls_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await
}

impl<'a, L> Session<L> {
    pub async fn listen<A: Authenticator + 'static + Sized>(
        &self,
        node: &str,
        authenticator: A,
        events: Vec<SimpleServerEvent>,
    ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStream)> {
        let compression = self.get_compressor();
        let transport = TransportTcp::new(&node).await.map(Mutex::new)?;

        startup(&transport, &authenticator).await?;

        let query_frame = Frame::new_req_register(events).into_cbytes();
        transport.lock().await.write(query_frame.as_slice()).await?;
        parse_frame(&transport, &compression).await?;

        Ok(new_listener(transport))
    }

    pub async fn listen_non_blocking<A: Authenticator + 'static + Sized>(
        &self,
        node: &str,
        authenticator: A,
        events: Vec<SimpleServerEvent>,
    ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStreamNonBlocking)> {
        self.listen(node, authenticator, events).await.map(|l| {
            let (listener, stream) = l;
            (listener, stream.into())
        })
    }
}
use async_trait::async_trait; use bb8; use fnv::FnvHashMap; use std::iter::Iterator; use std::sync::Arc; use tokio::{io::AsyncWriteExt, sync::Mutex}; #[cfg(feature = "unstable-dynamic-cluster")] use crate::cluster::NodeTcpConfig; #[cfg(feature = "rust-tls")] use crate::cluster::{new_rustls_pool, ClusterRustlsConfig, RustlsConnectionPool}; use crate::cluster::{new_tcp_pool, startup, CDRSSession, ClusterTcpConfig, ConnectionPool, GetCompressor, GetConnection, TcpConnectionPool, ResponseCache}; use crate::error; use crate::load_balancing::LoadBalancingStrategy; use crate::transport::{CDRSTransport, TransportTcp}; use crate::authenticators::Authenticator; use crate::cluster::SessionPager; use crate::compression::Compression; use crate::events::{new_listener, EventStream, EventStreamNonBlocking, Listener}; use crate::frame::events::{ServerEvent, SimpleServerEvent, StatusChange, StatusChangeType}; use crate::frame::parser::parse_frame; use crate::frame::{Frame, IntoBytes, StreamId}; use crate::query::{BatchExecutor, ExecExecutor, PrepareExecutor, QueryExecutor}; #[derive(Debug)] pub struct Session<LB> { load_balancing: Mutex<LB>, event_stream: Option<Mutex<EventStreamNonBlocking>>, responses: Mutex<FnvHashMap<StreamId, Frame>>, #[allow(dead_code)] pub compression: Compression, } impl<'a, LB> GetCompressor<'a> for Session<LB> { fn get_compressor(&self) -> Compression { self.compression.clone() } } impl<'a, LB: Sized> Session<LB> { pub fn paged< T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error>, >( &'a mut self, page_size: i32, ) -> SessionPager<'a, M, Session<LB>, T> where Session<LB>: CDRSSession<'static, T, M>, { return SessionPager::new(self, page_size); } } #[async_trait] impl< T: CDRSTransport + Send + Sync + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > GetConnection<T, M> for Session<LB> { async fn get_connection(&self) -> Option<Arc<ConnectionPool<M>>> { if cfg!(feature = "unstable-dynamic-cluster") { if let Some(ref event_stream_mx) = self.event_stream { if let Ok(ref mut event_stream) = event_stream_mx.try_lock() { loop { let next_event = event_stream.next(); match next_event { None => break, Some(ServerEvent::StatusChange(StatusChange { addr, change_type: StatusChangeType::Down, })) => { self.load_balancing .lock() .await .remove_node(|pool| pool.get_addr() == addr.addr); } Some(_) => continue, } } } } } self.load_balancing .lock() .await .next() } } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > QueryExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > PrepareExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > ExecExecutor<T, M> for Session<LB> { } #[async_trait] impl< 'a, T: CDRSTransport + Unpin + 'static, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, > BatchExecutor<T, M> for 
Session<LB> { } impl< 'a, T: CDRSTransport + Unpin + 'static, M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized, LB: LoadBalancingStrategy<ConnectionPool<M>> + Sized + Send + Sync, > CDRSSession<'a, T, M> for Session<LB> { } #[async_trait] impl <LB> ResponseCache for Session<LB> where LB: Send { async fn match_or_cache_response(&self, stream_id: i16, frame: Frame) -> Option<Frame> { if frame.stream == stream_id { return Some(frame); } let mut responses = self.responses.lock().await; responses.insert(frame.stream, frame); responses.remove(&stream_id) } } #[cfg(feature = "rust-tls")] async fn connect_tls_static<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes);
} #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] async fn connect_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<RustlsConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_rustls_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } async fn connect_static<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, mut load_balancing: LB, compression: Compression, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); Ok(Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }) } #[cfg(feature = "unstable-dynamic-cluster")] async fn connect_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, mut load_balancing: LB, compression: Compression, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { let mut nodes: Vec<Arc<TcpConnectionPool<A>>> = Vec::with_capacity(node_configs.0.len()); for node_config in &node_configs.0 { let node_connection_pool = new_tcp_pool(node_config.clone()).await?; nodes.push(Arc::new(node_connection_pool)); } load_balancing.init(nodes); let mut session = Session { load_balancing: Mutex::new(load_balancing), event_stream: None, responses: Mutex::new(FnvHashMap::default()), compression, }; let (listener, event_stream) = session.listen_non_blocking( event_src.addr, event_src.authenticator, vec![SimpleServerEvent::StatusChange], ).await?; tokio::spawn(listener.start(&Compression::None)); session.event_stream = Some(Mutex::new(event_stream)); Ok(session) } pub async fn new<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::None).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::None, event_src).await } pub async fn new_snappy<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> 
error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_snappy_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'a, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } pub async fn new_lz4<A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(feature = "unstable-dynamic-cluster")] pub async fn new_lz4_dynamic<'a, A, LB>( node_configs: &ClusterTcpConfig<'_, A>, load_balancing: LB, event_src: NodeTcpConfig<'a, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<TcpConnectionPool<A>> + Sized, { connect_dynamic(node_configs, load_balancing, Compression::Lz4, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::None).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::None, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_snappy_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Snappy).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_snappy_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, Compression::Snappy, event_src).await } #[cfg(feature = "rust-tls")] pub async fn new_lz4_tls<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_static(node_configs, load_balancing, Compression::Lz4).await } #[cfg(all(feature = "rust-tls", feature = "unstable-dynamic-cluster"))] pub async fn new_lz4_tls_dynamic<A, LB>( node_configs: &ClusterRustlsConfig<A>, load_balancing: LB, event_src: NodeTcpConfig<'_, A>, ) -> error::Result<Session<LB>> where A: Authenticator + 'static + Sized, LB: LoadBalancingStrategy<RustlsConnectionPool<A>> + Sized, { connect_tls_dynamic(node_configs, load_balancing, 
Compression::Lz4, event_src).await } impl<'a, L> Session<L> { pub async fn listen<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStream)> { let compression = self.get_compressor(); let transport = TransportTcp::new(&node).await.map(Mutex::new)?; startup(&transport, &authenticator).await?; let query_frame = Frame::new_req_register(events).into_cbytes(); transport.lock().await.write(query_frame.as_slice()).await?; parse_frame(&transport, &compression).await?; Ok(new_listener(transport)) } pub async fn listen_non_blocking<A: Authenticator + 'static + Sized>( &self, node: &str, authenticator: A, events: Vec<SimpleServerEvent>, ) -> error::Result<(Listener<Mutex<TransportTcp>>, EventStreamNonBlocking)> { self.listen(node, authenticator, events).await.map(|l| { let (listener, stream) = l; (listener, stream.into()) }) } }
Ok(Session {
    load_balancing: Mutex::new(load_balancing),
    event_stream: None,
    responses: Mutex::new(FnvHashMap::default()),
    compression,
})
call_expression
[ { "content": "#[async_trait]\n\npub trait CDRSTransport: Sized + AsyncRead + AsyncWriteExt + Send + Sync {\n\n /// Creates a new independently owned handle to the underlying socket.\n\n ///\n\n /// The returned TcpStream is a reference to the same stream that this object references.\n\n /// Both handles will read and write the same stream of data, and options set on one stream\n\n /// will be propagated to the other stream.\n\n async fn try_clone(&self) -> io::Result<Self>;\n\n\n\n /// Shuts down the read, write, or both halves of this connection.\n\n async fn close(&mut self, close: net::Shutdown) -> io::Result<()>;\n\n\n\n /// Method that checks that transport is alive\n\n fn is_alive(&self) -> bool;\n\n}\n\n\n\n/// Default Tcp transport.\n\npub struct TransportTcp {\n\n tcp: TcpStream,\n\n addr: String,\n\n}\n", "file_path": "src/transport.rs", "rank": 0, "score": 225235.59943196102 }, { "content": "pub trait Authenticator: Clone + Send + Sync {\n\n fn get_auth_token(&self) -> CBytes;\n\n fn get_cassandra_name(&self) -> Option<&str>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\n#[deprecated(\n\n since = \"2.1.0\",\n\n note = \"`PasswordAuthenticator` is deprecated in favour of `StaticPasswordAuthenticator` because the second one doesn't require static lifetime for credentials thus it's easier to use.\"\n\n)]\n\npub struct PasswordAuthenticator<'a> {\n\n username: &'a str,\n\n password: &'a str,\n\n}\n\n\n\n#[allow(deprecated)]\n\nimpl<'a> PasswordAuthenticator<'a> {\n\n pub fn new<'b>(username: &'b str, password: &'b str) -> PasswordAuthenticator<'b> {\n\n PasswordAuthenticator {\n\n username: username,\n", "file_path": "src/authenticators.rs", "rank": 1, "score": 209484.3278757598 }, { "content": "/// Tries to decode bytes array into `i16`.\n\npub fn try_i16_from_bytes(bytes: &[u8]) -> Result<i16, io::Error> {\n\n let mut c = Cursor::new(bytes);\n\n c.read_i16::<BigEndian>()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 2, "score": 188999.36053673213 }, { "content": "/// Tries to decode bytes array into `i32`.\n\npub fn try_i32_from_bytes(bytes: &[u8]) -> Result<i32, io::Error> {\n\n let mut c = Cursor::new(bytes);\n\n c.read_i32::<BigEndian>()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 3, "score": 188999.30011922517 }, { "content": "fn convert_frame_into_result(frame: Frame) -> error::Result<Frame> {\n\n match frame.opcode {\n\n Opcode::Error => frame.get_body().and_then(|err| match err {\n\n ResponseBody::Error(err) => Err(error::Error::Server(err)),\n\n _ => unreachable!(),\n\n }),\n\n _ => Ok(frame),\n\n }\n\n}\n", "file_path": "src/frame/parser.rs", "rank": 4, "score": 181243.9186039075 }, { "content": "// Decodes Cassandra `smallint` data (bytes) into Rust's `Result<i16, io::Error>`\n\npub fn decode_smallint(bytes: &[u8]) -> Result<i16, io::Error> {\n\n try_from_bytes(bytes).map(|i| i as i16)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 5, "score": 174431.47163203266 }, { "content": "// Decodes Cassandra `int` data (bytes) into Rust's `Result<i32, io::Error>`\n\npub fn decode_int(bytes: &[u8]) -> Result<i32, io::Error> {\n\n try_from_bytes(bytes).map(|i| i as i32)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 6, "score": 174431.41437435301 }, { "content": "// Decodes Cassandra `date` data (bytes) into Rust's `Result<i32, io::Error>` in following way\n\n// 0: -5877641-06-23\n\n// 2^31: 1970-1-1\n\n// 2^32: 5881580-07-11\n\npub fn decode_date(bytes: &[u8]) -> Result<i32, io::Error> {\n\n 
try_from_bytes(bytes).map(|i| i as i32)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 7, "score": 174431.23584706296 }, { "content": "/// Converts byte-array into i16\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given bytes could not be converted into `u16`\n\npub fn from_i16_bytes(bytes: &[u8]) -> i16 {\n\n try_i16_from_bytes(bytes).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 8, "score": 165722.2146750846 }, { "content": "/// Converts number i16 into Cassandra's [short].\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `i16` could not be converted into bytes\n\npub fn to_short(int: i16) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is i16\n\n let _ = bytes.write_i16::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 9, "score": 150571.12153728434 }, { "content": "/// Convers integer into Cassandra's [int]\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `i32` could not be converted into bytes\n\npub fn to_int(int: i32) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is i16\n\n let _ = bytes.write_i32::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 10, "score": 150570.2696760157 }, { "content": "// Decodes Cassandra `inet` data (bytes) into Rust's `Result<net::IpAddr, io::Error>`\n\npub fn decode_inet(bytes: &[u8]) -> Result<net::IpAddr, io::Error> {\n\n match bytes.len() {\n\n // v4\n\n 4 => Ok(net::IpAddr::V4(net::Ipv4Addr::new(\n\n bytes[0], bytes[1], bytes[2], bytes[3],\n\n ))),\n\n // v6\n\n 16 => {\n\n let a = from_u16_bytes(&bytes[0..2]);\n\n let b = from_u16_bytes(&bytes[2..4]);\n\n let c = from_u16_bytes(&bytes[4..6]);\n\n let d = from_u16_bytes(&bytes[6..8]);\n\n let e = from_u16_bytes(&bytes[8..10]);\n\n let f = from_u16_bytes(&bytes[10..12]);\n\n let g = from_u16_bytes(&bytes[12..14]);\n\n let h = from_u16_bytes(&bytes[14..16]);\n\n Ok(net::IpAddr::V6(net::Ipv6Addr::new(a, b, c, d, e, f, g, h)))\n\n }\n\n _ => {\n\n // let message = format!(\"Unparseable Ip address {:?}\", bytes);\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Unparseable Ip address {:?}\", bytes),\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 11, "score": 148689.93360910547 }, { "content": "pub fn column_is_empty_err<T: Display>(column_name: T) -> Error {\n\n Error::General(format!(\"Column or UDT property '{}' is empty\", column_name))\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Error::Io(ref err) => write!(f, \"IO error: {}\", err),\n\n Error::Compression(ref err) => write!(f, \"Compressor error: {}\", err),\n\n Error::Server(ref err) => write!(f, \"Server error: {:?}\", err.message),\n\n Error::FromUtf8(ref err) => write!(f, \"FromUtf8Error error: {:?}\", err),\n\n Error::UUIDParse(ref err) => write!(f, \"UUIDParse error: {:?}\", err),\n\n Error::General(ref err) => write!(f, \"GeneralParsing error: {:?}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for Error {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match *self {\n", "file_path": "src/error.rs", "rank": 12, "score": 148517.29947174905 }, { "content": "/// Factory function which returns a `Listener` and related `EventStream.`\n\n///\n\n/// `Listener` provides only one function `start` to start listening. 
It\n\n/// blocks a thread so should be moved into a separate one to no release\n\n/// main thread.\n\n///\n\n/// `EventStream` is an iterator which returns new events once they come.\n\n/// It is similar to `Receiver::iter`.\n\npub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {\n\n let (tx, rx) = channel();\n\n let listener = Listener {\n\n transport: transport,\n\n tx: tx,\n\n };\n\n let stream = EventStream { rx: rx };\n\n (listener, stream)\n\n}\n\n\n\n/// `Listener` provides only one function `start` to start listening. It\n\n/// blocks a thread so should be moved into a separate one to no release\n\n/// main thread.\n\n\n\npub struct Listener<X> {\n\n transport: X,\n\n tx: Sender<ServerEvent>,\n\n}\n\n\n\nimpl<X: CDRSTransport + Unpin + 'static> Listener<Mutex<X>> {\n", "file_path": "src/events.rs", "rank": 13, "score": 146717.21210742666 }, { "content": "pub fn cursor_next_value(cursor: &mut Cursor<&[u8]>, len: u64) -> CDRSResult<Vec<u8>> {\n\n let l = len as usize;\n\n let current_position = cursor.position();\n\n let mut buff: Vec<u8> = Vec::with_capacity(l);\n\n unsafe {\n\n buff.set_len(l);\n\n }\n\n cursor.read_exact(&mut buff)?;\n\n cursor.set_position(current_position + len);\n\n Ok(buff)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::frame::traits::{FromCursor, IntoBytes};\n\n use std::io::Cursor;\n\n use std::mem::transmute;\n\n\n\n // CString\n", "file_path": "src/types/mod.rs", "rank": 14, "score": 142342.2907226981 }, { "content": "/// Tryies to decode bytes array into `u64`.\n\npub fn try_from_bytes(bytes: &[u8]) -> Result<u64, io::Error> {\n\n let l = bytes.len();\n\n let mut c = Cursor::new(bytes);\n\n c.read_uint::<BigEndian>(l)\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 15, "score": 134283.07013194708 }, { "content": "/// Tries to decode bytes array into `i64`.\n\npub fn try_i_from_bytes(bytes: &[u8]) -> Result<i64, io::Error> {\n\n let l = bytes.len();\n\n let mut c = Cursor::new(bytes);\n\n c.read_int::<BigEndian>(l)\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 16, "score": 134283.07013194708 }, { "content": "/// Tries to decode bytes array into `f32`.\n\npub fn try_f32_from_bytes(bytes: &[u8]) -> Result<f32, io::Error> {\n\n let mut c = Cursor::new(bytes);\n\n c.read_f32::<BigEndian>()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 17, "score": 131733.7202328671 }, { "content": "/// Tries to decode bytes array into `f64`.\n\npub fn try_f64_from_bytes(bytes: &[u8]) -> Result<f64, io::Error> {\n\n let mut c = Cursor::new(bytes);\n\n c.read_f64::<BigEndian>()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 18, "score": 131733.7202328671 }, { "content": "/// Tryies to decode bytes array into `u16`.\n\npub fn try_u16_from_bytes(bytes: &[u8]) -> Result<u16, io::Error> {\n\n let mut c = Cursor::new(bytes);\n\n c.read_u16::<BigEndian>()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 19, "score": 131733.7202328671 }, { "content": "// Decodes Cassandra `bigint` data (bytes) into Rust's `Result<i32, io::Error>`\n\npub fn decode_bigint(bytes: &[u8]) -> Result<i64, io::Error> {\n\n try_from_bytes(bytes).map(|i| i as i64)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 20, "score": 129337.09791888056 }, { "content": "// Decodes Cassandra `boolean` data (bytes) into Rust's `Result<i32, io::Error>`\n\npub fn decode_boolean(bytes: &[u8]) -> Result<bool, io::Error> {\n\n let false_byte: u8 = 0;\n\n if bytes.is_empty() {\n\n Err(io::Error::new(\n\n 
io::ErrorKind::UnexpectedEof,\n\n \"no bytes were found\",\n\n ))\n\n } else {\n\n Ok(bytes[0] != false_byte)\n\n }\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 21, "score": 129337.09791888056 }, { "content": "// Decodes Cassandra `timestamp` data (bytes) into Rust's `Result<i64, io::Error>`\n\n// `i32` represents a millisecond-precision\n\n// offset from the unix epoch (00:00:00, January 1st, 1970). Negative values\n\n// represent a negative offset from the epoch.\n\npub fn decode_timestamp(bytes: &[u8]) -> Result<i64, io::Error> {\n\n try_from_bytes(bytes).map(|i| i as i64)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 22, "score": 129335.91634223405 }, { "content": "// Decodes Cassandra `float` data (bytes) into Rust's `Result<f32, io::Error>`\n\npub fn decode_float(bytes: &[u8]) -> Result<f32, io::Error> {\n\n try_f32_from_bytes(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 23, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `tinyint` data (bytes) into Rust's `Result<i8, io::Error>`\n\npub fn decode_tinyint(bytes: &[u8]) -> Result<i8, io::Error> {\n\n Ok(bytes[0] as i8)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 24, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `decimal` data (bytes) into Rust's `Result<f32, io::Error>`\n\npub fn decode_decimal(bytes: &[u8]) -> Result<Decimal, io::Error> {\n\n let lr = bytes.split_at(INT_LEN);\n\n\n\n let scale = try_i_from_bytes(lr.0)? as u32;\n\n let unscaled = try_i_from_bytes(lr.1)?;\n\n\n\n Ok(Decimal::new(unscaled, scale))\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 25, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `varint` data (bytes) into Rust's `Result<i64, io::Error>`\n\npub fn decode_varint(bytes: &[u8]) -> Result<i64, io::Error> {\n\n try_i_from_bytes(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 26, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `double` data (bytes) into Rust's `Result<f32, io::Error>`\n\npub fn decode_double(bytes: &[u8]) -> Result<f64, io::Error> {\n\n try_f64_from_bytes(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 27, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `time` data (bytes) into Rust's `Result<String, FromUtf8Error>`.\n\npub fn decode_time(bytes: &[u8]) -> Result<i64, io::Error> {\n\n try_i_from_bytes(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 28, "score": 129332.66599490124 }, { "content": "// Decodes Cassandra `blob` data (bytes) into Rust's `Result<Vec<u8>, io::Error>`\n\npub fn decode_blob(bytes: &Vec<u8>) -> Result<Blob, io::Error> {\n\n // in fact we just pass it through.\n\n Ok(bytes.clone().into())\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 29, "score": 124370.3922282788 }, { "content": "// Decodes Cassandra `timeuuid` data (bytes) into Rust's `Result<uuid::Uuid, uuid::Error>`\n\npub fn decode_timeuuid(bytes: &[u8]) -> Result<uuid::Uuid, uuid::Error> {\n\n uuid::Uuid::from_slice(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 30, "score": 124370.3922282788 }, { "content": "pub trait TryFromRow: Sized {\n\n fn try_from_row(row: crate::types::rows::Row) -> error::Result<Self>;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 31, "score": 123439.45027926598 }, { 
"content": "pub trait TryFromUDT: Sized {\n\n fn try_from_udt(udt: crate::types::udt::UDT) -> error::Result<Self>;\n\n}\n", "file_path": "src/frame/traits.rs", "rank": 32, "score": 123439.45027926598 }, { "content": "// Decodes Cassandra `list` data (bytes) into Rust's `Result<Vec<CBytes>, io::Error>`\n\npub fn decode_list(bytes: &[u8]) -> Result<Vec<CBytes>, io::Error> {\n\n let mut cursor: io::Cursor<&[u8]> = io::Cursor::new(bytes);\n\n let l = CInt::from_cursor(&mut cursor)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;\n\n let mut list = Vec::with_capacity(l as usize);\n\n for _ in 0..l {\n\n let b = CBytes::from_cursor(&mut cursor)\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;\n\n list.push(b);\n\n }\n\n Ok(list)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 33, "score": 122218.70943927433 }, { "content": "// Decodes Cassandra `set` data (bytes) into Rust's `Result<Vec<CBytes>, io::Error>`\n\npub fn decode_set(bytes: &[u8]) -> Result<Vec<CBytes>, io::Error> {\n\n decode_list(bytes)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 34, "score": 122218.70943927433 }, { "content": "// Decodes Cassandra `map` data (bytes) into Rust's `Result<Vec<(CBytes, CBytes)>, io::Error>`\n\npub fn decode_map(bytes: &[u8]) -> Result<Vec<(CBytes, CBytes)>, io::Error> {\n\n let mut cursor: io::Cursor<&[u8]> = io::Cursor::new(bytes);\n\n let l = CInt::from_cursor(&mut cursor)\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;\n\n let mut map = Vec::with_capacity(l as usize);\n\n for _ in 0..l {\n\n let n = CBytes::from_cursor(&mut cursor)\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;\n\n let v = CBytes::from_cursor(&mut cursor)\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;\n\n map.push((n, v));\n\n }\n\n Ok(map)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 35, "score": 115847.48879692832 }, { "content": "// Decodes Cassandra `Tuple` data (bytes) into Rust's `Result<Vec<CBytes>, io::Error>`\n\n// each `CBytes` is encoded type of field of user defined type\n\npub fn decode_tuple(bytes: &[u8], l: usize) -> Result<Vec<CBytes>, io::Error> {\n\n let mut cursor: io::Cursor<&[u8]> = io::Cursor::new(bytes);\n\n let mut udt = Vec::with_capacity(l);\n\n for _ in 0..l {\n\n let v = CBytes::from_cursor(&mut cursor)\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;\n\n udt.push(v);\n\n }\n\n Ok(udt)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::super::error::*;\n\n use super::super::super::frame::frame_result::*;\n\n use super::*;\n\n use std::net::IpAddr;\n\n\n\n #[test]\n\n fn decode_custom_test() {\n", "file_path": "src/types/data_serialization_types.rs", "rank": 36, "score": 115635.80710058843 }, { "content": "// Decodes Cassandra `Udt` data (bytes) into Rust's `Result<Vec<CBytes>, io::Error>`\n\n// each `CBytes` is encoded type of field of user defined type\n\npub fn decode_udt(bytes: &[u8], l: usize) -> Result<Vec<CBytes>, io::Error> {\n\n let mut cursor: io::Cursor<&[u8]> = io::Cursor::new(bytes);\n\n let mut udt = Vec::with_capacity(l);\n\n for _ in 0..l {\n\n let v = CBytes::from_cursor(&mut cursor)\n\n .or_else(|err| match err {\n\n error::Error::Io(io_err) => {\n\n if io_err.kind() == io::ErrorKind::UnexpectedEof {\n\n Ok(CBytes::new_empty())\n\n } else {\n\n Err(io_err.into())\n\n }\n\n }\n\n _ => Err(err),\n\n })\n\n .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, 
err))?;\n\n udt.push(v);\n\n }\n\n Ok(udt)\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 37, "score": 115635.80710058843 }, { "content": "#[derive(Clone, Debug, IntoCDRSValue, TryFromRow, PartialEq)]\n\nstruct RowStruct {\n\n key: i32,\n\n}\n\n\n\nimpl RowStruct {\n\n fn into_query_values(self) -> QueryValues {\n\n query_values!(\"key\" => self.key)\n\n }\n\n}\n\n\n", "file_path": "examples/paged_query.rs", "rank": 38, "score": 114934.44519634428 }, { "content": "// Decodes Cassandra `varchar` data (bytes) into Rust's `Result<String, FromUtf8Error>`.\n\npub fn decode_varchar(bytes: &[u8]) -> Result<String, FromUtf8Error> {\n\n Ok(String::from_utf8_lossy(bytes).into_owned())\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 39, "score": 114806.25050651368 }, { "content": "// Decodes Cassandra `ascii` data (bytes) into Rust's `Result<String, FromUtf8Error>`.\n\npub fn decode_ascii(bytes: &[u8]) -> Result<String, FromUtf8Error> {\n\n Ok(String::from_utf8_lossy(bytes).into_owned())\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 40, "score": 114806.25050651368 }, { "content": "// Decodes Cassandra `ascii` data (bytes) into Rust's `Result<String, FromUtf8Error>`.\n\npub fn decode_custom(bytes: &[u8]) -> Result<String, FromUtf8Error> {\n\n Ok(String::from_utf8_lossy(bytes).into_owned())\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 41, "score": 114806.25050651368 }, { "content": "// Decodes Cassandra `text` data (bytes) into Rust's `Result<String, FromUtf8Error>`.\n\npub fn decode_text(bytes: &[u8]) -> Result<String, FromUtf8Error> {\n\n Ok(String::from_utf8_lossy(bytes).into_owned())\n\n}\n\n\n", "file_path": "src/types/data_serialization_types.rs", "rank": 42, "score": 114806.25050651368 }, { "content": "type CurrentSession = Session<RoundRobin<TcpConnectionPool<NoneAuthenticator>>>;\n\n\n", "file_path": "examples/paged_query.rs", "rank": 43, "score": 107051.57610335416 }, { "content": "/// Converts byte-array into i64\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given bytes could not be converted into `i64`\n\npub fn from_i_bytes(bytes: &[u8]) -> i64 {\n\n try_i_from_bytes(bytes).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 44, "score": 103238.02513756811 }, { "content": "/// Converts byte-array into u64\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given bytes could not be converted into `u64`\n\npub fn from_bytes(bytes: &[u8]) -> u64 {\n\n try_from_bytes(bytes).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 45, "score": 103238.02513756811 }, { "content": "#[derive(Clone, Debug, IntoCDRSValue, TryFromRow, PartialEq)]\n\nstruct AnotherTestTable {\n\n a: i32,\n\n b: i32,\n\n c: i32,\n\n d: i32,\n\n e: i32,\n\n}\n\n\n\nimpl AnotherTestTable {\n\n fn into_query_values(self) -> QueryValues {\n\n query_values!(\"a\" => self.a, \"b\" => self.b, \"c\" => self.c, \"d\" => self.d, \"e\" => self.e)\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let node = NodeTcpConfigBuilder::new(\"127.0.0.1:9042\", NoneAuthenticator {}).build();\n\n let cluster_config = ClusterTcpConfig(vec![node]);\n\n let lb = RoundRobin::new();\n\n let mut no_compression = new_session(&cluster_config, lb)\n", "file_path": "examples/paged_query.rs", "rank": 46, "score": 101813.44699886549 }, { "content": "/// Converts byte-array into u16\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given bytes could not be converted into `u16`\n\npub fn from_u16_bytes(bytes: &[u8]) 
-> u16 {\n\n try_u16_from_bytes(bytes).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 47, "score": 101054.85303657655 }, { "content": "/// Convers integer into Cassandra's [int]\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `u32` could not be converted into bytes\n\npub fn to_u(int: u32) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is u64\n\n let _ = bytes.write_u32::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 48, "score": 100664.01349007682 }, { "content": "/// Converts `f32` into bytes\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `f32` could not be converted into bytes\n\npub fn to_float(f: f32) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is f32\n\n let _ = bytes.write_f32::<BigEndian>(f).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 49, "score": 100664.01349007682 }, { "content": "/// Compressor trait that defines functionality\n\n/// which should be provided by typical compressor.\n\npub trait Compressor {\n\n /// Encodes given bytes and returns `Result` that contains either\n\n /// encoded data or an error which occures during the transformation.\n\n fn encode(&self, bytes: Vec<u8>) -> Result<Vec<u8>>;\n\n /// Encodes given encoded data and returns `Result` that contains either\n\n /// encoded bytes or an error which occures during the transformation.\n\n fn decode(&self, bytes: Vec<u8>) -> Result<Vec<u8>>;\n\n /// Returns a string which is a name of a compressor. This name should be\n\n /// exactly the same as one which returns a server in a response to\n\n /// `Options` request.\n\n fn into_string(&self) -> Option<String>;\n\n}\n\n\n\n/// Enum which represents a type of compression. 
Only non-startup frame's body can be compressen.\n\n#[derive(Debug, PartialEq, Clone, Copy, Eq, Ord, PartialOrd)]\n\npub enum Compression {\n\n /// [lz4](https://code.google.com/p/lz4/) compression\n\n Lz4,\n\n /// [snappy](https://code.google.com/p/snappy/) compression\n\n Snappy,\n", "file_path": "src/compression.rs", "rank": 50, "score": 100209.07546036223 }, { "content": "/// Converts number i16 into Cassandra's `short`.\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `u16` could not be converted into bytes\n\npub fn to_u_short(int: u16) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is i16\n\n let _ = bytes.write_u16::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 51, "score": 98485.28410010951 }, { "content": "/// Converts integer into Cassandra's [varint].\n\npub fn to_varint(int: i64) -> Vec<u8> {\n\n if int == 0 {\n\n return vec![0];\n\n }\n\n\n\n let mut int_bytes = to_bigint(int);\n\n match int.signum() {\n\n 1 => {\n\n int_bytes = int_bytes.into_iter().skip_while(|b| *b == 0x00).collect();\n\n if int_bytes\n\n .get(0)\n\n .map(|b| b.leading_zeros() == 0)\n\n .unwrap_or(true)\n\n {\n\n int_bytes.insert(0, 0x00);\n\n }\n\n }\n\n -1 => {\n\n int_bytes = int_bytes.into_iter().skip_while(|b| *b == 0xFF).collect();\n\n if int_bytes\n", "file_path": "src/types/mod.rs", "rank": 52, "score": 98480.84138908525 }, { "content": "/// Convers integer into Cassandra's `int`\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `u64` could not be converted into `u64`\n\npub fn to_u_big(int: u64) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is u64\n\n let _ = bytes.write_u64::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 53, "score": 98480.84138908525 }, { "content": "/// Converts `f64` into array of bytes\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `f63` could not be converted into bytes\n\npub fn to_float_big(f: f64) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is f64\n\n let _ = bytes.write_f64::<BigEndian>(f).unwrap();\n\n\n\n bytes\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CString {\n\n string: String,\n\n}\n\n\n\nimpl CString {\n\n pub fn new(string: String) -> CString {\n\n CString { string }\n\n }\n\n\n\n /// Converts internal value into pointer of `str`.\n\n pub fn as_str(&self) -> &str {\n", "file_path": "src/types/mod.rs", "rank": 54, "score": 98480.84138908525 }, { "content": "/// Convers integer into Cassandra's [int]\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given `i64` could not be converted into bytes\n\npub fn to_bigint(int: i64) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n // should not panic as input is i64\n\n let _ = bytes.write_i64::<BigEndian>(int).unwrap();\n\n\n\n bytes\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 55, "score": 98480.84138908525 }, { "content": "/// `IntoBytes` should be used to convert a structure into array of bytes.\n\npub trait IntoBytes {\n\n /// It should convert a struct into an array of bytes.\n\n fn into_cbytes(&self) -> Vec<u8>;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 56, "score": 95768.95878772651 }, { "content": "/// `AsBytes` should be used to convert a value into a single byte.\n\npub trait AsByte {\n\n /// It should represent a struct as a single byte.\n\n fn as_byte(&self) -> u8;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 57, "score": 95768.95878772651 }, { "content": "/// 
`FromBytes` should be used to parse an array of bytes into a structure.\n\npub trait FromBytes {\n\n /// It gets and array of bytes and should return an implementor struct.\n\n fn from_bytes(bytes: &[u8]) -> error::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 58, "score": 95768.95878772651 }, { "content": "/// `FromCursor` should be used to get parsed structure from an `io:Cursor`\n\n/// wich bound to an array of bytes.\n\npub trait FromCursor {\n\n /// It should return an implementor from an `io::Cursor` over an array of bytes.\n\n fn from_cursor(cursor: &mut Cursor<&[u8]>) -> error::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 59, "score": 95768.72341452586 }, { "content": "/// The trait that allows transformation of `Self` to CDRS query values.\n\npub trait IntoQueryValues {\n\n fn into_query_values(self) -> query::QueryValues;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 60, "score": 93114.64739463037 }, { "content": "/// `FromSingleByte` should be used to convert a single byte into a value.\n\n/// It is opposite to `AsByte`.\n\npub trait FromSingleByte {\n\n /// It should convert a single byte into an implementor struct.\n\n fn from_byte(byte: u8) -> Self;\n\n}\n\n\n", "file_path": "src/frame/traits.rs", "rank": 61, "score": 93114.40670732174 }, { "content": "/// Converts u64 numerical value into array of n bytes\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given unisigned integer could not be converted in an array of n bytes\n\npub fn to_n_bytes(int: u64, n: usize) -> Vec<u8> {\n\n try_to_n_bytes(int, n).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 62, "score": 92234.98406021447 }, { "content": "/// Converts u64 numerical value into array of n bytes\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if given integer could not be converted in an array of n bytes\n\npub fn i_to_n_bytes(int: i64, n: usize) -> Vec<u8> {\n\n try_i_to_n_bytes(int, n).unwrap()\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 63, "score": 92234.98406021447 }, { "content": "pub fn prepare_flags(with_tracing: bool, with_warnings: bool) -> Vec<Flag> {\n\n let mut flags = vec![];\n\n\n\n if with_tracing {\n\n flags.push(Flag::Tracing);\n\n }\n\n\n\n if with_warnings {\n\n flags.push(Flag::Warning);\n\n }\n\n\n\n flags\n\n}\n\n\n\npub async fn send_frame<S, T, M>(sender: &S, frame_bytes: Vec<u8>, stream_id: StreamId) -> error::Result<Frame>\n\nwhere\n\n S: GetConnection<T, M> + GetCompressor<'static> + ResponseCache + Sized,\n\n T: CDRSTransport + Unpin + 'static,\n\n M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,\n\n{\n", "file_path": "src/query/utils.rs", "rank": 64, "score": 88513.8833256929 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::frame::*;\n\nuse crate::types::to_short;\n\n\n\nconst CQL_VERSION: &'static str = \"CQL_VERSION\";\n\nconst CQL_VERSION_VAL: &'static str = \"3.0.0\";\n\nconst COMPRESSION: &'static str = \"COMPRESSION\";\n\n\n\n#[derive(Debug)]\n\npub struct BodyReqStartup<'a> {\n\n pub map: HashMap<&'static str, &'a str>,\n\n}\n\n\n\nimpl<'a> BodyReqStartup<'a> {\n\n pub fn new<'b>(compression: Option<&'b str>) -> BodyReqStartup<'b> {\n\n let mut map = HashMap::new();\n\n map.insert(CQL_VERSION, CQL_VERSION_VAL);\n\n if let Some(c) = compression {\n\n map.insert(COMPRESSION, c);\n", "file_path": "src/frame/frame_startup.rs", "rank": 65, "score": 85937.7946204562 }, { "content": " 
v.extend_from_slice(key.as_bytes());\n\n // push val len\n\n v.extend_from_slice(to_short(val.len() as i16).as_slice());\n\n // push val itself\n\n v.extend_from_slice(val.as_bytes());\n\n }\n\n v\n\n }\n\n}\n\n\n\n// Frame implementation related to BodyReqStartup\n\n\n\nimpl Frame {\n\n /// Creates new frame of type `startup`.\n\n pub fn new_req_startup(compression: Option<&str>) -> Frame {\n\n let version = Version::Request;\n\n let flag = Flag::Ignore;\n\n let opcode = Opcode::Startup;\n\n let body = BodyReqStartup::new(compression);\n\n\n", "file_path": "src/frame/frame_startup.rs", "rank": 66, "score": 85929.70454778512 }, { "content": " Frame::new(version, vec![flag], opcode, body.into_cbytes(), None, vec![])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::frame::{Flag, Frame, Opcode, Version};\n\n\n\n #[test]\n\n fn new_body_req_startup_some_compression() {\n\n let compression = \"test_compression\";\n\n let body = BodyReqStartup::new(Some(compression));\n\n assert_eq!(body.map.get(\"CQL_VERSION\"), Some(&\"3.0.0\"));\n\n assert_eq!(body.map.get(\"COMPRESSION\"), Some(&compression));\n\n assert_eq!(body.map.len(), 2);\n\n }\n\n\n\n #[test]\n\n fn new_body_req_startup_none_compression() {\n", "file_path": "src/frame/frame_startup.rs", "rank": 67, "score": 85928.73147077861 }, { "content": " let body = BodyReqStartup::new(None);\n\n assert_eq!(body.map.get(\"CQL_VERSION\"), Some(&\"3.0.0\"));\n\n assert_eq!(body.map.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn new_req_startup() {\n\n let compression = Some(\"test_compression\");\n\n let frame = Frame::new_req_startup(compression);\n\n assert_eq!(frame.version, Version::Request);\n\n assert_eq!(frame.flags, vec![Flag::Ignore]);\n\n assert_eq!(frame.opcode, Opcode::Startup);\n\n assert_eq!(frame.tracing_id, None);\n\n assert_eq!(frame.warnings, vec![] as Vec<String>);\n\n }\n\n}\n", "file_path": "src/frame/frame_startup.rs", "rank": 68, "score": 85925.2827835907 }, { "content": " }\n\n BodyReqStartup { map: map }\n\n }\n\n\n\n // should be [u8; 2]\n\n // Number of key-value pairs\n\n fn num(&self) -> Vec<u8> {\n\n to_short(self.map.len() as i16)\n\n }\n\n}\n\n\n\nimpl<'a> IntoBytes for BodyReqStartup<'a> {\n\n fn into_cbytes(&self) -> Vec<u8> {\n\n let mut v = vec![];\n\n // push number of key-value pairs\n\n v.extend_from_slice(&self.num().as_slice());\n\n for (key, val) in self.map.iter() {\n\n // push key len\n\n v.extend_from_slice(to_short(key.len() as i16).as_slice());\n\n // push key itself\n", "file_path": "src/frame/frame_startup.rs", "rank": 69, "score": 85920.75643907847 }, { "content": "use std::io::Cursor;\n\n\n\nuse crate::error;\n\nuse crate::frame::FromCursor;\n\nuse crate::types::CString;\n\n\n\n/// A server authentication challenge.\n\n#[derive(Debug)]\n\npub struct BodyResAuthenticate {\n\n pub data: CString,\n\n}\n\n\n\nimpl FromCursor for BodyResAuthenticate {\n\n fn from_cursor(mut cursor: &mut Cursor<&[u8]>) -> error::Result<BodyResAuthenticate> {\n\n Ok(BodyResAuthenticate {\n\n data: CString::from_cursor(&mut cursor)?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/frame/frame_authenticate.rs", "rank": 70, "score": 85832.48885477375 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::frame::traits::FromCursor;\n\n use std::io::Cursor;\n\n\n\n #[test]\n\n fn body_res_authenticate() {\n\n // string \"abcde\"\n\n let data = [0, 5, 97, 98, 99, 100, 101];\n\n let mut cursor: Cursor<&[u8]> = Cursor::new(&data);\n\n let body = BodyResAuthenticate::from_cursor(&mut 
cursor).unwrap();\n\n assert_eq!(body.data.as_str(), \"abcde\");\n\n }\n\n}\n", "file_path": "src/frame/frame_authenticate.rs", "rank": 71, "score": 85820.44202327784 }, { "content": "/// `CDRSSession` trait wrap ups whole query functionality. Use it only if whole query\n\n/// machinery is needed and direct sub traits otherwise.\n\npub trait CDRSSession<\n\n 'a,\n\n T: CDRSTransport + Unpin + 'static,\n\n M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error>,\n\n>:\n\n GetCompressor<'static>\n\n + GetConnection<T, M>\n\n + QueryExecutor<T, M>\n\n + PrepareExecutor<T, M>\n\n + ExecExecutor<T, M>\n\n + BatchExecutor<T, M>\n\n{\n\n}\n", "file_path": "src/cluster/mod.rs", "rank": 72, "score": 85792.8148880864 }, { "content": " /// Consistency level of query.\n\n pub cl: Consistency,\n\n /// `i32` representing the number of nodes having acknowledged the request.\n\n pub received: CInt,\n\n /// `i32` representing the number of replicas whose acknowledgement is required to achieve `cl`.\n\n pub blockfor: CInt,\n\n /// Represents the number of nodes that experience a failure while executing the request.\n\n pub num_failures: CInt,\n\n data_present: u8,\n\n}\n\n\n\nimpl ReadFailureError {\n\n /// Shows if replica has resonded to a query.\n\n pub fn replica_has_responded(&self) -> bool {\n\n self.data_present != 0\n\n }\n\n}\n\n\n\nimpl FromCursor for ReadFailureError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<ReadFailureError> {\n", "file_path": "src/frame/frame_error.rs", "rank": 73, "score": 85493.8347161956 }, { "content": "impl FromCursor for SimpleError {\n\n fn from_cursor(mut _cursor: &mut io::Cursor<&[u8]>) -> error::Result<SimpleError> {\n\n Ok(SimpleError {})\n\n }\n\n}\n\n\n\n/// Additional info about\n\n/// [unavailable exception]\n\n/// (https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1025)\n\n#[derive(Debug)]\n\npub struct UnavailableError {\n\n /// Consistency level of query.\n\n pub cl: Consistency,\n\n /// Number of nodes that should be available to respect `cl`.\n\n pub required: CInt,\n\n /// Number of replicas that we were know to be alive.\n\n pub alive: CInt,\n\n}\n\n\n\nimpl FromCursor for UnavailableError {\n", "file_path": "src/frame/frame_error.rs", "rank": 74, "score": 85489.8372240818 }, { "content": " fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<UnavailableError> {\n\n let cl = Consistency::from_cursor(&mut cursor)?;\n\n let required = CInt::from_cursor(&mut cursor)?;\n\n let alive = CInt::from_cursor(&mut cursor)?;\n\n\n\n Ok(UnavailableError {\n\n cl: cl,\n\n required: required,\n\n alive: alive,\n\n })\n\n }\n\n}\n\n\n\n/// Timeout exception during a write request.\n\n#[derive(Debug)]\n\npub struct WriteTimeoutError {\n\n /// Consistency level of query.\n\n pub cl: Consistency,\n\n /// `i32` representing the number of nodes having acknowledged the request.\n\n pub received: CInt,\n", "file_path": "src/frame/frame_error.rs", "rank": 75, "score": 85489.4490661617 }, { "content": "/// depending of type of error it could contain an additional information about an error.\n\n/// This additional information is represented by `additional_info` property which is `ErrorKind`.\n\n#[derive(Debug)]\n\npub struct CDRSError {\n\n /// `i32` that points to a type of error.\n\n pub error_code: CInt,\n\n /// Error message string.\n\n pub message: CString,\n\n /// Additional information.\n\n pub additional_info: AdditionalErrorInfo,\n\n}\n\n\n\nimpl FromCursor for CDRSError {\n\n fn 
from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<CDRSError> {\n\n let error_code = CInt::from_cursor(&mut cursor)?;\n\n let message = CString::from_cursor(&mut cursor)?;\n\n let additional_info = AdditionalErrorInfo::from_cursor_with_code(&mut cursor, error_code)?;\n\n\n\n Ok(CDRSError {\n\n error_code: error_code,\n", "file_path": "src/frame/frame_error.rs", "rank": 76, "score": 85489.23975427236 }, { "content": "}\n\n\n\n/// Timeout exception during a read request.\n\n#[derive(Debug)]\n\npub struct ReadTimeoutError {\n\n /// Consistency level of query.\n\n pub cl: Consistency,\n\n /// `i32` representing the number of nodes having acknowledged the request.\n\n pub received: CInt,\n\n /// `i32` representing the number of replicas whose acknowledgement is required to achieve `cl`.\n\n pub blockfor: CInt,\n\n data_present: u8,\n\n}\n\n\n\nimpl ReadTimeoutError {\n\n /// Shows if replica has resonded to a query.\n\n pub fn replica_has_responded(&self) -> bool {\n\n self.data_present != 0\n\n }\n\n}\n", "file_path": "src/frame/frame_error.rs", "rank": 77, "score": 85488.68205986045 }, { "content": "\n\n/// A non-timeout exception during a write request.\n\n/// [Read more...](https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1106)\n\n#[derive(Debug)]\n\npub struct WriteFailureError {\n\n /// Consistency of the query having triggered the exception.\n\n pub cl: Consistency,\n\n /// Represents the number of nodes having answered the request.\n\n pub received: CInt,\n\n /// Represents the number of replicas whose acknowledgement is required to achieve `cl`.\n\n pub blockfor: CInt,\n\n /// Represents the number of nodes that experience a failure while executing the request.\n\n pub num_failures: CInt,\n\n /// describes the type of the write that failed.\n\n pub write_type: WriteType,\n\n}\n\n\n\nimpl FromCursor for WriteFailureError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<WriteFailureError> {\n\n let cl = Consistency::from_cursor(&mut cursor)?;\n", "file_path": "src/frame/frame_error.rs", "rank": 78, "score": 85488.54673877613 }, { "content": " /// `i32` representing the number of replicas whose acknowledgement is required to achieve `cl`.\n\n pub blockfor: CInt,\n\n /// Describes the type of the write that timed out\n\n pub write_type: WriteType,\n\n}\n\n\n\nimpl FromCursor for WriteTimeoutError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<WriteTimeoutError> {\n\n let cl = Consistency::from_cursor(&mut cursor)?;\n\n let received = CInt::from_cursor(&mut cursor)?;\n\n let blockfor = CInt::from_cursor(&mut cursor)?;\n\n let write_type = WriteType::from_cursor(&mut cursor)?;\n\n\n\n Ok(WriteTimeoutError {\n\n cl: cl,\n\n received: received,\n\n blockfor: blockfor,\n\n write_type: write_type,\n\n })\n\n }\n", "file_path": "src/frame/frame_error.rs", "rank": 79, "score": 85487.99292227844 }, { "content": "//! This modules contains [Cassandra's errors]\n\n//! (https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1011)\n\n//! 
which server could respond to client.\n\n\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse crate::consistency::Consistency;\n\nuse crate::error;\n\nuse crate::frame::traits::FromCursor;\n\nuse crate::frame::Frame;\n\nuse crate::types::*;\n\n\n\n/// CDRS specific `Result` which contains a [`Frame`] in case of `Ok` and `CDRSError` if `Err`.\n\n///\n\n/// [`Frame`]: ../frame/struct.Frame.html\n\npub type Result = result::Result<Frame, CDRSError>;\n\n\n\n/// CDRS error which could be returned by Cassandra server as a response. As it goes\n\n/// from the specification it contains an error code and an error message. Apart of those\n", "file_path": "src/frame/frame_error.rs", "rank": 80, "score": 85487.95102475164 }, { "content": "\n\nimpl FromCursor for ReadTimeoutError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<ReadTimeoutError> {\n\n let cl = Consistency::from_cursor(&mut cursor)?;\n\n let received = CInt::from_cursor(&mut cursor)?;\n\n let blockfor = CInt::from_cursor(&mut cursor)?;\n\n let data_present = try_from_bytes(cursor_next_value(&mut cursor, 1)?.as_slice())? as u8;\n\n\n\n Ok(ReadTimeoutError {\n\n cl: cl,\n\n received: received,\n\n blockfor: blockfor,\n\n data_present: data_present,\n\n })\n\n }\n\n}\n\n\n\n/// A non-timeout exception during a read request.\n\n#[derive(Debug)]\n\npub struct ReadFailureError {\n", "file_path": "src/frame/frame_error.rs", "rank": 81, "score": 85487.73310789026 }, { "content": " ReadFailure(ReadFailureError),\n\n FunctionFailure(FunctionFailureError),\n\n WriteFailure(WriteFailureError),\n\n Syntax(SimpleError),\n\n Unauthorized(SimpleError),\n\n Invalid(SimpleError),\n\n Config(SimpleError),\n\n AlreadyExists(AlreadyExistsError),\n\n Unprepared(UnpreparedError),\n\n}\n\n\n\nimpl AdditionalErrorInfo {\n\n pub fn from_cursor_with_code(\n\n mut cursor: &mut io::Cursor<&[u8]>,\n\n error_code: CInt,\n\n ) -> error::Result<AdditionalErrorInfo> {\n\n match error_code {\n\n 0x0000 => Ok(AdditionalErrorInfo::Server(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n", "file_path": "src/frame/frame_error.rs", "rank": 82, "score": 85487.60135975824 }, { "content": " &mut cursor,\n\n )?)),\n\n 0x2300 => Ok(AdditionalErrorInfo::Config(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x2400 => Ok(AdditionalErrorInfo::AlreadyExists(\n\n AlreadyExistsError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x2500 => Ok(AdditionalErrorInfo::Unprepared(\n\n UnpreparedError::from_cursor(&mut cursor)?,\n\n )),\n\n _ => Err(\"Unexpected additional error info\".into()),\n\n }\n\n }\n\n}\n\n\n\n/// Is used if error does not contain any additional info.\n\n#[derive(Debug)]\n\npub struct SimpleError {}\n\n\n", "file_path": "src/frame/frame_error.rs", "rank": 83, "score": 85486.01069629828 }, { "content": " pub keyspace: CString,\n\n /// The name of the failed function\n\n pub function: CString,\n\n /// `Vec<CString>` one string for each argument type (as CQL type) of the failed function.\n\n pub arg_types: CStringList,\n\n}\n\n\n\nimpl FromCursor for FunctionFailureError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<FunctionFailureError> {\n\n let keyspace = CString::from_cursor(&mut cursor)?;\n\n let function = CString::from_cursor(&mut cursor)?;\n\n let arg_types = CStringList::from_cursor(&mut cursor)?;\n\n\n\n Ok(FunctionFailureError {\n\n keyspace: keyspace,\n\n function: function,\n\n arg_types: arg_types,\n\n })\n\n }\n\n}\n", "file_path": "src/frame/frame_error.rs", "rank": 84, "score": 85485.82638005938 }, { 
"content": " let ks = CString::from_cursor(&mut cursor)?;\n\n let table = CString::from_cursor(&mut cursor)?;\n\n\n\n Ok(AlreadyExistsError {\n\n ks: ks,\n\n table: table,\n\n })\n\n }\n\n}\n\n\n\n/// Can be thrown while a prepared statement tries to be\n\n/// executed if the provided prepared statement ID is not known by\n\n/// this host. [Read more...]\n\n/// (https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1150)\n\n#[derive(Debug)]\n\npub struct UnpreparedError {\n\n /// Unknown ID.\n\n pub id: CBytesShort,\n\n}\n\n\n\nimpl FromCursor for UnpreparedError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<UnpreparedError> {\n\n let id = CBytesShort::from_cursor(&mut cursor)?;\n\n\n\n Ok(UnpreparedError { id: id })\n\n }\n\n}\n", "file_path": "src/frame/frame_error.rs", "rank": 85, "score": 85485.26576339289 }, { "content": " \"COUNTER\" => Ok(WriteType::Counter),\n\n \"BATCH_LOG\" => Ok(WriteType::BatchLog),\n\n _ => Err(\"Unexpected write type\".into()),\n\n })\n\n }\n\n}\n\n\n\n/// The query attempted to create a keyspace or a table that was already existing.\n\n/// [Read more...](https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1140)\n\n#[derive(Debug)]\n\npub struct AlreadyExistsError {\n\n /// Represents either the keyspace that already exists,\n\n /// or the keyspace in which the table that already exists is.\n\n pub ks: CString,\n\n /// Represents the name of the table that already exists.\n\n pub table: CString,\n\n}\n\n\n\nimpl FromCursor for AlreadyExistsError {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<AlreadyExistsError> {\n", "file_path": "src/frame/frame_error.rs", "rank": 86, "score": 85484.32724641908 }, { "content": " let cl = Consistency::from_cursor(&mut cursor)?;\n\n let received = CInt::from_cursor(&mut cursor)?;\n\n let blockfor = CInt::from_cursor(&mut cursor)?;\n\n let num_failures = CInt::from_cursor(&mut cursor)?;\n\n let data_present = try_from_bytes(cursor_next_value(&mut cursor, 1)?.as_slice())? as u8;\n\n\n\n Ok(ReadFailureError {\n\n cl: cl,\n\n received: received,\n\n blockfor: blockfor,\n\n num_failures: num_failures,\n\n data_present: data_present,\n\n })\n\n }\n\n}\n\n\n\n/// A (user defined) function failed during execution.\n\n#[derive(Debug)]\n\npub struct FunctionFailureError {\n\n /// The keyspace of the failed function.\n", "file_path": "src/frame/frame_error.rs", "rank": 87, "score": 85483.88588522107 }, { "content": " 0x000A => Ok(AdditionalErrorInfo::Protocol(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x0100 => Ok(AdditionalErrorInfo::Authentication(\n\n SimpleError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1000 => Ok(AdditionalErrorInfo::Unavailable(\n\n UnavailableError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1001 => Ok(AdditionalErrorInfo::Overloaded(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x1002 => Ok(AdditionalErrorInfo::IsBootstrapping(\n\n SimpleError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1003 => Ok(AdditionalErrorInfo::Truncate(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x1100 => Ok(AdditionalErrorInfo::WriteTimeout(\n\n WriteTimeoutError::from_cursor(&mut cursor)?,\n", "file_path": "src/frame/frame_error.rs", "rank": 88, "score": 85482.96755600967 }, { "content": " Simple,\n\n /// The write was a (logged) batch write.\n\n /// If this type is received, it means the batch log\n\n /// has been successfully written\n\n Batch,\n\n /// The write was an unlogged batch. 
No batch log write has been attempted.\n\n UnloggedBatch,\n\n /// The write was a counter write (batched or not)\n\n Counter,\n\n /// The failure occured during the write to the batch log when a (logged) batch\n\n /// write was requested.\n\n BatchLog,\n\n}\n\n\n\nimpl FromCursor for WriteType {\n\n fn from_cursor(mut cursor: &mut io::Cursor<&[u8]>) -> error::Result<WriteType> {\n\n CString::from_cursor(&mut cursor).and_then(|wt| match wt.as_str() {\n\n \"SIMPLE\" => Ok(WriteType::Simple),\n\n \"BATCH\" => Ok(WriteType::Batch),\n\n \"UNLOGGED_BATCH\" => Ok(WriteType::UnloggedBatch),\n", "file_path": "src/frame/frame_error.rs", "rank": 89, "score": 85481.43269108325 }, { "content": " message: message,\n\n additional_info: additional_info,\n\n })\n\n }\n\n}\n\n\n\n/// Additional error info in accordance to\n\n/// [Cassandra protocol v4]\n\n/// (https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1011).\n\n#[derive(Debug)]\n\npub enum AdditionalErrorInfo {\n\n Server(SimpleError),\n\n Protocol(SimpleError),\n\n Authentication(SimpleError),\n\n Unavailable(UnavailableError),\n\n Overloaded(SimpleError),\n\n IsBootstrapping(SimpleError),\n\n Truncate(SimpleError),\n\n WriteTimeout(WriteTimeoutError),\n\n ReadTimeout(ReadTimeoutError),\n", "file_path": "src/frame/frame_error.rs", "rank": 90, "score": 85480.90532231946 }, { "content": " )),\n\n 0x1200 => Ok(AdditionalErrorInfo::ReadTimeout(\n\n ReadTimeoutError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1300 => Ok(AdditionalErrorInfo::ReadFailure(\n\n ReadFailureError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1400 => Ok(AdditionalErrorInfo::FunctionFailure(\n\n FunctionFailureError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x1500 => Ok(AdditionalErrorInfo::WriteFailure(\n\n WriteFailureError::from_cursor(&mut cursor)?,\n\n )),\n\n 0x2000 => Ok(AdditionalErrorInfo::Syntax(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x2100 => Ok(AdditionalErrorInfo::Unauthorized(SimpleError::from_cursor(\n\n &mut cursor,\n\n )?)),\n\n 0x2200 => Ok(AdditionalErrorInfo::Invalid(SimpleError::from_cursor(\n", "file_path": "src/frame/frame_error.rs", "rank": 91, "score": 85480.16564908324 }, { "content": " let received = CInt::from_cursor(&mut cursor)?;\n\n let blockfor = CInt::from_cursor(&mut cursor)?;\n\n let num_failures = CInt::from_cursor(&mut cursor)?;\n\n let write_type = WriteType::from_cursor(&mut cursor)?;\n\n\n\n Ok(WriteFailureError {\n\n cl: cl,\n\n received: received,\n\n blockfor: blockfor,\n\n num_failures: num_failures,\n\n write_type: write_type,\n\n })\n\n }\n\n}\n\n\n\n/// Describes the type of the write that failed.\n\n/// [Read more...](https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L1118)\n\n#[derive(Debug)]\n\npub enum WriteType {\n\n /// The write was a non-batched non-counter write\n", "file_path": "src/frame/frame_error.rs", "rank": 92, "score": 85479.49613548692 }, { "content": "#[async_trait]\n\npub trait QueryExecutor<\n\n T: CDRSTransport + Unpin + 'static,\n\n M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,\n\n>: GetConnection<T, M> + GetCompressor<'static> + ResponseCache + Sync\n\n{\n\n async fn query_with_params_tw<Q: ToString + Send>(\n\n &self,\n\n query: Q,\n\n query_params: QueryParams,\n\n with_tracing: bool,\n\n with_warnings: bool,\n\n ) -> error::Result<Frame>\n\n where\n\n Self: Sized,\n\n {\n\n let query = Query {\n\n query: query.to_string(),\n\n params: query_params,\n\n };\n\n\n", "file_path": "src/query/query_executor.rs", 
"rank": 93, "score": 84542.9148371348 }, { "content": "#[async_trait]\n\npub trait PrepareExecutor<\n\n T: CDRSTransport + Unpin + 'static,\n\n M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,\n\n>: GetConnection<T, M> + GetCompressor<'static> + ResponseCache + Sync\n\n{\n\n /// It prepares a query for execution, along with query itself the\n\n /// method takes `with_tracing` and `with_warnings` flags to get\n\n /// tracing information and warnings. Return the raw prepared\n\n /// query result.\n\n async fn prepare_raw_tw<Q: ToString + Sync + Send>(\n\n &self,\n\n query: Q,\n\n with_tracing: bool,\n\n with_warnings: bool,\n\n ) -> error::Result<BodyResResultPrepared>\n\n where\n\n Self: Sized,\n\n {\n\n let flags = prepare_flags(with_tracing, with_warnings);\n\n\n", "file_path": "src/query/prepare_executor.rs", "rank": 94, "score": 84542.9148371348 }, { "content": "#[async_trait]\n\npub trait ExecExecutor<\n\n T: CDRSTransport + Unpin + 'static,\n\n M: bb8::ManageConnection<Connection = Mutex<T>, Error = error::Error> + Sized,\n\n>: GetConnection<T, M> + GetCompressor<'static> + ResponseCache + Sync\n\n{\n\n async fn exec_with_params_tw(\n\n &self,\n\n prepared: &PreparedQuery,\n\n query_parameters: QueryParams,\n\n with_tracing: bool,\n\n with_warnings: bool,\n\n ) -> error::Result<Frame>\n\n where\n\n Self: Sized,\n\n {\n\n let flags = prepare_flags(with_tracing, with_warnings);\n\n let options_frame = Frame::new_req_execute(prepared, query_parameters, flags);\n\n\n\n send_frame(self, options_frame.into_cbytes(), options_frame.stream).await\n\n }\n", "file_path": "src/query/exec_executor.rs", "rank": 95, "score": 84542.9148371348 }, { "content": "/// Tries to converts u64 numerical value into array of n bytes.\n\npub fn try_to_n_bytes(int: u64, n: usize) -> io::Result<Vec<u8>> {\n\n let mut bytes = vec![];\n\n bytes.write_uint::<BigEndian>(int, n)?;\n\n\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 96, "score": 83531.23102063843 }, { "content": "pub fn try_i_to_n_bytes(int: i64, n: usize) -> io::Result<Vec<u8>> {\n\n let mut bytes = Vec::with_capacity(n);\n\n unsafe {\n\n bytes.set_len(n);\n\n }\n\n BigEndian::write_int(&mut bytes, int, n);\n\n\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "src/types/mod.rs", "rank": 97, "score": 83531.23102063843 }, { "content": "fn start_node_a<A>(_: A) -> io::Result<Output> {\n\n Command::new(\"docker\")\n\n .args(&[\n\n \"run\",\n\n \"-d\",\n\n \"-p\",\n\n \"9042:9042\",\n\n \"--name\",\n\n \"cass1\",\n\n \"cassandra:3.9\",\n\n ])\n\n .output()\n\n}\n\n\n", "file_path": "examples/dynamic_cluster.rs", "rank": 98, "score": 83455.13740183425 }, { "content": "pub trait LoadBalancingStrategy<N>: Sized {\n\n fn init(&mut self, cluster: Vec<Arc<N>>);\n\n fn next(&self) -> Option<Arc<N>>;\n\n fn remove_node<F>(&mut self, _filter: F)\n\n where\n\n F: FnMut(&N) -> bool,\n\n {\n\n // default implementation does nothing\n\n }\n\n}\n", "file_path": "src/load_balancing/mod.rs", "rank": 99, "score": 81788.3958729995 } ]
Rust
src/utils/v6/ipv6_cidr_separator.rs
sanderv32/cidr-utils
c0f7607e086d7e3ea25b429a55041a86fe624ddd
extern crate num_traits; use std::cmp::Ordering; use crate::cidr::Ipv6Cidr; use crate::num_bigint::BigUint; use crate::utils::Ipv6CidrCombiner; use num_traits::{One, ToPrimitive}; #[derive(Debug)] pub struct Ipv6CidrSeparator; impl Ipv6CidrSeparator { pub fn divide_by(cidr: &Ipv6Cidr, n: usize) -> Option<Vec<Ipv6CidrCombiner>> { let size = cidr.size(); let n_big_int = BigUint::from(n); if n == 0 || n_big_int > size { return None; } else if n == 1 { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(*cidr); return Some(vec![combiner]); } let log2_n = (n as f64).log2(); let mut output = Vec::with_capacity(n); if (log2_n - log2_n.floor()).abs() < 2.0 * std::f64::EPSILON { let mut iter = cidr.iter(); let bits = cidr.get_bits() + log2_n as u8; let usize_max_big_int = BigUint::from(usize::max_value()); let d = size / n_big_int; if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); while let Some(ip) = iter.nth_big_uint(nth.clone()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } } } else { let d = size / n_big_int; let iter = cidr.iter(); let mut current_combiner = Ipv6CidrCombiner::new(); let mut i = BigUint::one(); for ip in iter { current_combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, 128).unwrap()); if i == d { output.push(current_combiner); current_combiner = Ipv6CidrCombiner::new(); i = BigUint::one(); } else { i += BigUint::one(); } } let last_combiner = output.last_mut().unwrap(); for cidr in current_combiner.into_ipv6_cidr_vec().into_iter() { last_combiner.push(cidr); } } Some(output) } pub fn sub_networks(cidr: &Ipv6Cidr, bits: u8) -> Option<Vec<Ipv6Cidr>> { let cidr_bits = cidr.get_bits(); match cidr_bits.cmp(&bits) { Ordering::Greater => return None, Ordering::Equal => return Some(vec![*cidr]), Ordering::Less => (), } let n = 2usize.pow(u32::from(bits - cidr_bits)); let n_big_int = BigUint::from(n); let mut output = Vec::with_capacity(n); let size = cidr.size(); let d = size / n_big_int; let mut iter = cidr.iter(); let usize_max_big_int = BigUint::from(usize::max_value()); if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); while let Some(ip) = iter.nth_big_uint(nth.clone()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } } Some(output) } }
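For orientation, here is a short usage sketch of the two public functions defined in the flattened source line above, Ipv6CidrSeparator::sub_networks and Ipv6CidrSeparator::divide_by. The cidr_utils module paths and the Ipv6Cidr::from_str constructor are assumptions taken from the use crate::... lines in this record and from the test snippets in the context items further down, so treat the sketch as illustrative rather than authoritative:

use cidr_utils::cidr::Ipv6Cidr;
use cidr_utils::utils::Ipv6CidrSeparator;

fn main() {
    // A /112 network spans 2^16 addresses (compare the `size` test in the context items).
    let cidr = Ipv6Cidr::from_str("0:0:0:0:0:FFFF:FFFF:0/112").unwrap();

    // Splitting down to /114 yields 2^(114 - 112) = 4 equally sized sub-networks.
    let subs = Ipv6CidrSeparator::sub_networks(&cidr, 114).unwrap();
    assert_eq!(4, subs.len());

    // A non-power-of-two divisor takes the per-address path and still returns
    // exactly 5 combiners that together cover the whole range.
    let groups = Ipv6CidrSeparator::divide_by(&cidr, 5).unwrap();
    assert_eq!(5, groups.len());
}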
extern crate num_traits; use std::cmp::Ordering; use crate::cidr::Ipv6Cidr; use crate::num_bigint::BigUint; use crate::utils::Ipv6CidrCombiner; use num_traits::{One, ToPrimitive}; #[derive(Debug)] pub struct Ipv6CidrSeparator; impl Ipv6CidrSeparator { pub fn divide_by(cidr: &Ipv6Cidr, n: usize) -> Option<Vec<Ipv6CidrCombiner>> { let size = cidr.size(); let n_big_int = BigUint::from(n); if n == 0 || n_big_int > size { return None; } else if n == 1 { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(*cidr); return Some(vec![combiner]); } let log2_n = (n as f64).log2(); let mut output = Vec::with_capacity(n); if (log2_n - log2_n.floor()).abs() < 2.0 * std::f64::EPSILON { let mut iter = cidr.iter(); let bits = cidr.get_bits() + log2_n as u8; let usize_max_big_int = BigUint::from(usize::max_value()); let d = size / n_big_int; if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { let mut combiner = I
usize::max_value()); if d <= usize_max_big_int { for ip in iter.step_by(d.to_usize().unwrap()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } else { let nth = d - BigUint::one(); if let Some(ip) = iter.next() { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); while let Some(ip) = iter.nth_big_uint(nth.clone()) { output.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); } } } Some(output) } }
pv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); while let Some(ip) = iter.nth_big_uint(nth.clone()) { let mut combiner = Ipv6CidrCombiner::with_capacity(1); combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, bits).unwrap()); output.push(combiner); } } } } else { let d = size / n_big_int; let iter = cidr.iter(); let mut current_combiner = Ipv6CidrCombiner::new(); let mut i = BigUint::one(); for ip in iter { current_combiner.push(Ipv6Cidr::from_prefix_and_bits(ip, 128).unwrap()); if i == d { output.push(current_combiner); current_combiner = Ipv6CidrCombiner::new(); i = BigUint::one(); } else { i += BigUint::one(); } } let last_combiner = output.last_mut().unwrap(); for cidr in current_combiner.into_ipv6_cidr_vec().into_iter() { last_combiner.push(cidr); } } Some(output) } pub fn sub_networks(cidr: &Ipv6Cidr, bits: u8) -> Option<Vec<Ipv6Cidr>> { let cidr_bits = cidr.get_bits(); match cidr_bits.cmp(&bits) { Ordering::Greater => return None, Ordering::Equal => return Some(vec![*cidr]), Ordering::Less => (), } let n = 2usize.pow(u32::from(bits - cidr_bits)); let n_big_int = BigUint::from(n); let mut output = Vec::with_capacity(n); let size = cidr.size(); let d = size / n_big_int; let mut iter = cidr.iter(); let usize_max_big_int = BigUint::from(
random
[ { "content": "#[test]\n\nfn simple_test() {\n\n let mut combiner = IpCidrCombiner::new();\n\n\n\n combiner.push(IpCidr::from_str(\"192.168.1.100\").unwrap());\n\n combiner.push(IpCidr::from_str(\"192.168.1.101\").unwrap());\n\n combiner.push(IpCidr::from_str(\"192.168.1.102\").unwrap());\n\n combiner.push(IpCidr::from_str(\"192.168.1.103\").unwrap());\n\n\n\n combiner.push(IpCidr::from_str(\"::ffff:192.168.1.100\").unwrap());\n\n combiner.push(IpCidr::from_str(\"::ffff:192.168.1.101\").unwrap());\n\n combiner.push(IpCidr::from_str(\"::ffff:192.168.1.102\").unwrap());\n\n combiner.push(IpCidr::from_str(\"::ffff:192.168.1.103\").unwrap());\n\n\n\n assert_eq!(1, combiner.get_ipv4_cidrs().len());\n\n assert_eq!(1, combiner.get_ipv6_cidrs().len());\n\n assert_eq!(Ipv4Cidr::from_str(\"192.168.1.100/30\").unwrap(), combiner.get_ipv4_cidrs()[0]);\n\n assert_eq!(\n\n Ipv6Cidr::from_str(\"::ffff:192.168.1.100/126\").unwrap(),\n\n combiner.get_ipv6_cidrs()[0]\n\n );\n\n}\n", "file_path": "tests/ip_cidr_combiner.rs", "rank": 0, "score": 94738.53265467803 }, { "content": "#[test]\n\nfn size() {\n\n let cidr_1 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n let cidr_2 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/0\").unwrap();\n\n\n\n assert_eq!(BigUint::from(65536u128), cidr_1.size());\n\n assert_eq!(\n\n BigUint::from_str(\"340282366920938463463374607431768211456\").unwrap(),\n\n cidr_2.size()\n\n );\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 1, "score": 91560.2277409306 }, { "content": "#[test]\n\nfn iter() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let mut iter = cidr.iter();\n\n\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 1)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 2)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65535)), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 2, "score": 89925.50877721398 }, { "content": "#[test]\n\nfn iter() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n\n\n let mut iter = cidr.iter();\n\n\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 0, 0)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 0, 1)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 0, 2)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 255, 255)), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 3, "score": 89925.50877721398 }, { "content": "#[derive(Debug)]\n\nenum IpCidrIpsAddrIterator {\n\n V4(Ipv4CidrIpv4AddrIterator),\n\n V6(Ipv6CidrIpv6AddrIterator),\n\n}\n\n\n\n/// To iterate IP CIDRs.\n\n#[derive(Debug)]\n\npub struct IpCidrIpAddrIterator {\n\n iter: IpCidrIpsAddrIterator,\n\n}\n\n\n\nimpl Iterator for IpCidrIpAddrIterator {\n\n type Item = IpAddr;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<IpAddr> {\n\n match &mut self.iter {\n\n IpCidrIpsAddrIterator::V4(iter) => iter.next().map(IpAddr::V4),\n\n IpCidrIpsAddrIterator::V6(iter) => iter.next().map(IpAddr::V6),\n\n }\n", "file_path": "src/cidr/ip_cidr_iterators.rs", "rank": 4, "score": 71416.16577952469 }, { "content": "#[test]\n\nfn from_prefix_and_bits() {\n\n let cidr_1 = Ipv4Cidr::from_prefix_and_bits([192, 168, 51, 1], 24).unwrap();\n\n let cidr_2 = Ipv4Cidr::from_prefix_and_bits([192, 
168, 43, 1], 25).unwrap();\n\n\n\n assert_eq!(24, cidr_1.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 0), cidr_1.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 51, 0), cidr_1.get_prefix_as_ipv4_addr());\n\n\n\n assert_eq!(25, cidr_2.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 128), cidr_2.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 43, 0), cidr_2.get_prefix_as_ipv4_addr());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 5, "score": 69388.75575968836 }, { "content": "#[test]\n\nfn from_prefix_and_bits() {\n\n let cidr_1 = Ipv6Cidr::from_prefix_and_bits([0, 0, 0, 0, 0, 65535, 65535, 0], 112).unwrap();\n\n let cidr_2 = Ipv6Cidr::from_prefix_and_bits([0, 0, 0, 0, 0, 65535, 65500, 0], 113).unwrap();\n\n\n\n assert_eq!(112, cidr_1.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 0),\n\n cidr_1.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0), cidr_1.get_prefix_as_ipv6_addr());\n\n\n\n assert_eq!(113, cidr_2.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 32768),\n\n cidr_2.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65500, 0), cidr_2.get_prefix_as_ipv6_addr());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 6, "score": 69388.75575968836 }, { "content": "#[test]\n\nfn divide_by() {\n\n let cidr = IpCidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 4).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 5).unwrap();\n\n\n\n assert_eq!(5, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"0.0.0.0/0\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 1).unwrap();\n\n\n\n assert_eq!(1, result.len());\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 2).unwrap();\n", "file_path": "tests/ip_cidr_separator.rs", "rank": 7, "score": 68333.34983339011 }, { "content": "#[test]\n\nfn push() {\n\n let mut combiner = Ipv4CidrCombiner::new();\n\n\n\n assert_eq!(0, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.0/24\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.0/24\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.52/32\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.2.0/24\").unwrap());\n\n\n\n assert_eq!(2, combiner.len());\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 8, "score": 68230.76244196885 }, { "content": "#[test]\n\nfn iter_rev() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n\n\n let mut iter = cidr.iter().rev();\n\n\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 255, 255)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 255, 254)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 255, 253)), iter.next().unwrap());\n\n assert_eq!(u32::from(Ipv4Addr::new(192, 168, 0, 0)), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 9, "score": 68019.95307578708 }, { "content": "#[test]\n\nfn iter_rev() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let mut iter = cidr.iter().rev();\n\n\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 
0, 0, 0, 65535, 65535, 65535)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65534)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65533)), iter.next().unwrap());\n\n assert_eq!(u128::from(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0)), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 10, "score": 68019.95307578708 }, { "content": "#[test]\n\nfn sub_networks() {\n\n let cidr = IpCidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = IpCidrSeparator::sub_networks(&cidr, 26).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = IpCidrSeparator::sub_networks(&cidr, 114).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n}\n", "file_path": "tests/ip_cidr_separator.rs", "rank": 11, "score": 66278.12230498143 }, { "content": "#[test]\n\nfn simple_test() {\n\n let mut combiner = Ipv6CidrCombiner::new();\n\n\n\n combiner.push(Ipv6Cidr::from_str(\"::ffff:192.168.1.100\").unwrap());\n\n combiner.push(Ipv6Cidr::from_str(\"::ffff:192.168.1.101\").unwrap());\n\n combiner.push(Ipv6Cidr::from_str(\"::ffff:192.168.1.102\").unwrap());\n\n combiner.push(Ipv6Cidr::from_str(\"::ffff:192.168.1.103\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n assert_eq!(Ipv6Cidr::from_str(\"::ffff:192.168.1.100/126\").unwrap(), combiner[0]);\n\n}\n", "file_path": "tests/ipv6_cidr_combiner.rs", "rank": 12, "score": 66179.08828226014 }, { "content": "#[test]\n\nfn simple_test() {\n\n let mut combiner = Ipv4CidrCombiner::new();\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.100\").unwrap());\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.101\").unwrap());\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.102\").unwrap());\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.1.103\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n assert_eq!(Ipv4Cidr::from_str(\"192.168.1.100/30\").unwrap(), combiner[0]);\n\n}\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 13, "score": 66179.08828226014 }, { "content": "#[test]\n\nfn iter_as_ipv4_addr() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n\n\n let mut iter = cidr.iter_as_ipv4_addr();\n\n\n\n assert_eq!(Ipv4Addr::new(192, 168, 0, 0), iter.next().unwrap());\n\n assert_eq!(Ipv4Addr::new(192, 168, 0, 1), iter.next().unwrap());\n\n assert_eq!(Ipv4Addr::new(192, 168, 0, 2), iter.next().unwrap());\n\n assert_eq!(Ipv4Addr::new(192, 168, 255, 255), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 14, "score": 65975.58082124499 }, { "content": "#[test]\n\nfn iter_as_ipv6_addr() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let mut iter = cidr.iter_as_ipv6_addr();\n\n\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 1), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 2), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65535), iter.last().unwrap());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 15, "score": 65975.58082124499 }, { "content": "#[test]\n\nfn iter_rev_as_ipv4_addr() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n\n\n let mut iter = cidr.iter_as_ipv4_addr().rev();\n\n\n\n assert_eq!(Ipv4Addr::new(192, 168, 255, 255), iter.next().unwrap());\n\n 
assert_eq!(Ipv4Addr::new(192, 168, 255, 254), iter.next().unwrap());\n\n assert_eq!(Ipv4Addr::new(192, 168, 255, 253), iter.next().unwrap());\n\n assert_eq!(Ipv4Addr::new(192, 168, 0, 0), iter.last().unwrap());\n\n}\n", "file_path": "tests/ipv4_cidr.rs", "rank": 16, "score": 64068.091105928455 }, { "content": "#[test]\n\nfn iter_rev_as_ipv6_addr() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let mut iter = cidr.iter_as_ipv6_addr().rev();\n\n\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65535), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65534), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65533), iter.next().unwrap());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0), iter.last().unwrap());\n\n}\n", "file_path": "tests/ipv6_cidr.rs", "rank": 17, "score": 64068.091105928455 }, { "content": "extern crate cidr_utils;\n\n\n\nuse cidr_utils::{\n\n cidr::{IpCidr, Ipv4Cidr, Ipv6Cidr},\n\n utils::IpCidrCombiner,\n\n};\n\n\n\n#[test]\n", "file_path": "tests/ip_cidr_combiner.rs", "rank": 18, "score": 61278.351317801025 }, { "content": "use std::fmt::{self, Display, Formatter, Write};\n\nuse std::net::IpAddr;\n\n\n\nuse crate::cidr::{IpCidr, Ipv4Cidr, Ipv6Cidr};\n\nuse crate::num_bigint::BigUint;\n\nuse crate::utils::{Ipv4CidrCombiner, Ipv6CidrCombiner};\n\n\n\n/// To combine multiple IPv4 CIDRs and IPv6 CIDRs to supernetworks.\n\n#[derive(Debug)]\n\npub struct IpCidrCombiner {\n\n ipv4: Ipv4CidrCombiner,\n\n ipv6: Ipv6CidrCombiner,\n\n}\n\n\n\nimpl IpCidrCombiner {\n\n #[inline]\n\n /// Create a new `IpCidrCombiner` instance.\n\n pub fn new() -> IpCidrCombiner {\n\n IpCidrCombiner {\n\n ipv4: Ipv4CidrCombiner::new(),\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 19, "score": 59082.633335853025 }, { "content": "\n\n false\n\n }\n\n\n\n #[inline]\n\n pub fn ipv4_size(&self) -> u64 {\n\n self.ipv4.size()\n\n }\n\n\n\n #[inline]\n\n pub fn ipv6_size(&self) -> BigUint {\n\n self.ipv6.size()\n\n }\n\n}\n\n\n\nimpl Display for IpCidrCombiner {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n f.write_char('[')?;\n\n\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 20, "score": 59081.6545170596 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn get_ipv6_cidrs(&self) -> &[Ipv6Cidr] {\n\n &self.ipv6\n\n }\n\n}\n\n\n\nimpl IpCidrCombiner {\n\n /// Push a CIDR into this combiner.\n\n pub fn push(&mut self, cidr: IpCidr) {\n\n match cidr {\n\n IpCidr::V4(cidr) => {\n\n self.ipv4.push(cidr);\n\n }\n\n IpCidr::V6(cidr) => {\n\n self.ipv6.push(cidr);\n\n }\n\n }\n\n }\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 21, "score": 59079.267954736155 }, { "content": " ipv6: Ipv6CidrCombiner::new(),\n\n }\n\n }\n\n\n\n #[inline]\n\n /// Create a new `IpCidrCombiner` instance with specific capacities.\n\n pub fn with_capacity(ipv4_capacity: usize, ipv6_capacity: usize) -> IpCidrCombiner {\n\n IpCidrCombiner {\n\n ipv4: Ipv4CidrCombiner::with_capacity(ipv4_capacity),\n\n ipv6: Ipv6CidrCombiner::with_capacity(ipv6_capacity),\n\n }\n\n }\n\n\n\n #[allow(clippy::missing_safety_doc)]\n\n #[inline]\n\n pub unsafe fn from_cidr_vec_unchecked(\n\n ipv4_cidr_vec: Vec<Ipv4Cidr>,\n\n ipv6_cidr_vec: Vec<Ipv6Cidr>,\n\n ) -> IpCidrCombiner {\n\n IpCidrCombiner {\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 22, "score": 59077.4869005862 }, { "content": "\n\n for cidr in self.ipv6.iter().take(length_dec) {\n\n 
f.write_fmt(format_args!(\"{}, \", cidr))?\n\n }\n\n\n\n f.write_fmt(format_args!(\"{}\", self.ipv6[length_dec]))?;\n\n }\n\n\n\n f.write_char(']')\n\n }\n\n}\n\n\n\nimpl Default for IpCidrCombiner {\n\n #[inline]\n\n fn default() -> Self {\n\n IpCidrCombiner::new()\n\n }\n\n}\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 23, "score": 59076.25596788348 }, { "content": " ipv4: Ipv4CidrCombiner::from_ipv4_cidr_vec_unchecked(ipv4_cidr_vec),\n\n ipv6: Ipv6CidrCombiner::from_ipv6_cidr_vec_unchecked(ipv6_cidr_vec),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn into_ipv4_cidr_vec(self) -> Vec<Ipv4Cidr> {\n\n self.ipv4.into_ipv4_cidr_vec()\n\n }\n\n\n\n #[inline]\n\n pub fn into_ipv6_cidr_vec(self) -> Vec<Ipv6Cidr> {\n\n self.ipv6.into_ipv6_cidr_vec()\n\n }\n\n}\n\n\n\nimpl IpCidrCombiner {\n\n #[inline]\n\n pub fn get_ipv4_cidrs(&self) -> &[Ipv4Cidr] {\n\n &self.ipv4\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 24, "score": 59074.84191904675 }, { "content": "\n\n #[inline]\n\n /// Check an IP whether it is in these CIDRs.\n\n pub fn contains(&self, ip: IpAddr) -> bool {\n\n match ip {\n\n IpAddr::V4(ipv4) => {\n\n for cidr in self.ipv4.iter() {\n\n if cidr.contains(&ipv4) {\n\n return true;\n\n }\n\n }\n\n }\n\n IpAddr::V6(ipv6) => {\n\n for cidr in self.ipv6.iter() {\n\n if cidr.contains(&ipv6) {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 25, "score": 59074.77786178865 }, { "content": " let ipv4_length = self.ipv4.len();\n\n\n\n if ipv4_length > 0 {\n\n let length_dec = ipv4_length - 1;\n\n\n\n for cidr in self.ipv4.iter().take(length_dec) {\n\n f.write_fmt(format_args!(\"{}, \", cidr))?\n\n }\n\n\n\n f.write_fmt(format_args!(\"{}\", self.ipv4[length_dec]))?;\n\n }\n\n\n\n let ipv6_length = self.ipv6.len();\n\n\n\n if ipv6_length > 0 {\n\n let length_dec = ipv6_length - 1;\n\n\n\n if ipv4_length > 0 {\n\n f.write_str(\", \")?;\n\n }\n", "file_path": "src/utils/ip_cidr_combiner.rs", "rank": 26, "score": 59067.67788356907 }, { "content": " #[inline]\n\n fn next_back(&mut self) -> Option<IpAddr> {\n\n match &mut self.iter {\n\n IpCidrIpsAddrIterator::V4(iter) => iter.next_back().map(IpAddr::V4),\n\n IpCidrIpsAddrIterator::V6(iter) => iter.next_back().map(IpAddr::V6),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth_back(&mut self, n: usize) -> Option<IpAddr> {\n\n match &mut self.iter {\n\n IpCidrIpsAddrIterator::V4(iter) => iter.nth_back(n).map(IpAddr::V4),\n\n IpCidrIpsAddrIterator::V6(iter) => iter.nth_back(n).map(IpAddr::V6),\n\n }\n\n }\n\n}\n\n\n\nimpl IpCidr {\n\n #[inline]\n\n pub fn iter_as_ip_addr(&self) -> IpCidrIpAddrIterator {\n", "file_path": "src/cidr/ip_cidr_iterators.rs", "rank": 27, "score": 58872.46461537373 }, { "content": " }\n\n\n\n #[inline]\n\n fn last(self) -> Option<IpAddr> {\n\n match self.iter {\n\n IpCidrIpsAddrIterator::V4(iter) => iter.last().map(IpAddr::V4),\n\n IpCidrIpsAddrIterator::V6(iter) => iter.last().map(IpAddr::V6),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth(&mut self, n: usize) -> Option<IpAddr> {\n\n match &mut self.iter {\n\n IpCidrIpsAddrIterator::V4(iter) => iter.nth(n).map(IpAddr::V4),\n\n IpCidrIpsAddrIterator::V6(iter) => iter.nth(n).map(IpAddr::V6),\n\n }\n\n }\n\n}\n\n\n\nimpl DoubleEndedIterator for IpCidrIpAddrIterator {\n", "file_path": "src/cidr/ip_cidr_iterators.rs", "rank": 28, "score": 58871.0821514912 }, { "content": "use std::net::IpAddr;\n\n\n\nuse super::{IpCidr, Ipv4CidrIpv4AddrIterator, Ipv6CidrIpv6AddrIterator};\n\n\n\n// TODO: 
IpCidrIpAddrIterator\n\n#[derive(Debug)]\n", "file_path": "src/cidr/ip_cidr_iterators.rs", "rank": 29, "score": 58865.76946798769 }, { "content": " match self {\n\n IpCidr::V4(cidr) => {\n\n IpCidrIpAddrIterator {\n\n iter: IpCidrIpsAddrIterator::V4(cidr.iter_as_ipv4_addr()),\n\n }\n\n }\n\n IpCidr::V6(cidr) => {\n\n IpCidrIpAddrIterator {\n\n iter: IpCidrIpsAddrIterator::V6(cidr.iter_as_ipv6_addr()),\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn iter(&self) -> IpCidrIpAddrIterator {\n\n self.iter_as_ip_addr()\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr_iterators.rs", "rank": 30, "score": 58865.268950636084 }, { "content": "/// The type which can be taken as an IPv6 address.\n\n/// *An `u128` value represents an IPv6 byte array (`[u8; 16]`) in big-endian (BE) order.*\n\npub trait Ipv6Able {\n\n fn get_u128(&self) -> u128;\n\n}\n\n\n\nimpl Ipv6Able for u128 {\n\n #[inline]\n\n fn get_u128(&self) -> u128 {\n\n *self\n\n }\n\n}\n\n\n\nimpl Ipv6Able for [u8; 16] {\n\n #[inline]\n\n fn get_u128(&self) -> u128 {\n\n u128::from_be_bytes(*self)\n\n }\n\n}\n\n\n\nimpl Ipv6Able for [u16; 8] {\n\n #[inline]\n", "file_path": "src/cidr/v6/ipv6_able.rs", "rank": 31, "score": 44994.31497789171 }, { "content": "/// The type which can be taken as an IPv4 address.\n\n/// *An `u32` value represents an IPv4 byte array (`[u8; 4]`) in big-endian (BE) order.*\n\npub trait Ipv4Able {\n\n fn get_u32(&self) -> u32;\n\n}\n\n\n\nimpl Ipv4Able for u32 {\n\n #[inline]\n\n fn get_u32(&self) -> u32 {\n\n *self\n\n }\n\n}\n\n\n\nimpl Ipv4Able for [u8; 4] {\n\n #[inline]\n\n fn get_u32(&self) -> u32 {\n\n u32::from_be_bytes(*self)\n\n }\n\n}\n\n\n\nimpl Ipv4Able for Ipv4Addr {\n\n #[inline]\n", "file_path": "src/cidr/v4/ipv4_able.rs", "rank": 32, "score": 44994.31497789171 }, { "content": "#[test]\n\nfn last() {\n\n let cidr_1 = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n let cidr_2 = Ipv4Cidr::from_str(\"192.168.43.1/17\").unwrap();\n\n\n\n assert_eq!(Ipv4Addr::new(192, 168, 255, 255), cidr_1.last_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 127, 255), cidr_2.last_as_ipv4_addr());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 33, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn contains() {\n\n let cidr_1 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n let cidr_2 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFDC:0/113\").unwrap();\n\n\n\n assert_eq!(false, cidr_1.contains([0, 0, 0, 0, 0, 65535, 65534, 65535]));\n\n assert_eq!(true, cidr_1.contains([0, 0, 0, 0, 0, 65535, 65535, 1]));\n\n assert_eq!(false, cidr_2.contains([0, 0, 0, 0, 0, 65535, 65500, 32768]));\n\n assert_eq!(true, cidr_2.contains([0, 0, 0, 0, 0, 65535, 65500, 32767]));\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 34, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn compare() {\n\n let cidr_1 = Ipv4Cidr::from_str(\"192.168.51.1/24\").unwrap();\n\n let cidr_2 = Ipv4Cidr::from_str(\"192.168.43.1/25\").unwrap();\n\n\n\n assert_eq!(Ordering::Greater, cidr_1.partial_cmp(&cidr_2).unwrap());\n\n\n\n let cidr_3 = Ipv4Cidr::from_str(\"10.0.10.254\").unwrap();\n\n let cidr_4 = Ipv4Cidr::from_str(\"127.0.0.1\").unwrap();\n\n\n\n assert_eq!(Ordering::Less, cidr_3.partial_cmp(&cidr_4).unwrap());\n\n\n\n let cidr_5 = Ipv4Cidr::from_str(\"127.0.0.1\").unwrap();\n\n let cidr_6 = Ipv4Cidr::from_str(\"127.0.0.1\").unwrap();\n\n let cidr_7 = Ipv4Cidr::from_str(\"127.0.0.1/31\").unwrap();\n\n\n\n assert_eq!(Ordering::Equal, cidr_5.partial_cmp(&cidr_6).unwrap());\n\n assert_eq!(Ordering::Greater, 
cidr_5.partial_cmp(&cidr_7).unwrap());\n\n\n\n let cidr_8 = Ipv4Cidr::from_str(\"200.1.0.0/24\").unwrap();\n\n let cidr_9 = Ipv4Cidr::from_str(\"192.160.0.0/12\").unwrap();\n\n\n\n assert_eq!(Ordering::Greater, cidr_8.partial_cmp(&cidr_9).unwrap());\n\n assert_eq!(Ordering::Less, cidr_9.partial_cmp(&cidr_8).unwrap());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 35, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn from_str() {\n\n let cidr_1 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n let cidr_2 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFDC:0/113\").unwrap();\n\n\n\n assert_eq!(112, cidr_1.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 0),\n\n cidr_1.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0), cidr_1.get_prefix_as_ipv6_addr());\n\n\n\n assert_eq!(113, cidr_2.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 32768),\n\n cidr_2.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65500, 0), cidr_2.get_prefix_as_ipv6_addr());\n\n\n\n let cidr_3 = Ipv6Cidr::from_str(\"::ffff:0.128.0.128\").unwrap();\n\n\n\n assert_eq!(128, cidr_3.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535),\n\n cidr_3.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 128, 128), cidr_3.get_prefix_as_ipv6_addr());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 36, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn from_str() {\n\n let cidr_1 = Ipv4Cidr::from_str(\"192.168.51.1/24\").unwrap();\n\n let cidr_2 = Ipv4Cidr::from_str(\"192.168.43.1/25\").unwrap();\n\n\n\n assert_eq!(24, cidr_1.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 0), cidr_1.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 51, 0), cidr_1.get_prefix_as_ipv4_addr());\n\n\n\n assert_eq!(25, cidr_2.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 128), cidr_2.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 43, 0), cidr_2.get_prefix_as_ipv4_addr());\n\n\n\n let cidr_3 = Ipv4Cidr::from_str(\"0.0.255.0/255.255.0.0\").unwrap();\n\n\n\n assert_eq!(16, cidr_3.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 0, 0), cidr_3.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(0, 0, 0, 0), cidr_3.get_prefix_as_ipv4_addr());\n\n\n\n let cidr_4 = Ipv4Cidr::from_str(\"0.0.255.0\").unwrap();\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 37, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn last() {\n\n let cidr_1 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n let cidr_2 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFDC:0/113\").unwrap();\n\n\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 65535), cidr_1.last_as_ipv6_addr());\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65500, 32767), cidr_2.last_as_ipv6_addr());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 38, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn compare() {\n\n let cidr_1 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n let cidr_2 = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFDC:0/113\").unwrap();\n\n\n\n assert_eq!(Ordering::Greater, cidr_1.partial_cmp(&cidr_2).unwrap());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 39, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn contains() {\n\n let cidr_1 = Ipv4Cidr::from_str(\"192.168.51.1/16\").unwrap();\n\n let 
cidr_2 = Ipv4Cidr::from_str(\"192.168.43.1/17\").unwrap();\n\n\n\n assert_eq!(false, cidr_1.contains([127, 0, 0, 1]));\n\n assert_eq!(false, cidr_1.contains([192, 167, 0, 0]));\n\n assert_eq!(true, cidr_1.contains([192, 168, 0, 0]));\n\n assert_eq!(true, cidr_1.contains([192, 168, 51, 0]));\n\n assert_eq!(true, cidr_1.contains([192, 168, 255, 255]));\n\n assert_eq!(false, cidr_1.contains([192, 169, 0, 0]));\n\n assert_eq!(true, cidr_2.contains([192, 168, 127, 255]));\n\n assert_eq!(false, cidr_2.contains([192, 168, 128, 0]));\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 40, "score": 39856.39295076557 }, { "content": "#[test]\n\nfn divide_by() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = Ipv4CidrSeparator::divide_by(&cidr, 4).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n assert_eq!(64, result[0].size());\n\n assert_eq!(64, result[1].size());\n\n assert_eq!(64, result[2].size());\n\n assert_eq!(64, result[3].size());\n\n\n\n let cidr = Ipv4Cidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = Ipv4CidrSeparator::divide_by(&cidr, 5).unwrap();\n\n\n\n assert_eq!(5, result.len());\n\n assert_eq!(51, result[0].size());\n\n assert_eq!(51, result[1].size());\n\n assert_eq!(51, result[2].size());\n\n assert_eq!(51, result[3].size());\n", "file_path": "tests/ipv4_cidr_separator.rs", "rank": 41, "score": 38749.18453241947 }, { "content": "#[test]\n\nfn from_prefix_and_mask() {\n\n let cidr_1 =\n\n Ipv4Cidr::from_prefix_and_mask([192, 168, 51, 1], Ipv4Addr::new(255, 255, 255, 0)).unwrap();\n\n let cidr_2 =\n\n Ipv4Cidr::from_prefix_and_mask([192, 168, 43, 1], Ipv4Addr::new(255, 255, 255, 128))\n\n .unwrap();\n\n\n\n assert_eq!(24, cidr_1.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 0), cidr_1.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 51, 0), cidr_1.get_prefix_as_ipv4_addr());\n\n\n\n assert_eq!(25, cidr_2.get_bits());\n\n assert_eq!(Ipv4Addr::new(255, 255, 255, 128), cidr_2.get_mask_as_ipv4_addr());\n\n assert_eq!(Ipv4Addr::new(192, 168, 43, 0), cidr_2.get_prefix_as_ipv4_addr());\n\n}\n\n\n", "file_path": "tests/ipv4_cidr.rs", "rank": 42, "score": 38749.18453241947 }, { "content": "#[test]\n\nfn divide_by() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = Ipv6CidrSeparator::divide_by(&cidr, 4).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n assert_eq!(BigUint::from(16384u128), result[0].size());\n\n assert_eq!(BigUint::from(16384u128), result[1].size());\n\n assert_eq!(BigUint::from(16384u128), result[2].size());\n\n assert_eq!(BigUint::from(16384u128), result[3].size());\n\n\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = Ipv6CidrSeparator::divide_by(&cidr, 5).unwrap();\n\n\n\n assert_eq!(5, result.len());\n\n assert_eq!(BigUint::from(13107u128), result[0].size());\n\n assert_eq!(BigUint::from(13107u128), result[1].size());\n\n assert_eq!(BigUint::from(13107u128), result[2].size());\n\n assert_eq!(BigUint::from(13107u128), result[3].size());\n", "file_path": "tests/ipv6_cidr_separator.rs", "rank": 43, "score": 38749.18453241947 }, { "content": "#[test]\n\nfn from_prefix_and_mask() {\n\n let cidr_1 = Ipv6Cidr::from_prefix_and_mask(\n\n [0, 0, 0, 0, 0, 65535, 65535, 0],\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 0),\n\n )\n\n .unwrap();\n\n let cidr_2 = Ipv6Cidr::from_prefix_and_mask(\n\n [0, 0, 0, 0, 0, 65535, 65500, 0],\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 
65535, 65535, 32768),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(112, cidr_1.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 0),\n\n cidr_1.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65535, 0), cidr_1.get_prefix_as_ipv6_addr());\n\n\n\n assert_eq!(113, cidr_2.get_bits());\n\n assert_eq!(\n\n Ipv6Addr::new(65535, 65535, 65535, 65535, 65535, 65535, 65535, 32768),\n\n cidr_2.get_mask_as_ipv6_addr()\n\n );\n\n assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 65535, 65500, 0), cidr_2.get_prefix_as_ipv6_addr());\n\n}\n\n\n", "file_path": "tests/ipv6_cidr.rs", "rank": 44, "score": 38749.18453241947 }, { "content": "#[test]\n\nfn sub_networks() {\n\n let cidr = Ipv6Cidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = Ipv6CidrSeparator::sub_networks(&cidr, 114).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n assert_eq!(BigUint::from(16384u128), result[0].size());\n\n assert_eq!(BigUint::from(16384u128), result[1].size());\n\n assert_eq!(BigUint::from(16384u128), result[2].size());\n\n assert_eq!(BigUint::from(16384u128), result[3].size());\n\n}\n", "file_path": "tests/ipv6_cidr_separator.rs", "rank": 45, "score": 37718.67793256355 }, { "content": "#[test]\n\nfn sub_networks() {\n\n let cidr = Ipv4Cidr::from_str(\"192.168.56.0/24\").unwrap();\n\n\n\n let result = Ipv4CidrSeparator::sub_networks(&cidr, 26).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n assert_eq!(64, result[0].size());\n\n assert_eq!(64, result[1].size());\n\n assert_eq!(64, result[2].size());\n\n assert_eq!(64, result[3].size());\n\n}\n", "file_path": "tests/ipv4_cidr_separator.rs", "rank": 46, "score": 37718.67793256355 }, { "content": "extern crate cidr_utils;\n\n\n\nuse cidr_utils::{cidr::IpCidr, utils::IpCidrSeparator};\n\n\n\n#[test]\n", "file_path": "tests/ip_cidr_separator.rs", "rank": 47, "score": 30697.618136046975 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn size(&self) -> BigUint {\n\n match self {\n\n IpCidr::V4(cidr) => BigUint::from(cidr.size()),\n\n IpCidr::V6(cidr) => cidr.size(),\n\n }\n\n }\n\n}\n\n\n\nimpl IpCidr {\n\n #[inline]\n\n pub fn contains(&self, ip: IpAddr) -> bool {\n\n match self {\n\n IpCidr::V4(cidr) => {\n\n match ip {\n\n IpAddr::V4(ip) => cidr.contains(ip),\n\n IpAddr::V6(_) => false,\n\n }\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 48, "score": 30697.037810467707 }, { "content": "use std::cmp::Ordering;\n\nuse std::convert::TryFrom;\n\nuse std::fmt::{self, Debug, Display, Formatter};\n\nuse std::net::IpAddr;\n\nuse std::str::FromStr;\n\n\n\nuse crate::num_bigint::BigUint;\n\n\n\nuse super::{IpCidrError, Ipv4Cidr, Ipv4CidrError, Ipv6Cidr, Ipv6CidrError};\n\n\n\n// The type which can be taken as an IP address.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum IpCidr {\n\n V4(Ipv4Cidr),\n\n V6(Ipv6Cidr),\n\n}\n\n\n\nimpl IpCidr {\n\n #[allow(clippy::should_implement_trait)]\n\n pub fn from_str<S: AsRef<str>>(s: S) -> Result<IpCidr, IpCidrError> {\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 49, "score": 30696.77350027848 }, { "content": " pub fn is_ipv6_cidr<S: AsRef<str>>(s: S) -> bool {\n\n Ipv4Cidr::from_str(s).is_ok()\n\n }\n\n}\n\n\n\nimpl IpCidr {\n\n #[inline]\n\n pub fn first_as_ip_addr(&self) -> IpAddr {\n\n match self {\n\n IpCidr::V4(cidr) => IpAddr::V4(cidr.first_as_ipv4_addr()),\n\n IpCidr::V6(cidr) => IpAddr::V6(cidr.first_as_ipv6_addr()),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn last_as_ip_addr(&self) -> IpAddr {\n\n match self {\n\n 
IpCidr::V4(cidr) => IpAddr::V4(cidr.last_as_ipv4_addr()),\n\n IpCidr::V6(cidr) => IpAddr::V6(cidr.last_as_ipv6_addr()),\n\n }\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 50, "score": 30693.30397096759 }, { "content": " }\n\n IpCidr::V6(cidr) => {\n\n match ip {\n\n IpAddr::V4(_) => false,\n\n IpAddr::V6(ip) => cidr.contains(ip),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Display for IpCidr {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n match self {\n\n IpCidr::V4(cidr) => Display::fmt(&cidr, f),\n\n IpCidr::V6(cidr) => Display::fmt(&cidr, f),\n\n }\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 51, "score": 30693.167388690752 }, { "content": "\n\nimpl FromStr for IpCidr {\n\n type Err = IpCidrError;\n\n\n\n #[inline]\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n IpCidr::from_str(s)\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for IpCidr {\n\n type Error = IpCidrError;\n\n\n\n #[inline]\n\n fn try_from(s: &str) -> Result<Self, Self::Error> {\n\n IpCidr::from_str(s)\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 52, "score": 30692.104693812442 }, { "content": "\n\nimpl PartialEq<Ipv4Cidr> for IpCidr {\n\n #[inline]\n\n fn eq(&self, other: &Ipv4Cidr) -> bool {\n\n match self {\n\n IpCidr::V4(cidr) => cidr.eq(other),\n\n IpCidr::V6(_) => false,\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq<IpCidr> for Ipv4Cidr {\n\n #[inline]\n\n fn eq(&self, other: &IpCidr) -> bool {\n\n match other {\n\n IpCidr::V4(cidr) => self.eq(cidr),\n\n IpCidr::V6(_) => false,\n\n }\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 53, "score": 30691.833724923683 }, { "content": "\n\nimpl PartialEq<Ipv6Cidr> for IpCidr {\n\n #[inline]\n\n fn eq(&self, other: &Ipv6Cidr) -> bool {\n\n match self {\n\n IpCidr::V4(_) => false,\n\n IpCidr::V6(cidr) => cidr.eq(other),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq<IpCidr> for Ipv6Cidr {\n\n #[inline]\n\n fn eq(&self, other: &IpCidr) -> bool {\n\n match other {\n\n IpCidr::V4(_) => false,\n\n IpCidr::V6(cidr) => self.eq(cidr),\n\n }\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 54, "score": 30691.833724923683 }, { "content": "\n\nimpl PartialOrd<Ipv4Cidr> for IpCidr {\n\n #[inline]\n\n fn partial_cmp(&self, other: &Ipv4Cidr) -> Option<Ordering> {\n\n match self {\n\n IpCidr::V4(cidr) => cidr.partial_cmp(other),\n\n IpCidr::V6(_) => Some(Ordering::Greater),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialOrd<IpCidr> for Ipv4Cidr {\n\n #[inline]\n\n fn partial_cmp(&self, other: &IpCidr) -> Option<Ordering> {\n\n match other {\n\n IpCidr::V4(cidr) => self.partial_cmp(cidr),\n\n IpCidr::V6(_) => Some(Ordering::Less),\n\n }\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 55, "score": 30691.55989772569 }, { "content": "\n\nimpl PartialOrd<Ipv6Cidr> for IpCidr {\n\n #[inline]\n\n fn partial_cmp(&self, other: &Ipv6Cidr) -> Option<Ordering> {\n\n match self {\n\n IpCidr::V4(_) => Some(Ordering::Less),\n\n IpCidr::V6(cidr) => cidr.partial_cmp(other),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialOrd<IpCidr> for Ipv6Cidr {\n\n #[inline]\n\n fn partial_cmp(&self, other: &IpCidr) -> Option<Ordering> {\n\n match other {\n\n IpCidr::V4(_) => Some(Ordering::Greater),\n\n IpCidr::V6(cidr) => self.partial_cmp(cidr),\n\n }\n\n }\n\n}\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 56, "score": 30691.55989772569 }, { "content": " let s = s.as_ref();\n\n\n\n match Ipv4Cidr::from_str(s) {\n\n Ok(cidr) => Ok(IpCidr::V4(cidr)),\n\n Err(err) => {\n\n match err {\n\n Ipv4CidrError::IncorrectBitsRange => 
Err(IpCidrError::IncorrectBitsRange),\n\n Ipv4CidrError::IncorrectMask => Err(IpCidrError::IncorrectMask),\n\n Ipv4CidrError::IncorrectIpv4CIDRString => {\n\n match Ipv6Cidr::from_str(s) {\n\n Ok(cidr) => Ok(IpCidr::V6(cidr)),\n\n Err(err) => {\n\n match err {\n\n Ipv6CidrError::IncorrectBitsRange => {\n\n Err(IpCidrError::IncorrectBitsRange)\n\n }\n\n Ipv6CidrError::IncorrectMask => Err(IpCidrError::IncorrectMask),\n\n Ipv6CidrError::IncorrectIpv6CIDRString => {\n\n Err(IpCidrError::IncorrectIpCIDRString)\n\n }\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 57, "score": 30691.421558295744 }, { "content": " }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_ip_cidr<S: AsRef<str>>(s: S) -> bool {\n\n Self::from_str(s).is_ok()\n\n }\n\n\n\n #[inline]\n\n pub fn is_ipv4_cidr<S: AsRef<str>>(s: S) -> bool {\n\n Ipv4Cidr::from_str(s).is_ok()\n\n }\n\n\n\n #[inline]\n", "file_path": "src/cidr/ip_cidr.rs", "rank": 58, "score": 30690.68595927519 }, { "content": "\n\n assert_eq!(2, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 4).unwrap();\n\n\n\n assert_eq!(4, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"0:0:0:0:0:FFFF:FFFF:0/112\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 5).unwrap();\n\n\n\n assert_eq!(5, result.len());\n\n\n\n let cidr = IpCidr::from_str(\"::0/0\").unwrap();\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 1).unwrap();\n\n\n\n assert_eq!(1, result.len());\n\n\n\n let result = IpCidrSeparator::divide_by(&cidr, 2).unwrap();\n\n\n\n assert_eq!(2, result.len());\n\n}\n\n\n", "file_path": "tests/ip_cidr_separator.rs", "rank": 59, "score": 30688.922656568186 }, { "content": "extern crate cidr_utils;\n\n\n\nuse cidr_utils::{cidr::Ipv6Cidr, utils::Ipv6CidrCombiner};\n\n\n\n#[test]\n", "file_path": "tests/ipv6_cidr_combiner.rs", "rank": 60, "score": 30590.465473822314 }, { "content": "extern crate cidr_utils;\n\n\n\nuse cidr_utils::{cidr::Ipv4Cidr, utils::Ipv4CidrCombiner};\n\n\n\n#[test]\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 61, "score": 30590.465473822314 }, { "content": " assert_eq!(1, combiner.len());\n\n assert_eq!(\"192.168.0.0/14\", combiner[0].to_string());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.167.0.0/16\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.166.0.0/16\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.166.0.0/16\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.164.0.0/15\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.172.0.0/14\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.160.0.0/14\").unwrap());\n\n assert_eq!(1, combiner.len());\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 62, "score": 30583.110328105122 }, { "content": "\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.0.0/16\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.168.3.0/24\").unwrap());\n\n\n\n assert_eq!(1, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.171.0.0/16\").unwrap());\n\n\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"192.170.0.0/16\").unwrap());\n\n\n\n assert_eq!(2, combiner.len());\n\n assert_eq!(\"192.170.0.0/15\", combiner[1].to_string());\n\n\n\n 
combiner.push(Ipv4Cidr::from_str(\"192.169.0.0/16\").unwrap());\n\n\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 63, "score": 30583.06263684883 }, { "content": " //\n\n // 192.168.0.0/14 + 192.164.0.0/14 + 192.172.0.0/14 = 192.168.0.0/14 + 192.164.0.0/13\n\n //\n\n // 192.168.0.0/14 + 192.164.0.0/13 + 192.160.0.0/14 = 192.160.0.0/13 + 192.164.0.0/13 = 192.160.0.0/12\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"200.1.0.0/24\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"200.1.1.0/24\").unwrap());\n\n assert_eq!(2, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"0.0.0.0/1\").unwrap());\n\n assert_eq!(3, combiner.len());\n\n\n\n combiner.push(Ipv4Cidr::from_str(\"0.0.0.0/0\").unwrap());\n\n assert_eq!(1, combiner.len());\n\n\n\n // 192.160.0.0/12 + 200.1.0.0/24 = 192.160.0.0/12 + 200.1.0.0/24\n\n //\n\n // 192.160.0.0/12 + 200.1.0.0/24 + 200.1.1.0/24 = 192.160.0.0/12 + 200.1.0.0/23\n\n //\n\n // 192.160.0.0/12 + 200.1.0.0/23 + 0.0.0.0/1 = 192.160.0.0/12 + 200.1.0.0/23 + 0.0.0.0/1\n\n //\n\n // 192.160.0.0/12 + 200.1.0.0/23 + 0.0.0.0/1 + 0.0.0.0/0 = 0.0.0.0/0\n\n}\n\n\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 64, "score": 30583.035484957374 }, { "content": "\n\n // 192.168.1.0/24 + 192.168.1.0/24 = 192.168.1.0/24\n\n //\n\n // 192.168.1.0/24 + 192.168.2.0/24 = 192.168.1.0/24 + 192.168.2.0/24\n\n //\n\n // 192.168.1.0/24 + 192.168.2.0/24 + 192.168.0.0/16 = 192.168.0.0/16\n\n //\n\n // 192.168.0.0/16 + 192.168.3.0/24 = 192.168.0.0/16\n\n //\n\n // 192.168.0.0/16 + 192.171.0.0/16 = 192.168.0.0/16 + 192.171.0.0/16\n\n //\n\n // 192.168.0.0/16 + 192.171.0.0/16 + 192.170.0.0/16 = 192.168.0.0/16 + 192.170.0.0/15\n\n //\n\n // 192.168.0.0/16 + 192.170.0.0/15 + 192.169.0.0/16 = 192.168.0.0/15 + 192.170.0.0/15 = 192.168.0.0/14\n\n //\n\n // 192.168.0.0/14 + 192.167.0.0/16 = 192.168.0.0/14 + 192.167.0.0/16\n\n //\n\n // 192.168.0.0/14 + 192.167.0.0/16 + 192.166.0.0/16 = 192.168.0.0/14 + 192.166.0.0/15\n\n //\n\n // 192.168.0.0/14 + 192.166.0.0/15 + 192.164.0.0/15 = 192.168.0.0/14 + 192.164.0.0/14\n", "file_path": "tests/ipv4_cidr_combiner.rs", "rank": 65, "score": 30578.75217215867 }, { "content": "use crate::cidr::IpCidr;\n\nuse crate::utils::{IpCidrCombiner, Ipv4CidrSeparator, Ipv6CidrSeparator};\n\n\n\n/// To divide an IP CIDR into subnetworks.\n\n#[derive(Debug)]\n\npub struct IpCidrSeparator;\n\n\n\nimpl IpCidrSeparator {\n\n /// Evenly divide an IP CIDR into a specific number of subnetworks.\n\n pub fn divide_by(cidr: &IpCidr, n: usize) -> Option<Vec<IpCidrCombiner>> {\n\n match cidr {\n\n IpCidr::V4(cidr) => {\n\n Ipv4CidrSeparator::divide_by(cidr, n).map(|v| {\n\n v.into_iter()\n\n .map(|combiner| unsafe {\n\n IpCidrCombiner::from_cidr_vec_unchecked(\n\n combiner.into_ipv4_cidr_vec(),\n\n vec![],\n\n )\n\n })\n", "file_path": "src/utils/ip_cidr_separator.rs", "rank": 66, "score": 29602.18676823116 }, { "content": " .collect()\n\n })\n\n }\n\n IpCidr::V6(cidr) => {\n\n Ipv6CidrSeparator::divide_by(cidr, n).map(|v| {\n\n v.into_iter()\n\n .map(|combiner| unsafe {\n\n IpCidrCombiner::from_cidr_vec_unchecked(\n\n vec![],\n\n combiner.into_ipv6_cidr_vec(),\n\n )\n\n })\n\n .collect()\n\n })\n\n }\n\n }\n\n }\n\n\n\n /// Divide an IP CIDR into subnetworks with a specific bits.\n\n pub fn sub_networks(cidr: &IpCidr, bits: u8) -> Option<Vec<IpCidr>> {\n", "file_path": "src/utils/ip_cidr_separator.rs", "rank": 67, "score": 29599.51238522089 }, { "content": "use std::error::Error;\n\nuse std::fmt::{self, Display, 
Formatter};\n\n\n\nuse super::{Ipv4CidrError, Ipv6CidrError};\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\n/// Possible errors of `IpCidr`.\n\npub enum IpCidrError {\n\n IncorrectBitsRange,\n\n IncorrectMask,\n\n IncorrectIpCIDRString,\n\n}\n\n\n\nimpl From<Ipv4CidrError> for IpCidrError {\n\n #[inline]\n\n fn from(error: Ipv4CidrError) -> IpCidrError {\n\n match error {\n\n Ipv4CidrError::IncorrectBitsRange => IpCidrError::IncorrectBitsRange,\n\n Ipv4CidrError::IncorrectMask => IpCidrError::IncorrectMask,\n\n Ipv4CidrError::IncorrectIpv4CIDRString => IpCidrError::IncorrectIpCIDRString,\n", "file_path": "src/cidr/ip_cidr_error.rs", "rank": 68, "score": 29595.146590086166 }, { "content": " f.write_str(\"The subnet size (bits) is out of range.\")\n\n }\n\n IpCidrError::IncorrectMask => f.write_str(\"The mask is incorrect.\"),\n\n IpCidrError::IncorrectIpCIDRString => f.write_str(\"The CIDR string is incorrect.\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for IpCidrError {}\n", "file_path": "src/cidr/ip_cidr_error.rs", "rank": 69, "score": 29594.5742477448 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<Ipv6CidrError> for IpCidrError {\n\n #[inline]\n\n fn from(error: Ipv6CidrError) -> IpCidrError {\n\n match error {\n\n Ipv6CidrError::IncorrectBitsRange => IpCidrError::IncorrectBitsRange,\n\n Ipv6CidrError::IncorrectMask => IpCidrError::IncorrectMask,\n\n Ipv6CidrError::IncorrectIpv6CIDRString => IpCidrError::IncorrectIpCIDRString,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for IpCidrError {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {\n\n match self {\n\n IpCidrError::IncorrectBitsRange => {\n", "file_path": "src/cidr/ip_cidr_error.rs", "rank": 70, "score": 29594.318998498024 }, { "content": " match cidr {\n\n IpCidr::V4(cidr) => {\n\n Ipv4CidrSeparator::sub_networks(cidr, bits)\n\n .map(|v| v.into_iter().map(IpCidr::V4).collect())\n\n }\n\n IpCidr::V6(cidr) => {\n\n Ipv6CidrSeparator::sub_networks(cidr, bits)\n\n .map(|v| v.into_iter().map(IpCidr::V6).collect())\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/utils/ip_cidr_separator.rs", "rank": 71, "score": 29593.652843262334 }, { "content": "extern crate num_traits;\n\n\n\nuse std::fmt::{self, Display, Formatter, Write};\n\nuse std::ops::Deref;\n\n\n\nuse crate::cidr::{Ipv6Able, Ipv6Cidr};\n\nuse crate::num_bigint::BigUint;\n\n\n\nuse num_traits::Zero;\n\n\n\n/// To combine multiple IPv6 CIDRs to supernetworks.\n\n#[derive(Debug)]\n\npub struct Ipv6CidrCombiner {\n\n cidr_array: Vec<Ipv6Cidr>,\n\n}\n\n\n\nimpl Ipv6CidrCombiner {\n\n #[inline]\n\n /// Create a new `Ipv6CidrCombiner` instance.\n\n pub fn new() -> Ipv6CidrCombiner {\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 72, "score": 28475.768935795604 }, { "content": " }\n\n\n\n #[inline]\n\n /// Check an IPv6 whether it is in these CIDRs.\n\n pub fn contains<IP: Ipv6Able>(&self, ipv6: IP) -> bool {\n\n for cidr in self.cidr_array.iter() {\n\n if cidr.contains(&ipv6) {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n #[inline]\n\n pub fn size(&self) -> BigUint {\n\n let mut sum = BigUint::zero();\n\n\n\n for cidr in self.cidr_array.iter() {\n\n let size = cidr.size();\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 73, "score": 28474.189050059955 }, { "content": "use std::fmt::{self, Display, Formatter, Write};\n\nuse std::ops::Deref;\n\n\n\nuse crate::cidr::{Ipv4Able, Ipv4Cidr};\n\n\n\n/// To combine multiple IPv4 CIDRs to supernetworks.\n\n#[derive(Debug)]\n\npub struct Ipv4CidrCombiner {\n\n cidr_array: 
Vec<Ipv4Cidr>,\n\n}\n\n\n\nimpl Ipv4CidrCombiner {\n\n #[inline]\n\n /// Create a new `Ipv4CidrCombiner` instance.\n\n pub fn new() -> Ipv4CidrCombiner {\n\n Ipv4CidrCombiner {\n\n cidr_array: Vec::new(),\n\n }\n\n }\n\n\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 74, "score": 28472.951551606846 }, { "content": " for cidr in self.cidr_array.iter() {\n\n if cidr.contains(&ipv4) {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n #[inline]\n\n pub fn size(&self) -> u64 {\n\n let mut sum = 0;\n\n\n\n for cidr in self.cidr_array.iter() {\n\n sum += cidr.size();\n\n }\n\n\n\n sum\n\n }\n\n}\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 75, "score": 28472.015449803155 }, { "content": "\n\n sum += size;\n\n }\n\n\n\n sum\n\n }\n\n}\n\n\n\nimpl Display for Ipv6CidrCombiner {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n f.write_char('[')?;\n\n\n\n let length = self.cidr_array.len();\n\n\n\n if length > 0 {\n\n let length_dec = length - 1;\n\n\n\n for cidr in self.cidr_array.iter().take(length_dec) {\n\n f.write_fmt(format_args!(\"{}, \", cidr))?\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 76, "score": 28470.692088307893 }, { "content": "\n\n #[inline]\n\n pub fn into_ipv6_cidr_vec(self) -> Vec<Ipv6Cidr> {\n\n self.cidr_array\n\n }\n\n}\n\n\n\nimpl Ipv6CidrCombiner {\n\n /// Push a CIDR into this combiner.\n\n pub fn push(&mut self, mut cidr: Ipv6Cidr) {\n\n if let Err(mut index) = self.cidr_array.binary_search(&cidr) {\n\n if self.cidr_array.is_empty() {\n\n self.cidr_array.push(cidr);\n\n } else {\n\n let pushable = if index == 0 {\n\n true\n\n } else {\n\n let previous_cidr = self.cidr_array.get(index - 1).unwrap();\n\n\n\n !previous_cidr.contains(&cidr.first())\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 77, "score": 28470.07254551629 }, { "content": "}\n\n\n\nimpl Ipv4CidrCombiner {\n\n /// Push a CIDR into this combiner.\n\n pub fn push(&mut self, mut cidr: Ipv4Cidr) {\n\n if let Err(mut index) = self.cidr_array.binary_search(&cidr) {\n\n if self.cidr_array.is_empty() {\n\n self.cidr_array.push(cidr);\n\n } else {\n\n let pushable = if index == 0 {\n\n true\n\n } else {\n\n let previous_cidr = self.cidr_array.get(index - 1).unwrap();\n\n\n\n !previous_cidr.contains(&cidr.first())\n\n };\n\n\n\n if pushable {\n\n loop {\n\n if index == self.cidr_array.len() {\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 78, "score": 28469.562667783193 }, { "content": " #[inline]\n\n /// Create a new `Ipv4CidrCombiner` instance with a specific capacity.\n\n pub fn with_capacity(capacity: usize) -> Ipv4CidrCombiner {\n\n Ipv4CidrCombiner {\n\n cidr_array: Vec::with_capacity(capacity),\n\n }\n\n }\n\n\n\n #[allow(clippy::missing_safety_doc)]\n\n #[inline]\n\n pub unsafe fn from_ipv4_cidr_vec_unchecked(cidr_vec: Vec<Ipv4Cidr>) -> Ipv4CidrCombiner {\n\n Ipv4CidrCombiner {\n\n cidr_array: cidr_vec,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn into_ipv4_cidr_vec(self) -> Vec<Ipv4Cidr> {\n\n self.cidr_array\n\n }\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 79, "score": 28468.416578853205 }, { "content": " previous_prefix,\n\n previous_bits - 1,\n\n )\n\n .unwrap();\n\n\n\n merging = true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n self.cidr_array.insert(index, cidr);\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n /// Check an IPv4 whether it is in these CIDRs.\n\n pub fn contains<IP: Ipv4Able>(&self, ipv4: IP) -> bool {\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 
80, "score": 28468.37921344022 }, { "content": " Ipv6CidrCombiner {\n\n cidr_array: Vec::new(),\n\n }\n\n }\n\n\n\n #[inline]\n\n /// Create a new `Ipv6CidrCombiner` instance with a specific capacity.\n\n pub fn with_capacity(capacity: usize) -> Ipv6CidrCombiner {\n\n Ipv6CidrCombiner {\n\n cidr_array: Vec::with_capacity(capacity),\n\n }\n\n }\n\n\n\n #[allow(clippy::missing_safety_doc)]\n\n #[inline]\n\n pub unsafe fn from_ipv6_cidr_vec_unchecked(cidr_vec: Vec<Ipv6Cidr>) -> Ipv6CidrCombiner {\n\n Ipv6CidrCombiner {\n\n cidr_array: cidr_vec,\n\n }\n\n }\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 81, "score": 28468.26178378641 }, { "content": "\n\nimpl Display for Ipv4CidrCombiner {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n f.write_char('[')?;\n\n\n\n let length = self.cidr_array.len();\n\n\n\n if length > 0 {\n\n let length_dec = length - 1;\n\n\n\n for cidr in self.cidr_array.iter().take(length_dec) {\n\n f.write_fmt(format_args!(\"{}, \", cidr))?\n\n }\n\n\n\n f.write_fmt(format_args!(\"{}\", self.cidr_array[length_dec]))?;\n\n }\n\n\n\n f.write_char(']')\n\n }\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 82, "score": 28467.9287521821 }, { "content": "}\n\n\n\nimpl Deref for Ipv4CidrCombiner {\n\n type Target = Vec<Ipv4Cidr>;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Vec<Ipv4Cidr> {\n\n &self.cidr_array\n\n }\n\n}\n\n\n\nimpl Default for Ipv4CidrCombiner {\n\n #[inline]\n\n fn default() -> Self {\n\n Ipv4CidrCombiner::new()\n\n }\n\n}\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 83, "score": 28467.18296902771 }, { "content": " }\n\n\n\n f.write_fmt(format_args!(\"{}\", self.cidr_array[length_dec]))?;\n\n }\n\n\n\n f.write_char(']')\n\n }\n\n}\n\n\n\nimpl Deref for Ipv6CidrCombiner {\n\n type Target = Vec<Ipv6Cidr>;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Vec<Ipv6Cidr> {\n\n &self.cidr_array\n\n }\n\n}\n\n\n\nimpl Default for Ipv6CidrCombiner {\n\n #[inline]\n\n fn default() -> Self {\n\n Ipv6CidrCombiner::new()\n\n }\n\n}\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 84, "score": 28466.6569851068 }, { "content": " merging = true;\n\n }\n\n }\n\n }\n\n\n\n if index > 0 {\n\n let index_dec = index - 1;\n\n\n\n let previous_cidr = self.cidr_array.get_mut(index_dec).unwrap();\n\n\n\n let previous_bits = previous_cidr.get_bits();\n\n let bits = cidr.get_bits();\n\n\n\n if bits == previous_bits {\n\n let previous_prefix = previous_cidr.get_prefix();\n\n let prefix = cidr.get_prefix();\n\n\n\n let d = prefix ^ previous_prefix;\n\n\n\n if d == 1 << (128 - bits) as u128 {\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 85, "score": 28466.39252959785 }, { "content": " if index > 0 {\n\n let index_dec = index - 1;\n\n\n\n let previous_cidr = self.cidr_array.get_mut(index_dec).unwrap();\n\n\n\n let previous_bits = previous_cidr.get_bits();\n\n let bits = cidr.get_bits();\n\n\n\n if bits == previous_bits {\n\n let previous_prefix = previous_cidr.get_prefix();\n\n let prefix = cidr.get_prefix();\n\n\n\n let d = prefix ^ previous_prefix;\n\n\n\n if d == 1 << (32 - bits) as u32 {\n\n self.cidr_array.remove(index_dec);\n\n\n\n index = index_dec;\n\n\n\n cidr = Ipv4Cidr::from_prefix_and_bits(\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 86, "score": 28466.134453204915 }, { "content": " let next_bits = next_cidr.get_bits();\n\n let bits = cidr.get_bits();\n\n\n\n if bits == next_bits {\n\n let next_prefix = next_cidr.get_prefix();\n\n let prefix = 
cidr.get_prefix();\n\n\n\n let d = next_prefix ^ prefix;\n\n\n\n if d == 1 << (32 - bits) as u32 {\n\n cidr =\n\n Ipv4Cidr::from_prefix_and_bits(prefix, bits - 1).unwrap();\n\n\n\n self.cidr_array.remove(index);\n\n\n\n merging = true;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 87, "score": 28464.703491415985 }, { "content": " merging = false;\n\n\n\n if index < self.cidr_array.len() {\n\n let next_cidr = self.cidr_array.get(index).unwrap();\n\n\n\n let next_bits = next_cidr.get_bits();\n\n let bits = cidr.get_bits();\n\n\n\n if bits == next_bits {\n\n let next_prefix = next_cidr.get_prefix();\n\n let prefix = cidr.get_prefix();\n\n\n\n let d = next_prefix ^ prefix;\n\n\n\n if d == 1 << (128 - bits) as u128 {\n\n cidr =\n\n Ipv6Cidr::from_prefix_and_bits(prefix, bits - 1).unwrap();\n\n\n\n self.cidr_array.remove(index);\n\n\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 88, "score": 28464.571067910576 }, { "content": " self.cidr_array.remove(index_dec);\n\n\n\n index = index_dec;\n\n\n\n cidr = Ipv6Cidr::from_prefix_and_bits(\n\n previous_prefix,\n\n previous_bits - 1,\n\n )\n\n .unwrap();\n\n\n\n merging = true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n self.cidr_array.insert(index, cidr);\n\n }\n\n }\n\n }\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 89, "score": 28463.70474407701 }, { "content": " };\n\n\n\n if pushable {\n\n loop {\n\n if index == self.cidr_array.len() {\n\n break;\n\n }\n\n\n\n let next = self.cidr_array.get(index).unwrap();\n\n\n\n if cidr.contains(next.first()) {\n\n self.cidr_array.remove(index);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n let mut merging = true;\n\n\n\n while merging {\n", "file_path": "src/utils/v6/ipv6_cidr_combiner.rs", "rank": 90, "score": 28462.7572504126 }, { "content": " break;\n\n }\n\n\n\n let next = self.cidr_array.get(index).unwrap();\n\n\n\n if cidr.contains(next.first()) {\n\n self.cidr_array.remove(index);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n let mut merging = true;\n\n\n\n while merging {\n\n merging = false;\n\n\n\n if index < self.cidr_array.len() {\n\n let next_cidr = self.cidr_array.get(index).unwrap();\n\n\n", "file_path": "src/utils/v4/ipv4_cidr_combiner.rs", "rank": 91, "score": 28462.53948730523 }, { "content": " #[inline]\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n if self.next < self.back {\n\n Some(unsafe { self.next_back_unchecked() })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth_back(&mut self, n: usize) -> Option<Self::Item> {\n\n self.nth_back_big_uint(BigUint::from(n))\n\n }\n\n}\n\n\n\nimpl Ipv6Cidr {\n\n #[inline]\n\n pub fn iter_as_u8_array(&self) -> Ipv6CidrU8ArrayIterator {\n\n let from = self.first();\n\n let size = self.size();\n", "file_path": "src/cidr/v6/ipv6_cidr_iterators.rs", "rank": 92, "score": 28278.355703319357 }, { "content": "impl DoubleEndedIterator for Ipv4CidrU8ArrayIterator {\n\n #[inline]\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n if self.next < self.back {\n\n Some(unsafe { self.next_back_unchecked() })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth_back(&mut self, n: usize) -> Option<Self::Item> {\n\n self.nth_back_u64(n as u64)\n\n }\n\n}\n\n\n\nimpl Ipv4Cidr {\n\n #[inline]\n\n pub fn iter_as_u8_array(&self) -> Ipv4CidrU8ArrayIterator {\n\n let from = self.first();\n", "file_path": "src/cidr/v4/ipv4_cidr_iterators.rs", "rank": 93, "score": 28276.834505731735 }, { "content": " fn next_back(&mut self) -> Option<Self::Item> {\n\n if self.next < self.back {\n\n 
Some(unsafe { self.next_back_unchecked() })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth_back(&mut self, n: usize) -> Option<Self::Item> {\n\n self.nth_back_big_uint(BigUint::from(n))\n\n }\n\n}\n\n\n\nimpl Ipv6Cidr {\n\n #[inline]\n\n pub fn iter_as_u16_array(&self) -> Ipv6CidrU16ArrayIterator {\n\n let from = self.first();\n\n let size = self.size();\n\n\n", "file_path": "src/cidr/v6/ipv6_cidr_iterators.rs", "rank": 94, "score": 28275.766188584334 }, { "content": " fn nth(&mut self, n: usize) -> Option<Self::Item> {\n\n self.iter.nth(n).map(u32::from_be_bytes)\n\n }\n\n}\n\n\n\nimpl DoubleEndedIterator for Ipv4CidrIterator {\n\n #[inline]\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n self.iter.next_back().map(u32::from_be_bytes)\n\n }\n\n\n\n #[inline]\n\n fn nth_back(&mut self, n: usize) -> Option<Self::Item> {\n\n self.iter.nth_back(n).map(u32::from_be_bytes)\n\n }\n\n}\n\n\n\nimpl Ipv4Cidr {\n\n #[inline]\n\n pub fn iter(&self) -> Ipv4CidrIterator {\n", "file_path": "src/cidr/v4/ipv4_cidr_iterators.rs", "rank": 95, "score": 28274.005266273925 }, { "content": " } else {\n\n self.next = self.size;\n\n\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn nth_back_u64(&mut self, n: u64) -> Option<[u8; 4]> {\n\n if self.back > n {\n\n self.back -= n;\n\n\n\n if self.next < self.back {\n\n return Some(unsafe { self.next_back_unchecked() });\n\n }\n\n }\n\n\n\n self.next = self.size;\n\n\n\n None\n", "file_path": "src/cidr/v4/ipv4_cidr_iterators.rs", "rank": 96, "score": 28273.745983592755 }, { "content": "use std::net::Ipv4Addr;\n\n\n\nuse super::Ipv4Cidr;\n\n\n\n// TODO: Ipv4CidrU8ArrayIterator\n\n\n\n/// To iterate IPv4 CIDRs.\n\n#[derive(Debug)]\n\npub struct Ipv4CidrU8ArrayIterator {\n\n from: u32,\n\n next: u64,\n\n back: u64,\n\n size: u64,\n\n}\n\n\n\nimpl Ipv4CidrU8ArrayIterator {\n\n #[inline]\n\n unsafe fn next_unchecked(&mut self) -> [u8; 4] {\n\n let p = self.from + self.next as u32;\n\n\n", "file_path": "src/cidr/v4/ipv4_cidr_iterators.rs", "rank": 97, "score": 28273.724333332826 }, { "content": " }\n\n\n\n #[inline]\n\n fn last(mut self) -> Option<Self::Item> {\n\n if self.next < self.back {\n\n self.next = self.back.clone() - BigUint::one();\n\n\n\n Some(unsafe { self.next_unchecked() })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n fn nth(&mut self, n: usize) -> Option<Self::Item> {\n\n self.nth_big_uint(BigUint::from(n))\n\n }\n\n}\n\n\n\nimpl DoubleEndedIterator for Ipv6CidrU8ArrayIterator {\n", "file_path": "src/cidr/v6/ipv6_cidr_iterators.rs", "rank": 98, "score": 28273.694905318967 }, { "content": "extern crate num_traits;\n\n\n\nuse std::net::Ipv6Addr;\n\n\n\nuse crate::num_bigint::BigUint;\n\n\n\nuse num_traits::{One, ToPrimitive, Zero};\n\n\n\nuse super::functions::*;\n\nuse super::Ipv6Cidr;\n\n\n\n// TODO: Ipv6CidrU8ArrayIterator\n\n\n\n/// To iterate IPv6 CIDRs.\n\n#[derive(Debug)]\n\npub struct Ipv6CidrU8ArrayIterator {\n\n from: u128,\n\n next: BigUint,\n\n back: BigUint,\n\n size: BigUint,\n", "file_path": "src/cidr/v6/ipv6_cidr_iterators.rs", "rank": 99, "score": 28273.671084549558 } ]
Rust
crates/bench-api/src/lib.rs
dheaton-arm/wasmtime
86611d3bbc92b781ed136dcda7cdba9ec2c1cbee
mod unsafe_send_sync;

use crate::unsafe_send_sync::UnsafeSendSync;
use anyhow::{anyhow, Context, Result};
use std::os::raw::{c_int, c_void};
use std::slice;
use std::{env, path::PathBuf};
use wasmtime::{Config, Engine, Instance, Linker, Module, Store};
use wasmtime_wasi::{sync::WasiCtxBuilder, WasiCtx};

pub type ExitCode = c_int;
pub const OK: ExitCode = 0;
pub const ERR: ExitCode = -1;

#[cfg(feature = "shuffling-allocator")]
#[global_allocator]
static ALLOC: shuffling_allocator::ShufflingAllocator<std::alloc::System> =
    shuffling_allocator::wrap!(&std::alloc::System);

#[repr(C)]
pub struct WasmBenchConfig {
    pub working_dir_ptr: *const u8,
    pub working_dir_len: usize,
    pub stdout_path_ptr: *const u8,
    pub stdout_path_len: usize,
    pub stderr_path_ptr: *const u8,
    pub stderr_path_len: usize,
    pub stdin_path_ptr: *const u8,
    pub stdin_path_len: usize,
    pub compilation_timer: *mut u8,
    pub compilation_start: extern "C" fn(*mut u8),
    pub compilation_end: extern "C" fn(*mut u8),
    pub instantiation_timer: *mut u8,
    pub instantiation_start: extern "C" fn(*mut u8),
    pub instantiation_end: extern "C" fn(*mut u8),
    pub execution_timer: *mut u8,
    pub execution_start: extern "C" fn(*mut u8),
    pub execution_end: extern "C" fn(*mut u8),
}

impl WasmBenchConfig {
    fn working_dir(&self) -> Result<PathBuf> {
        let working_dir =
            unsafe { std::slice::from_raw_parts(self.working_dir_ptr, self.working_dir_len) };
        let working_dir = std::str::from_utf8(working_dir)
            .context("given working directory is not valid UTF-8")?;
        Ok(working_dir.into())
    }

    fn stdout_path(&self) -> Result<PathBuf> {
        let stdout_path =
            unsafe { std::slice::from_raw_parts(self.stdout_path_ptr, self.stdout_path_len) };
        let stdout_path =
            std::str::from_utf8(stdout_path).context("given stdout path is not valid UTF-8")?;
        Ok(stdout_path.into())
    }

    fn stderr_path(&self) -> Result<PathBuf> {
        let stderr_path =
            unsafe { std::slice::from_raw_parts(self.stderr_path_ptr, self.stderr_path_len) };
        let stderr_path =
            std::str::from_utf8(stderr_path).context("given stderr path is not valid UTF-8")?;
        Ok(stderr_path.into())
    }

    fn stdin_path(&self) -> Result<Option<PathBuf>> {
        if self.stdin_path_ptr.is_null() {
            return Ok(None);
        }
        let stdin_path =
            unsafe { std::slice::from_raw_parts(self.stdin_path_ptr, self.stdin_path_len) };
        let stdin_path =
            std::str::from_utf8(stdin_path).context("given stdin path is not valid UTF-8")?;
        Ok(Some(stdin_path.into()))
    }
}

#[no_mangle]
pub extern "C" fn wasm_bench_create(
    config: WasmBenchConfig,
    out_bench_ptr: *mut *mut c_void,
) -> ExitCode {
    let result = (|| -> Result<_> {
        let working_dir = config.working_dir()?;
        let working_dir =
            cap_std::fs::Dir::open_ambient_dir(&working_dir, cap_std::ambient_authority())
                .with_context(|| {
                    format!(
                        "failed to preopen the working directory: {}",
                        working_dir.display(),
                    )
                })?;

        let stdout_path = config.stdout_path()?;
        let stderr_path = config.stderr_path()?;
        let stdin_path = config.stdin_path()?;

        let state = Box::new(BenchState::new(
            config.compilation_timer,
            config.compilation_start,
            config.compilation_end,
            config.instantiation_timer,
            config.instantiation_start,
            config.instantiation_end,
            config.execution_timer,
            config.execution_start,
            config.execution_end,
            move || {
                let mut cx = WasiCtxBuilder::new();

                let stdout = std::fs::File::create(&stdout_path)
                    .with_context(|| format!("failed to create {}", stdout_path.display()))?;
                let stdout = cap_std::fs::File::from_std(stdout, cap_std::ambient_authority());
                let stdout = wasi_cap_std_sync::file::File::from_cap_std(stdout);
                cx = cx.stdout(Box::new(stdout));

                let stderr = std::fs::File::create(&stderr_path)
                    .with_context(|| format!("failed to create {}", stderr_path.display()))?;
                let stderr = cap_std::fs::File::from_std(stderr, cap_std::ambient_authority());
                let stderr = wasi_cap_std_sync::file::File::from_cap_std(stderr);
                cx = cx.stderr(Box::new(stderr));

                if let Some(stdin_path) = &stdin_path {
                    let stdin = std::fs::File::open(stdin_path)
                        .with_context(|| format!("failed to open {}", stdin_path.display()))?;
                    let stdin = cap_std::fs::File::from_std(stdin, cap_std::ambient_authority());
                    let stdin = wasi_cap_std_sync::file::File::from_cap_std(stdin);
                    cx = cx.stdin(Box::new(stdin));
                }

                cx = cx.preopened_dir(working_dir.try_clone()?, ".")?;

                if let Ok(val) = env::var("WASM_BENCH_USE_SMALL_WORKLOAD") {
                    cx = cx.env("WASM_BENCH_USE_SMALL_WORKLOAD", &val)?;
                }

                Ok(cx.build())
            },
        )?);
        Ok(Box::into_raw(state) as _)
    })();

    if let Ok(bench_ptr) = result {
        unsafe {
            assert!(!out_bench_ptr.is_null());
            *out_bench_ptr = bench_ptr;
        }
    }

    to_exit_code(result.map(|_| ()))
}

#[no_mangle]
pub extern "C" fn wasm_bench_free(state: *mut c_void) {
    assert!(!state.is_null());
    unsafe {
        Box::from_raw(state as *mut BenchState);
    }
}

#[no_mangle]
pub extern "C" fn wasm_bench_compile(
    state: *mut c_void,
    wasm_bytes: *const u8,
    wasm_bytes_length: usize,
) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let wasm_bytes = unsafe { slice::from_raw_parts(wasm_bytes, wasm_bytes_length) };
    let result = state.compile(wasm_bytes).context("failed to compile");
    to_exit_code(result)
}

#[no_mangle]
pub extern "C" fn wasm_bench_instantiate(state: *mut c_void) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let result = state.instantiate().context("failed to instantiate");
    to_exit_code(result)
}

#[no_mangle]
pub extern "C" fn wasm_bench_execute(state: *mut c_void) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let result = state.execute().context("failed to execute");
    to_exit_code(result)
}

fn to_exit_code<T>(result: impl Into<Result<T>>) -> ExitCode {
    match result.into() {
        Ok(_) => OK,
        Err(error) => {
            eprintln!("{:?}", error);
            ERR
        }
    }
}

struct BenchState {
    linker: Linker<HostState>,
    compilation_timer: *mut u8,
    compilation_start: extern "C" fn(*mut u8),
    compilation_end: extern "C" fn(*mut u8),
    instantiation_timer: *mut u8,
    instantiation_start: extern "C" fn(*mut u8),
    instantiation_end: extern "C" fn(*mut u8),
    make_wasi_cx: Box<dyn FnMut() -> Result<WasiCtx>>,
    module: Option<Module>,
    store_and_instance: Option<(Store<HostState>, Instance)>,
}

struct HostState {
    wasi: WasiCtx,
    #[cfg(feature = "wasi-nn")]
    wasi_nn: wasmtime_wasi_nn::WasiNnCtx,
    #[cfg(feature = "wasi-crypto")]
    wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx,
}

impl BenchState {
    fn new(
        compilation_timer: *mut u8,
        compilation_start: extern "C" fn(*mut u8),
        compilation_end: extern "C" fn(*mut u8),
        instantiation_timer: *mut u8,
        instantiation_start: extern "C" fn(*mut u8),
        instantiation_end: extern "C" fn(*mut u8),
        execution_timer: *mut u8,
        execution_start: extern "C" fn(*mut u8),
        execution_end: extern "C" fn(*mut u8),
        make_wasi_cx: impl FnMut() -> Result<WasiCtx> + 'static,
    ) -> Result<Self> {
        let mut config = Config::new();
        config.wasm_simd(true);
        let engine = Engine::new(&config)?;
        let mut linker = Linker::<HostState>::new(&engine);

        let execution_timer = unsafe { UnsafeSendSync::new(execution_timer) };
        linker.func_wrap("bench", "start", move || {
            execution_start(*execution_timer.get());
            Ok(())
        })?;
        linker.func_wrap("bench", "end", move || {
            execution_end(*execution_timer.get());
            Ok(())
        })?;

        wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?;
        #[cfg(feature = "wasi-nn")]
        wasmtime_wasi_nn::add_to_linker(&mut linker, |cx| &mut cx.wasi_nn)?;
        #[cfg(feature = "wasi-crypto")]
        wasmtime_wasi_crypto::add_to_linker(&mut linker, |cx| &mut cx.wasi_crypto)?;

        Ok(Self {
            linker,
            compilation_timer,
            compilation_start,
            compilation_end,
            instantiation_timer,
            instantiation_start,
            instantiation_end,
            make_wasi_cx: Box::new(make_wasi_cx) as _,
            module: None,
            store_and_instance: None,
        })
    }

    fn compile(&mut self, bytes: &[u8]) -> Result<()> {
        assert!(
            self.module.is_none(),
            "create a new engine to repeat compilation"
        );

        (self.compilation_start)(self.compilation_timer);
        let module = Module::from_binary(self.linker.engine(), bytes)?;
        (self.compilation_end)(self.compilation_timer);

        self.module = Some(module);
        Ok(())
    }

    fn instantiate(&mut self) -> Result<()> {
        let module = self
            .module
            .as_ref()
            .expect("compile the module before instantiating it");

        let host = HostState {
            wasi: (self.make_wasi_cx)().context("failed to create a WASI context")?,
            #[cfg(feature = "wasi-nn")]
            wasi_nn: wasmtime_wasi_nn::WasiNnCtx::new()?,
            #[cfg(feature = "wasi-crypto")]
            wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx::new(),
        };

        (self.instantiation_start)(self.instantiation_timer);
        let mut store = Store::new(self.linker.engine(), host);
        let instance = self.linker.instantiate(&mut store, &module)?;
        (self.instantiation_end)(self.instantiation_timer);

        self.store_and_instance = Some((store, instance));
        Ok(())
    }

    fn execute(&mut self) -> Result<()> {
        let (mut store, instance) = self
            .store_and_instance
            .take()
            .expect("instantiate the module before executing it");

        let start_func = instance.get_typed_func::<(), (), _>(&mut store, "_start")?;
        match start_func.call(&mut store, ()) {
            Ok(_) => Ok(()),
            Err(trap) => {
                match trap.i32_exit_status() {
                    Some(0) => Ok(()),
                    Some(n) => Err(anyhow!("_start exited with a non-zero code: {}", n)),
                    None => Err(anyhow!(
                        "executing the benchmark resulted in a trap: {}",
                        trap
                    )),
                }
            }
        }
    }
}
mod unsafe_send_sync;

use crate::unsafe_send_sync::UnsafeSendSync;
use anyhow::{anyhow, Context, Result};
use std::os::raw::{c_int, c_void};
use std::slice;
use std::{env, path::PathBuf};
use wasmtime::{Config, Engine, Instance, Linker, Module, Store};
use wasmtime_wasi::{sync::WasiCtxBuilder, WasiCtx};

pub type ExitCode = c_int;
pub const OK: ExitCode = 0;
pub const ERR: ExitCode = -1;

#[cfg(feature = "shuffling-allocator")]
#[global_allocator]
static ALLOC: shuffling_allocator::ShufflingAllocator<std::alloc::System> =
    shuffling_allocator::wrap!(&std::alloc::System);

#[repr(C)]
pub struct WasmBenchConfig {
    pub working_dir_ptr: *const u8,
    pub working_dir_len: usize,
    pub stdout_path_ptr: *const u8,
    pub stdout_path_len: usize,
    pub stderr_path_ptr: *const u8,
    pub stderr_path_len: usize,
    pub stdin_path_ptr: *const u8,
    pub stdin_path_len: usize,
    pub compilation_timer: *mut u8,
    pub compilation_start: extern "C" fn(*mut u8),
    pub compilation_end: extern "C" fn(*mut u8),
    pub instantiation_timer: *mut u8,
    pub instantiation_start: extern "C" fn(*mut u8),
    pub instantiation_end: extern "C" fn(*mut u8),
    pub execution_timer: *mut u8,
    pub execution_start: extern "C" fn(*mut u8),
    pub execution_end: extern "C" fn(*mut u8),
}

impl WasmBenchConfig {
    fn working_dir(&self) -> Result<PathBuf> {
        let working_dir =
            unsafe { std::slice::from_raw_parts(self.working_dir_ptr, self.working_dir_len) };
        let working_dir = std::str::from_utf8(working_dir)
            .context("given working directory is not valid UTF-8")?;
        Ok(working_dir.into())
    }

    fn stdout_path(&self) -> Result<PathBuf> {
        let stdout_path =
            unsafe { std::slice::from_raw_parts(self.stdout_path_ptr, self.stdout_path_len) };
        let stdout_path =
            std::str::from_utf8(stdout_path).context("given stdout path is not valid UTF-8")?;
        Ok(stdout_path.into())
    }

    fn stderr_path(&self) -> Result<PathBuf> {
        let stderr_path =
            unsafe { std::slice::from_raw_parts(self.stderr_path_ptr, self.stderr_path_len) };
        let stderr_path =
            std::str::from_utf8(stderr_path).context("given stderr path is not valid UTF-8")?;
        Ok(stderr_path.into())
    }

    fn stdin_path(&self) -> Result<Option<PathBuf>> {
        if self.stdin_path_ptr.is_null() {
            return Ok(None);
        }
        let stdin_path =
            unsafe { std::slice::from_raw_parts(self.stdin_path_ptr, self.stdin_path_len) };
        let stdin_path =
            std::str::from_utf8(stdin_path).context("given stdin path is not valid UTF-8")?;
        Ok(Some(stdin_path.into()))
    }
}

#[no_mangle]
pub extern "C" fn wasm_bench_create(
    config: WasmBenchConfig,
    out_bench_ptr: *mut *mut c_void,
) -> ExitCode {
    let result = (|| -> Result<_> {
        let working_dir = config.working_dir()?;
        let working_dir =
            cap_std::fs::Dir::open_ambient_dir(&working_dir, cap_std::ambient_authority())
                .with_context(|| {
                    format!(
                        "failed to preopen the working directory: {}",
                        working_dir.display(),
                    )
                })?;

        let stdout_path = config.stdout_path()?;
        let stderr_path = config.stderr_path()?;
        let stdin_path = config.stdin_path()?;

        let state = Box::new(BenchState::new(
            config.compilation_timer,
            config.compilation_start,
            config.compilation_end,
            config.instantiation_timer,
            config.instantiation_start,
            config.instantiation_end,
            config.execution_timer,
            config.execution_start,
            config.execution_end,
            move || {
                let mut cx = WasiCtxBuilder::new();

                let stdout = std::fs::File::create(&stdout_path)
                    .with_context(|| format!("failed to create {}", stdout_path.display()))?;
                let stdout = cap_std::fs::File::from_std(stdout, cap_std::ambient_authority());
                let stdout = wasi_cap_std_sync::file::File::from_cap_std(stdout);
                cx = cx.stdout(Box::new(stdout));

                let stderr = std::fs::File::create(&stderr_path)
                    .with_context(|| format!("failed to create {}", stderr_path.display()))?;
                let stderr = cap_std::fs::File::from_std(stderr, cap_std::ambient_authority());
                let stderr = wasi_cap_std_sync::file::File::from_cap_std(stderr);
                cx = cx.stderr(Box::new(stderr));

                if let Some(stdin_path) = &stdin_path {
                    let stdin = std::fs::File::open(stdin_path)
                        .with_context(|| format!("failed to open {}", stdin_path.display()))?;
                    let stdin = cap_std::fs::File::from_std(stdin, cap_std::ambient_authority());
                    let stdin = wasi_cap_std_sync::file::File::from_cap_std(stdin);
                    cx = cx.stdin(Box::new(stdin));
                }

                cx = cx.preopened_dir(working_dir.try_clone()?, ".")?;

                if let Ok(val) = env::var("WASM_BENCH_USE_SMALL_WORKLOAD") {
                    cx = cx.env("WASM_BENCH_USE_SMALL_WORKLOAD", &val)?;
                }

                Ok(cx.build())
            },
        )?);
        Ok(Box::into_raw(state) as _)
    })();

    if let Ok(bench_ptr) = result {
        unsafe {
            assert!(!out_bench_ptr.is_null());
            *out_bench_ptr = bench_ptr;
        }
    }

    to_exit_code(result.map(|_| ()))
}

#[no_mangle]
pub extern "C" fn wasm_bench_free(state: *mut c_void) {
    assert!(!state.is_null());
    unsafe {
        Box::from_raw(state as *mut BenchState);
    }
}

#[no_mangle]
pub extern "C" fn wasm_bench_compile(
    state: *mut c_void,
    wasm_bytes: *const u8,
    wasm_bytes_length: usize,
) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let wasm_bytes = unsafe { slice::from_raw_parts(wasm_bytes, wasm_bytes_length) };
    let result = state.compile(wasm_bytes).context("failed to compile");
    to_exit_code(result)
}

#[no_mangle]
pub extern "C" fn wasm_bench_instantiate(state: *mut c_void) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let result = state.instantiate().context("failed to instantiate");
    to_exit_code(result)
}

#[no_mangle]
pub extern "C" fn wasm_bench_execute(state: *mut c_void) -> ExitCode {
    let state = unsafe { (state as *mut BenchState).as_mut().unwrap() };
    let result = state.execute().context("failed to execute");
    to_exit_code(result)
}

fn to_exit_code<T>(result: impl Into<Result<T>>) -> ExitCode {
    match result.into() {
        Ok(_) => OK,
        Err(error) => {
            eprintln!("{:?}", error);
            ERR
        }
    }
}

struct BenchState {
    linker: Linker<HostState>,
    compilation_timer: *mut u8,
    compilation_start: extern "C" fn(*mut u8),
    compilation_end: extern "C" fn(*mut u8),
    instantiation_timer: *mut u8,
    instantiation_start: extern "C" fn(*mut u8),
    instantiation_end: extern "C" fn(*mut u8),
    make_wasi_cx: Box<dyn FnMut() -> Result<WasiCtx>>,
    module: Option<Module>,
    store_and_instance: Option<(Store<HostState>, Instance)>,
}

struct HostState {
    wasi: WasiCtx,
    #[cfg(feature = "wasi-nn")]
    wasi_nn: wasmtime_wasi_nn::WasiNnCtx,
    #[cfg(feature = "wasi-crypto")]
    wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx,
}

impl BenchState {
    fn new(
        compilation_timer: *mut u8,
        compilation_start: extern "C" fn(*mut u8),
        compilation_end: extern "C" fn(*mut u8),
        instantiation_timer: *mut u8,
        instantiation_start: extern "C" fn(*mut u8),
        instantiation_end: extern "C" fn(*mut u8),
        execution_timer: *mut u8,
        execution_start: extern "C" fn(*mut u8),
        execution_end: extern "C" fn(*mut u8),
        make_wasi_cx: impl FnMut() -> Result<WasiCtx> + 'static,
    ) -> Result<Self> {
        let mut config = Config::new();
        config.wasm_simd(true);
        let engine = Engine::new(&config)?;
        let mut linker = Linker::<HostState>::new(&engine);

        let execution_timer = unsafe { UnsafeSendSync::new(execution_timer) };
        linker.func_wrap("bench", "start", move || {
            execution_start(*execution_timer.get());
            Ok(())
        })?;
        linker.func_wrap("bench", "end", move || {
            execution_end(*execution_timer.get());
            Ok(())
        })?;

        wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?;
        #[cfg(feature = "wasi-nn")]
        wasmtime_wasi_nn::add_to_linker(&mut linker, |cx| &mut cx.wasi_nn)?;
        #[cfg(feature = "wasi-crypto")]
        wasmtime_wasi_crypto::add_to_linker(&mut linker, |cx| &mut cx.wasi_crypto)?;

        Ok(Self {
            linker,
            compilation_timer,
            compilation_start,
            compilation_end,
            instantiation_timer,
            instantiation_start,
            instantiation_end,
            make_wasi_cx: Box::new(make_wasi_cx) as _,
            module: None,
            store_and_instance: None,
        })
    }

    fn compile(&mut self, bytes: &[u8]) -> Result
antiate(&mut self) -> Result<()> {
        let module = self
            .module
            .as_ref()
            .expect("compile the module before instantiating it");

        let host = HostState {
            wasi: (self.make_wasi_cx)().context("failed to create a WASI context")?,
            #[cfg(feature = "wasi-nn")]
            wasi_nn: wasmtime_wasi_nn::WasiNnCtx::new()?,
            #[cfg(feature = "wasi-crypto")]
            wasi_crypto: wasmtime_wasi_crypto::WasiCryptoCtx::new(),
        };

        (self.instantiation_start)(self.instantiation_timer);
        let mut store = Store::new(self.linker.engine(), host);
        let instance = self.linker.instantiate(&mut store, &module)?;
        (self.instantiation_end)(self.instantiation_timer);

        self.store_and_instance = Some((store, instance));
        Ok(())
    }

    fn execute(&mut self) -> Result<()> {
        let (mut store, instance) = self
            .store_and_instance
            .take()
            .expect("instantiate the module before executing it");

        let start_func = instance.get_typed_func::<(), (), _>(&mut store, "_start")?;
        match start_func.call(&mut store, ()) {
            Ok(_) => Ok(()),
            Err(trap) => {
                match trap.i32_exit_status() {
                    Some(0) => Ok(()),
                    Some(n) => Err(anyhow!("_start exited with a non-zero code: {}", n)),
                    None => Err(anyhow!(
                        "executing the benchmark resulted in a trap: {}",
                        trap
                    )),
                }
            }
        }
    }
}
<()> {
        assert!(
            self.module.is_none(),
            "create a new engine to repeat compilation"
        );

        (self.compilation_start)(self.compilation_timer);
        let module = Module::from_binary(self.linker.engine(), bytes)?;
        (self.compilation_end)(self.compilation_timer);

        self.module = Some(module);
        Ok(())
    }

    fn inst
random
[ { "content": "pub fn create_global(store: &mut StoreOpaque, gt: &GlobalType, val: Val) -> Result<InstanceId> {\n\n let mut module = Module::new();\n\n let mut func_imports = Vec::new();\n\n let mut externref_init = None;\n\n let mut shared_signature_id = None;\n\n\n\n let global = Global {\n\n wasm_ty: gt.content().to_wasm_type(),\n\n mutability: match gt.mutability() {\n\n Mutability::Const => false,\n\n Mutability::Var => true,\n\n },\n\n initializer: match val {\n\n Val::I32(i) => GlobalInit::I32Const(i),\n\n Val::I64(i) => GlobalInit::I64Const(i),\n\n Val::F32(f) => GlobalInit::F32Const(f),\n\n Val::F64(f) => GlobalInit::F64Const(f),\n\n Val::V128(i) => GlobalInit::V128Const(i.into()),\n\n Val::ExternRef(None) | Val::FuncRef(None) => GlobalInit::RefNullConst,\n\n Val::ExternRef(Some(x)) => {\n", "file_path": "crates/wasmtime/src/trampoline/global.rs", "rank": 0, "score": 588419.6553816786 }, { "content": "fn relocate_dwarf_sections(bytes: &mut [u8], code_region: (*const u8, usize)) -> Result<(), Error> {\n\n let mut relocations = Vec::new();\n\n let obj = File::parse(&bytes[..])?;\n\n for section in obj.sections() {\n\n let section_start = match section.file_range() {\n\n Some((start, _)) => start,\n\n None => continue,\n\n };\n\n for (off, r) in section.relocations() {\n\n if r.kind() != RelocationKind::Absolute\n\n || r.encoding() != RelocationEncoding::Generic\n\n || r.size() != 64\n\n {\n\n continue;\n\n }\n\n\n\n let sym = match r.target() {\n\n RelocationTarget::Symbol(index) => match obj.symbol_by_index(index) {\n\n Ok(sym) => sym,\n\n Err(_) => continue,\n", "file_path": "crates/jit/src/debug.rs", "rank": 1, "score": 560134.0788733837 }, { "content": "pub fn commit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Memory needs to be committed, so don't use the `region` crate\n\n if unsafe { VirtualAlloc(addr as _, len, MEM_COMMIT, PAGE_READWRITE).is_null() } {\n\n bail!(\"failed to commit memory as read/write\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 2, "score": 527253.7852975669 }, { "content": "pub fn decommit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n if unsafe { VirtualFree(addr as _, len, MEM_DECOMMIT) } == 0 {\n\n bail!(\n\n \"failed to decommit memory pages: {}\",\n\n std::io::Error::last_os_error()\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 3, "score": 527253.7852975669 }, { "content": "/// Construct a dummy `Extern` from its type signature\n\npub fn dummy_extern<T>(store: &mut Store<T>, ty: ExternType) -> Result<Extern> {\n\n Ok(match ty {\n\n ExternType::Func(func_ty) => Extern::Func(dummy_func(store, func_ty)),\n\n ExternType::Global(global_ty) => Extern::Global(dummy_global(store, global_ty)),\n\n ExternType::Table(table_ty) => Extern::Table(dummy_table(store, table_ty)?),\n\n ExternType::Memory(mem_ty) => Extern::Memory(dummy_memory(store, mem_ty)?),\n\n ExternType::Instance(instance_ty) => Extern::Instance(dummy_instance(store, instance_ty)?),\n\n ExternType::Module(module_ty) => Extern::Module(dummy_module(store.engine(), module_ty)),\n\n })\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 4, "score": 525966.1006321936 }, { "content": "fn typecheck_externs(store: &mut StoreOpaque, module: &Module, imports: &[Extern]) -> Result<()> {\n\n for import in imports {\n\n if 
!import.comes_from_same_store(store) {\n\n bail!(\"cross-`Store` instantiation is not currently supported\");\n\n }\n\n }\n\n typecheck(store, module, imports, |cx, ty, item| cx.extern_(ty, item))\n\n}\n\n\n", "file_path": "crates/wasmtime/src/instance.rs", "rank": 5, "score": 523816.38121975225 }, { "content": "/// Create a set of dummy functions/globals/etc for the given imports.\n\npub fn dummy_linker<'module, T>(store: &mut Store<T>, module: &Module) -> Result<Linker<T>> {\n\n let mut linker = Linker::new(store.engine());\n\n linker.allow_shadowing(true);\n\n for import in module.imports() {\n\n match import.name() {\n\n Some(name) => {\n\n linker\n\n .define(import.module(), name, dummy_extern(store, import.ty())?)\n\n .unwrap();\n\n }\n\n None => match import.ty() {\n\n ExternType::Instance(ty) => {\n\n for ty in ty.exports() {\n\n linker\n\n .define(import.module(), ty.name(), dummy_extern(store, ty.ty())?)\n\n .unwrap();\n\n }\n\n }\n\n other => {\n\n linker\n\n .define_name(import.module(), dummy_extern(store, other)?)\n\n .unwrap();\n\n }\n\n },\n\n }\n\n }\n\n Ok(linker)\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 6, "score": 523241.7998362334 }, { "content": "/// Return an instance implementing the \"spectest\" interface used in the\n\n/// spec testsuite.\n\npub fn link_spectest<T>(linker: &mut Linker<T>, store: &mut Store<T>) -> Result<()> {\n\n linker.func_wrap(\"spectest\", \"print\", || {})?;\n\n linker.func_wrap(\"spectest\", \"print_i32\", |val: i32| println!(\"{}: i32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i64\", |val: i64| println!(\"{}: i64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f32\", |val: f32| println!(\"{}: f32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f64\", |val: f64| println!(\"{}: f64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i32_f32\", |i: i32, f: f32| {\n\n println!(\"{}: i32\", i);\n\n println!(\"{}: f32\", f);\n\n })?;\n\n linker.func_wrap(\"spectest\", \"print_f64_f64\", |f1: f64, f2: f64| {\n\n println!(\"{}: f64\", f1);\n\n println!(\"{}: f64\", f2);\n\n })?;\n\n\n\n let ty = GlobalType::new(ValType::I32, Mutability::Const);\n\n let g = Global::new(&mut *store, ty, Val::I32(666))?;\n\n linker.define(\"spectest\", \"global_i32\", g)?;\n\n\n\n let ty = GlobalType::new(ValType::I64, Mutability::Const);\n", "file_path": "crates/wast/src/spectest.rs", "rank": 7, "score": 514922.3785165342 }, { "content": "pub fn create_table(store: &mut StoreOpaque, table: &TableType) -> Result<InstanceId> {\n\n let mut module = Module::new();\n\n let table_plan = wasmtime_environ::TablePlan::for_table(\n\n table.wasmtime_table().clone(),\n\n &store.engine().config().tunables,\n\n );\n\n let table_id = module.table_plans.push(table_plan);\n\n // TODO: can this `exports.insert` get removed?\n\n module\n\n .exports\n\n .insert(String::new(), EntityIndex::Table(table_id));\n\n\n\n create_handle(module, store, Box::new(()), &[], None)\n\n}\n", "file_path": "crates/wasmtime/src/trampoline/table.rs", "rank": 8, "score": 513860.06614956155 }, { "content": "pub fn create_memory(store: &mut StoreOpaque, memory: &MemoryType) -> Result<InstanceId> {\n\n let mut module = Module::new();\n\n\n\n let memory_plan = wasmtime_environ::MemoryPlan::for_memory(\n\n memory.wasmtime_memory().clone(),\n\n &store.engine().config().tunables,\n\n );\n\n let memory_id = module.memory_plans.push(memory_plan);\n\n module\n\n .exports\n\n .insert(String::new(), EntityIndex::Memory(memory_id));\n\n\n\n create_handle(module, 
store, Box::new(()), &[], None)\n\n}\n\n\n", "file_path": "crates/wasmtime/src/trampoline/memory.rs", "rank": 9, "score": 513860.06614956155 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 10, "score": 511892.58098159346 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n commit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 11, "score": 511892.5809815936 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 12, "score": 511892.58098159346 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 13, "score": 511892.58098159346 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 14, "score": 511892.5809815936 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 15, "score": 511892.58098159346 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 16, "score": 511892.58098159346 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, true)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 17, "score": 511892.58098159346 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 18, "score": 511892.5809815936 }, { "content": "pub fn commit_memory_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as memory pages remain READ|WRITE with uffd\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 19, "score": 511892.5809815936 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 20, "score": 511892.5809815936 }, { "content": "pub fn commit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n commit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 21, "score": 511892.58098159346 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 22, "score": 511892.58098159346 
}, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 23, "score": 511892.58098159346 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Just change the protection level to READ|WRITE\n\n unsafe {\n\n region::protect(addr, len, region::Protection::READ_WRITE)\n\n .context(\"failed to make linear memory pages read/write\")\n\n }\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 24, "score": 511892.58098159346 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 25, "score": 511892.5809815936 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, true)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 26, "score": 511892.58098159346 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n\n/// This is used to initialize the memory pool when uffd is enabled.\n\n///\n\n/// Without uffd, all of the memory pool's pages are initially protected with `NONE` to treat the entire\n\n/// range as guard pages. When an instance is created, the initial pages of the memory are\n\n/// changed to `READ_WRITE`.\n\n///\n\n/// With uffd, however, the potentially accessible pages of the each linear memory are made `READ_WRITE` and\n\n/// the page fault handler will detect an out of bounds access and treat the page, temporarily,\n\n/// as a guard page.\n\npub(super) fn initialize_memory_pool(pool: &MemoryPool) -> Result<()> {\n\n if pool.memory_size == 0 || pool.max_wasm_pages == 0 {\n\n return Ok(());\n\n }\n\n\n\n for i in 0..pool.max_instances {\n\n for base in pool.get(i) {\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 27, "score": 511892.5809815936 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 28, "score": 511892.58098159346 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Just change the protection level to READ|WRITE\n\n unsafe {\n\n region::protect(addr, len, region::Protection::READ_WRITE)\n\n .context(\"failed to make linear memory pages read/write\")\n\n }\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 29, "score": 511892.58098159346 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 30, "score": 511892.5809815936 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 31, "score": 511892.58098159346 }, { "content": "fn instantiate(linker: &Linker<WasiCtx>, module: &Module) -> Result<()> 
{\n\n let wasi = WasiCtxBuilder::new().build();\n\n let mut store = Store::new(module.engine(), wasi);\n\n let _instance = linker.instantiate(&mut store, module)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "benches/instantiation.rs", "rank": 32, "score": 502558.10433610633 }, { "content": "fn initialize_tables(instance: &mut Instance, module: &Module) -> Result<(), InstantiationError> {\n\n for init in &module.table_initializers {\n\n instance\n\n .table_init_segment(\n\n init.table_index,\n\n &init.elements,\n\n get_table_init_start(init, instance)?,\n\n 0,\n\n init.elements.len() as u32,\n\n )\n\n .map_err(InstantiationError::Trap)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator.rs", "rank": 33, "score": 500858.7969433251 }, { "content": "/// Construct a dummy instance for the given instance type.\n\n///\n\n/// This is done by using the expected type to generate a module on-the-fly\n\n/// which we the instantiate.\n\npub fn dummy_instance<T>(store: &mut Store<T>, ty: InstanceType) -> Result<Instance> {\n\n let mut wat = WatGenerator::new();\n\n for ty in ty.exports() {\n\n wat.export(&ty);\n\n }\n\n let module = Module::new(store.engine(), &wat.finish()).unwrap();\n\n Instance::new(store, &module, &[])\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 34, "score": 495932.54104187887 }, { "content": "fn check_init_bounds(instance: &mut Instance, module: &Module) -> Result<(), InstantiationError> {\n\n check_table_init_bounds(instance, module)?;\n\n\n\n match &instance.module.memory_initialization {\n\n MemoryInitialization::Paged { out_of_bounds, .. } => {\n\n if *out_of_bounds {\n\n return Err(InstantiationError::Link(LinkError(\n\n \"memory out of bounds: data segment does not fit\".into(),\n\n )));\n\n }\n\n }\n\n MemoryInitialization::Segmented(initializers) => {\n\n check_memory_init_bounds(instance, initializers)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator.rs", "rank": 35, "score": 493305.9707545148 }, { "content": "/// Opens a fresh file descriptor for `path` where `path` should be a preopened\n\n/// directory.\n\npub fn open_scratch_directory(path: &str) -> Result<wasi::Fd, String> {\n\n unsafe {\n\n for i in 3.. 
{\n\n let stat = match wasi::fd_prestat_get(i) {\n\n Ok(s) => s,\n\n Err(_) => break,\n\n };\n\n if stat.tag != wasi::PREOPENTYPE_DIR {\n\n continue;\n\n }\n\n let mut dst = Vec::with_capacity(stat.u.dir.pr_name_len);\n\n if wasi::fd_prestat_dir_name(i, dst.as_mut_ptr(), dst.capacity()).is_err() {\n\n continue;\n\n }\n\n dst.set_len(stat.u.dir.pr_name_len);\n\n if dst == path.as_bytes() {\n\n let (base, inherit) = fd_get_rights(i);\n\n return Ok(\n\n wasi::path_open(i, 0, \".\", wasi::OFLAGS_DIRECTORY, base, inherit, 0)\n\n .expect(\"failed to open dir\"),\n", "file_path": "crates/test-programs/wasi-tests/src/lib.rs", "rank": 36, "score": 484978.3307327652 }, { "content": "fn decommit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n unsafe {\n\n // On Linux, this tells the kernel to discard the backing of the pages in the range.\n\n // If the discarded pages are part of a uffd region, then the next access will fault\n\n // and the user fault handler will receive the event.\n\n // If the pages are not monitored by uffd, the kernel will zero the page on next access,\n\n // as if it were mmap'd for the first time.\n\n madvise(addr as _, len, Advice::LinuxDontNeed).context(\"madvise failed to decommit\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 37, "score": 462280.33320254984 }, { "content": "/// This is used to reset a linear memory's guard page back to read-write as the page might be accessible\n\n/// again in the future depending on how the linear memory grows.\n\nfn reset_guard_page(addr: *mut u8, len: usize) -> Result<()> {\n\n unsafe {\n\n region::protect(addr, len, region::Protection::READ_WRITE)\n\n .context(\"failed to reset guard page\")\n\n }\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 38, "score": 448248.0870401669 }, { "content": "/// Creates a new configuration file at specified path, or default path if None is passed.\n\n/// Fails if file already exists.\n\npub fn create_new_config<P: AsRef<Path> + Debug>(config_file: Option<P>) -> Result<PathBuf> {\n\n trace!(\"Creating new config file, path: {:?}\", config_file);\n\n\n\n let config_file = match config_file {\n\n Some(path) => path.as_ref().to_path_buf(),\n\n None => default_config_path()?,\n\n };\n\n\n\n if config_file.exists() {\n\n bail!(\n\n \"Configuration file '{}' already exists.\",\n\n config_file.display()\n\n );\n\n }\n\n\n\n let parent_dir = config_file\n\n .parent()\n\n .ok_or_else(|| anyhow!(\"Invalid cache config path: {}\", config_file.display()))?;\n\n\n\n fs::create_dir_all(parent_dir).with_context(|| {\n", "file_path": "crates/cache/src/config.rs", "rank": 39, "score": 447987.7546229878 }, { "content": "// Generated as internal constructor for term vector_all_ones.\n\npub fn constructor_vector_all_ones<C: Context>(ctx: &mut C, arg0: Type) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 480.\n\n let expr0_0 = C::temp_writable_reg(ctx, pattern0_0);\n\n let expr1_0 = C::writable_reg_to_reg(ctx, expr0_0);\n\n let expr2_0: Type = I32X4;\n\n let expr3_0 = constructor_sse_cmp_op(ctx, expr2_0)?;\n\n let expr4_0 = RegMem::Reg { reg: expr1_0 };\n\n let expr5_0 = MInst::XmmRmR {\n\n op: expr3_0,\n\n src1: expr1_0,\n\n src2: expr4_0,\n\n dst: expr0_0,\n\n };\n\n let expr6_0 = C::emit(ctx, &expr5_0);\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 40, 
"score": 438342.4170459283 }, { "content": "// Generated as internal constructor for term temp_reg.\n\npub fn constructor_temp_reg<C: Context>(ctx: &mut C, arg0: Type) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/prelude.isle line 60.\n\n let expr0_0 = C::temp_writable_reg(ctx, pattern0_0);\n\n let expr1_0 = C::writable_reg_to_reg(ctx, expr0_0);\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 41, "score": 438342.4170459283 }, { "content": "// Generated as internal constructor for term temp_reg.\n\npub fn constructor_temp_reg<C: Context>(ctx: &mut C, arg0: Type) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/prelude.isle line 60.\n\n let expr0_0 = C::temp_writable_reg(ctx, pattern0_0);\n\n let expr1_0 = C::writable_reg_to_reg(ctx, expr0_0);\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 42, "score": 438342.4170459283 }, { "content": "/// This function is called to register state within `Store` whenever\n\n/// WebAssembly is entered within the `Store`.\n\n///\n\n/// This function sets up various limits such as:\n\n///\n\n/// * The stack limit. This is what ensures that we limit the stack space\n\n/// allocated by WebAssembly code and it's relative to the initial stack\n\n/// pointer that called into wasm.\n\n///\n\n/// * Stack canaries for externref gc tracing. Currently the implementation\n\n/// relies on walking frames but the stack walker isn't always 100% reliable,\n\n/// so a canary is used to ensure that if the canary is seen then it's\n\n/// guaranteed all wasm frames have been walked.\n\n///\n\n/// This function may fail if the the stack limit can't be set because an\n\n/// interrupt already happened.\n\nfn enter_wasm<T>(store: &mut StoreContextMut<'_, T>) -> Result<Option<usize>, Trap> {\n\n // If this is a recursive call, e.g. our stack canary is already set, then\n\n // we may be able to skip this function.\n\n //\n\n // For synchronous stores there's nothing else to do because all wasm calls\n\n // happen synchronously and on the same stack. This means that the previous\n\n // stack limit will suffice for the next recursive call.\n\n //\n\n // For asynchronous stores then each call happens on a separate native\n\n // stack. This means that the previous stack limit is no longer relevant\n\n // because we're on a separate stack. 
In this situation we need to\n\n // update the stack limit, but we don't need to update the gc stack canary\n\n // in this situation.\n\n if store\n\n .0\n\n .externref_activations_table()\n\n .stack_canary()\n\n .is_some()\n\n && !store.0.async_support()\n\n {\n", "file_path": "crates/wasmtime/src/func.rs", "rank": 43, "score": 438077.91942813667 }, { "content": "fn decommit(addr: *mut u8, len: usize, protect: bool) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // By creating a new mapping at the same location, this will discard the\n\n // mapping for the pages in the given range.\n\n // The new mapping will be to the CoW zero page, so this effectively\n\n // zeroes the pages.\n\n unsafe {\n\n rustix::io::mmap_anonymous(\n\n addr as _,\n\n len,\n\n if protect {\n\n rustix::io::ProtFlags::empty()\n\n } else {\n\n rustix::io::ProtFlags::READ | rustix::io::ProtFlags::WRITE\n\n },\n\n rustix::io::MapFlags::PRIVATE | rustix::io::MapFlags::FIXED,\n\n )\n\n .context(\"mmap failed to remap pages: {}\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 44, "score": 436292.6256924636 }, { "content": "fn decommit(addr: *mut u8, len: usize, protect: bool) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n unsafe {\n\n if protect {\n\n region::protect(addr, len, region::Protection::NONE)\n\n .context(\"failed to protect memory pages\")?;\n\n }\n\n\n\n // On Linux, this is enough to cause the kernel to initialize the pages to 0 on next access\n\n rustix::io::madvise(addr as _, len, rustix::io::Advice::LinuxDontNeed)\n\n .context(\"madvise failed to decommit: {}\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 45, "score": 436292.6256924636 }, { "content": "fn test_many_call_module(mut store: Store<()>) -> Result<()> {\n\n const N: i32 = 200;\n\n\n\n let mut wat = String::new();\n\n wat.push_str(\"(module\\n\");\n\n wat.push_str(\"(func $first (result i32) (i32.const 1))\\n\");\n\n for i in 0..N {\n\n wat.push_str(&format!(\"(func (export \\\"{}\\\") (result i32 i32)\\n\", i));\n\n wat.push_str(\"call $first\\n\");\n\n wat.push_str(&format!(\"i32.const {}\\n\", i));\n\n wat.push_str(\"i32.add\\n\");\n\n wat.push_str(\"call $last\\n\");\n\n wat.push_str(&format!(\"i32.const {}\\n\", i));\n\n wat.push_str(\"i32.add)\\n\");\n\n }\n\n wat.push_str(\"(func $last (result i32) (i32.const 2))\\n\");\n\n wat.push_str(\")\\n\");\n\n\n\n let module = Module::new(store.engine(), &wat)?;\n\n\n", "file_path": "tests/all/relocs.rs", "rank": 46, "score": 433952.40713009745 }, { "content": "// Generated as internal constructor for term vector_size.\n\npub fn constructor_vector_size<C: Context>(ctx: &mut C, arg0: Type) -> Option<VectorSize> {\n\n let pattern0_0 = arg0;\n\n if let Some((pattern1_0, pattern1_1)) = C::multi_lane(ctx, pattern0_0) {\n\n if pattern1_0 == 8 {\n\n if pattern1_1 == 16 {\n\n // Rule at src/isa/aarch64/inst.isle line 952.\n\n let expr0_0 = VectorSize::Size8x16;\n\n return Some(expr0_0);\n\n }\n\n }\n\n if pattern1_0 == 16 {\n\n if pattern1_1 == 8 {\n\n // Rule at src/isa/aarch64/inst.isle line 953.\n\n let expr0_0 = VectorSize::Size16x8;\n\n return Some(expr0_0);\n\n }\n\n }\n\n if pattern1_0 == 32 {\n\n if pattern1_1 == 4 {\n\n // Rule at src/isa/aarch64/inst.isle line 954.\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 47, "score": 433912.65872937895 }, { "content": "// Generated as 
internal constructor for term madd_op.\n\npub fn constructor_madd_op<C: Context>(ctx: &mut C, arg0: Type) -> Option<ALUOp3> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == I64 {\n\n // Rule at src/isa/aarch64/lower.isle line 86.\n\n let expr0_0 = ALUOp3::MAdd64;\n\n return Some(expr0_0);\n\n }\n\n if let Some(pattern1_0) = C::fits_in_32(ctx, pattern0_0) {\n\n // Rule at src/isa/aarch64/lower.isle line 85.\n\n let expr0_0 = ALUOp3::MAdd32;\n\n return Some(expr0_0);\n\n }\n\n return None;\n\n}\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 48, "score": 433912.65872937895 }, { "content": "// Generated as internal constructor for term isub_op.\n\npub fn constructor_isub_op<C: Context>(ctx: &mut C, arg0: Type) -> Option<ALUOp> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == I64 {\n\n // Rule at src/isa/aarch64/lower.isle line 81.\n\n let expr0_0 = ALUOp::Sub64;\n\n return Some(expr0_0);\n\n }\n\n if let Some(pattern1_0) = C::fits_in_32(ctx, pattern0_0) {\n\n // Rule at src/isa/aarch64/lower.isle line 80.\n\n let expr0_0 = ALUOp::Sub32;\n\n return Some(expr0_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 49, "score": 433912.65872937895 }, { "content": "// Generated as internal constructor for term iadd_op.\n\npub fn constructor_iadd_op<C: Context>(ctx: &mut C, arg0: Type) -> Option<ALUOp> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == I64 {\n\n // Rule at src/isa/aarch64/lower.isle line 76.\n\n let expr0_0 = ALUOp::Add64;\n\n return Some(expr0_0);\n\n }\n\n if let Some(pattern1_0) = C::fits_in_32(ctx, pattern0_0) {\n\n // Rule at src/isa/aarch64/lower.isle line 75.\n\n let expr0_0 = ALUOp::Add32;\n\n return Some(expr0_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 50, "score": 433912.65872937895 }, { "content": "// Generated as internal constructor for term sse_xor_op.\n\npub fn constructor_sse_xor_op<C: Context>(ctx: &mut C, arg0: Type) -> Option<SseOpcode> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == F32X4 {\n\n // Rule at src/isa/x64/inst.isle line 450.\n\n let expr0_0 = SseOpcode::Xorps;\n\n return Some(expr0_0);\n\n }\n\n if pattern0_0 == F64X2 {\n\n // Rule at src/isa/x64/inst.isle line 451.\n\n let expr0_0 = SseOpcode::Xorpd;\n\n return Some(expr0_0);\n\n }\n\n if let Some((pattern1_0, pattern1_1)) = C::multi_lane(ctx, pattern0_0) {\n\n // Rule at src/isa/x64/inst.isle line 452.\n\n let expr0_0 = SseOpcode::Pxor;\n\n return Some(expr0_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 51, "score": 429637.1969133719 }, { "content": "// Generated as internal constructor for term sse_cmp_op.\n\npub fn constructor_sse_cmp_op<C: Context>(ctx: &mut C, arg0: Type) -> Option<SseOpcode> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == F32X4 {\n\n // Rule at src/isa/x64/inst.isle line 465.\n\n let expr0_0 = SseOpcode::Cmpps;\n\n return Some(expr0_0);\n\n }\n\n if pattern0_0 == F64X2 {\n\n // Rule at src/isa/x64/inst.isle line 466.\n\n let expr0_0 = SseOpcode::Cmppd;\n\n return Some(expr0_0);\n\n }\n\n if let Some((pattern1_0, pattern1_1)) = C::multi_lane(ctx, pattern0_0) {\n\n if pattern1_0 == 8 {\n\n if pattern1_1 == 16 {\n\n // Rule at src/isa/x64/inst.isle line 461.\n\n let expr0_0 = SseOpcode::Pcmpeqb;\n\n return Some(expr0_0);\n\n }\n\n }\n", "file_path": 
"cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 52, "score": 429637.1969133719 }, { "content": "fn ensure_supported_elf_format(bytes: &[u8]) -> Result<Endianness, Error> {\n\n use object::elf::*;\n\n use object::read::elf::*;\n\n\n\n let kind = match object::FileKind::parse(bytes) {\n\n Ok(file) => file,\n\n Err(err) => {\n\n bail!(\"Failed to parse file: {}\", err);\n\n }\n\n };\n\n let header = match kind {\n\n object::FileKind::Elf64 => match object::elf::FileHeader64::<Endianness>::parse(bytes) {\n\n Ok(header) => header,\n\n Err(err) => {\n\n bail!(\"Unsupported ELF file: {}\", err);\n\n }\n\n },\n\n _ => {\n\n bail!(\"only 64-bit ELF files currently supported\")\n\n }\n", "file_path": "crates/jit/src/debug.rs", "rank": 53, "score": 429571.2424905441 }, { "content": "fn typecheck_defs(store: &mut StoreOpaque, module: &Module, imports: &[Definition]) -> Result<()> {\n\n for import in imports {\n\n if !import.comes_from_same_store(store) {\n\n bail!(\"cross-`Store` instantiation is not currently supported\");\n\n }\n\n }\n\n typecheck(store, module, imports, |cx, ty, item| {\n\n cx.definition(ty, item)\n\n })\n\n}\n\n\n", "file_path": "crates/wasmtime/src/instance.rs", "rank": 54, "score": 429334.0329227116 }, { "content": "/// Construct a dummy table for the given table type.\n\npub fn dummy_table<T>(store: &mut Store<T>, ty: TableType) -> Result<Table> {\n\n let init_val = dummy_value(ty.element().clone());\n\n Table::new(store, ty, init_val)\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 55, "score": 428255.86541355774 }, { "content": "/// Construct a dummy memory for the given memory type.\n\npub fn dummy_memory<T>(store: &mut Store<T>, ty: MemoryType) -> Result<Memory> {\n\n Memory::new(store, ty)\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 56, "score": 428255.86541355774 }, { "content": "// Generated as internal constructor for term not.\n\npub fn constructor_not<C: Context>(ctx: &mut C, arg0: Type, arg1: Reg) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = arg1;\n\n // Rule at src/isa/x64/inst.isle line 1225.\n\n let expr0_0 = C::temp_writable_reg(ctx, pattern0_0);\n\n let expr1_0 = C::operand_size_of_type(ctx, pattern0_0);\n\n let expr2_0 = MInst::Not {\n\n size: expr1_0,\n\n src: pattern1_0,\n\n dst: expr0_0,\n\n };\n\n let expr3_0 = C::emit(ctx, &expr2_0);\n\n let expr4_0 = C::writable_reg_to_reg(ctx, expr0_0);\n\n return Some(expr4_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 57, "score": 425309.5710225145 }, { "content": "// Generated as internal constructor for term imm.\n\npub fn constructor_imm<C: Context>(ctx: &mut C, arg0: Type, arg1: u64) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n if pattern0_0 == I64 {\n\n let pattern2_0 = arg1;\n\n if let Some(pattern3_0) = C::nonzero_u64_fits_in_u32(ctx, pattern2_0) {\n\n // Rule at src/isa/x64/inst.isle line 628.\n\n let expr0_0: Type = I64;\n\n let expr1_0 = C::temp_writable_reg(ctx, expr0_0);\n\n let expr2_0 = OperandSize::Size32;\n\n let expr3_0 = MInst::Imm {\n\n dst_size: expr2_0,\n\n simm64: pattern3_0,\n\n dst: expr1_0,\n\n };\n\n let expr4_0 = C::emit(ctx, &expr3_0);\n\n let expr5_0 = C::writable_reg_to_reg(ctx, expr1_0);\n\n return Some(expr5_0);\n\n }\n\n }\n\n if pattern0_0 == F32 {\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 58, "score": 420879.8127059651 }, { "content": "// Generated as internal constructor for term 
imm.\n\npub fn constructor_imm<C: Context>(ctx: &mut C, arg0: Type, arg1: u64) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n if let Some(pattern1_0) = C::integral_ty(ctx, pattern0_0) {\n\n let pattern2_0 = arg1;\n\n if let Some(pattern3_0) = C::imm_logic_from_u64(ctx, pattern2_0) {\n\n // Rule at src/isa/aarch64/inst.isle line 1513.\n\n let expr0_0 = ALUOp::Orr64;\n\n let expr1_0 = C::zero_reg(ctx);\n\n let expr2_0 = constructor_alu_rr_imm_logic(ctx, &expr0_0, expr1_0, pattern3_0)?;\n\n return Some(expr2_0);\n\n }\n\n if let Some(pattern3_0) = C::move_wide_const_from_u64(ctx, pattern2_0) {\n\n // Rule at src/isa/aarch64/inst.isle line 1505.\n\n let expr0_0 = OperandSize::Size64;\n\n let expr1_0 = constructor_movz(ctx, pattern3_0, &expr0_0)?;\n\n return Some(expr1_0);\n\n }\n\n if let Some(pattern3_0) = C::move_wide_const_from_negated_u64(ctx, pattern2_0) {\n\n // Rule at src/isa/aarch64/inst.isle line 1509.\n\n let expr0_0 = OperandSize::Size64;\n\n let expr1_0 = constructor_movn(ctx, pattern3_0, &expr0_0)?;\n\n return Some(expr1_0);\n\n }\n\n // Rule at src/isa/aarch64/inst.isle line 1520.\n\n let expr0_0 = C::load_constant64_full(ctx, pattern2_0);\n\n return Some(expr0_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 59, "score": 420879.8127059651 }, { "content": "fn instantiate_with_dummy(store: &mut Store<StoreLimits>, module: &Module) -> Option<Instance> {\n\n // Creation of imports can fail due to resource limit constraints, and then\n\n // instantiation can naturally fail for a number of reasons as well. Bundle\n\n // the two steps together to match on the error below.\n\n let instance =\n\n dummy::dummy_linker(store, module).and_then(|l| l.instantiate(&mut *store, module));\n\n\n\n let e = match instance {\n\n Ok(i) => return Some(i),\n\n Err(e) => e,\n\n };\n\n\n\n // If the instantiation hit OOM for some reason then that's ok, it's\n\n // expected that fuzz-generated programs try to allocate lots of\n\n // stuff.\n\n if store.data().oom {\n\n return None;\n\n }\n\n\n\n // Allow traps which can happen normally with `unreachable` or a\n", "file_path": "crates/fuzzing/src/oracles.rs", "rank": 60, "score": 418911.30842012784 }, { "content": "fn run(engine: &Engine, module: &Module, linker: &Linker<()>) -> Result<()> {\n\n // Each sub-thread we have starting out by instantiating the `module`\n\n // provided into a fresh `Store`.\n\n println!(\"Instantiating module...\");\n\n let mut store = Store::new(&engine, ());\n\n let instance = linker.instantiate(&mut store, module)?;\n\n let run = instance.get_typed_func::<(), (), _>(&mut store, \"run\")?;\n\n\n\n println!(\"Executing...\");\n\n for _ in 0..N_REPS {\n\n run.call(&mut store, ())?;\n\n thread::sleep(time::Duration::from_millis(100));\n\n }\n\n\n\n // Also note that that a `Store` can also move between threads:\n\n println!(\"> Moving {:?} to a new thread\", thread::current().id());\n\n let child = thread::spawn(move || run.call(&mut store, ()));\n\n\n\n child.join().unwrap()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/threads.rs", "rank": 61, "score": 410923.4699951551 }, { "content": "pub fn instantiate(data: &[u8], bin_name: &str, workspace: Option<&Path>) -> anyhow::Result<()> {\n\n run(data, bin_name, workspace, false)\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime/tokio.rs", "rank": 62, "score": 389397.47190937947 }, { "content": "/// Construct the list of compilations (transformations from ISLE\n\n/// source to 
generated Rust source) that exist in the repository.\n\nfn get_isle_compilations(crate_dir: &std::path::Path) -> Result<IsleCompilations, std::io::Error> {\n\n let cur_dir = std::env::current_dir()?;\n\n\n\n let clif_isle =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"clif.isle\"));\n\n let prelude_isle =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"prelude.isle\"));\n\n let src_isa_x64 =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"isa\").join(\"x64\"));\n\n let src_isa_aarch64 =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"isa\").join(\"aarch64\"));\n\n\n\n // This is a set of ISLE compilation units.\n\n //\n\n // The format of each entry is:\n\n //\n\n // (output Rust code file, input ISLE source files)\n\n //\n\n // There should be one entry for each backend that uses ISLE for lowering,\n\n // and if/when we replace our peephole optimization passes with ISLE, there\n", "file_path": "cranelift/codegen/build.rs", "rank": 63, "score": 382843.0884144376 }, { "content": "/// Perform differential execution between Cranelift and wasmi, diffing the\n\n/// resulting memory image when execution terminates. This relies on the\n\n/// module-under-test to be instrumented to bound the execution time. Invoke\n\n/// with a module generated by `wasm-smith` using the\n\n/// `SingleFunctionModuleConfig` configuration type for best results.\n\n///\n\n/// May return `None` if we early-out due to a rejected fuzz config; these\n\n/// should be rare if modules are generated appropriately.\n\npub fn differential_wasmi_execution(wasm: &[u8], config: &crate::generators::Config) -> Option<()> {\n\n crate::init_fuzzing();\n\n log_wasm(wasm);\n\n\n\n // Instantiate wasmi module and instance.\n\n let wasmi_module = wasmi::Module::from_buffer(&wasm[..]).ok()?;\n\n let wasmi_instance =\n\n wasmi::ModuleInstance::new(&wasmi_module, &wasmi::ImportsBuilder::default()).ok()?;\n\n let wasmi_instance = wasmi_instance.assert_no_start();\n\n\n\n // If wasmi succeeded then we assert that wasmtime will also succeed.\n\n let (wasmtime_module, mut wasmtime_store) = differential_store(wasm, config);\n\n let wasmtime_instance = Instance::new(&mut wasmtime_store, &wasmtime_module, &[])\n\n .expect(\"Wasmtime can instantiate module\");\n\n\n\n // Introspect wasmtime module to find name of an exported function and of an\n\n // exported memory.\n\n let (func_name, ty) = first_exported_function(&wasmtime_module)?;\n\n let memory_name = first_exported_memory(&wasmtime_module)?;\n\n\n", "file_path": "crates/fuzzing/src/oracles.rs", "rank": 64, "score": 380694.07810630475 }, { "content": "/// Perform differential execution between Wasmtime and the official WebAssembly\n\n/// specification interpreter.\n\n///\n\n/// May return `None` if we early-out due to a rejected fuzz config.\n\npub fn differential_spec_execution(wasm: &[u8], config: &crate::generators::Config) -> Option<()> {\n\n crate::init_fuzzing();\n\n debug!(\"config: {:#?}\", config);\n\n log_wasm(wasm);\n\n\n\n // Run the spec interpreter first, then Wasmtime. The order is important\n\n // because both sides (OCaml runtime and Wasmtime) register signal handlers;\n\n // Wasmtime uses these signal handlers for catching various WebAssembly\n\n // failures. On certain OSes (e.g. Linux x86_64), the signal handlers\n\n // interfere, observable as an uncaught `SIGSEGV`--not even caught by\n\n // libFuzzer. 
By running Wasmtime second, its signal handlers are registered\n\n // most recently and they catch failures appropriately.\n\n let spec_vals = wasm_spec_interpreter::interpret(wasm, vec![]);\n\n debug!(\"spec interpreter returned: {:?}\", &spec_vals);\n\n let wasmtime_vals = run_in_wasmtime(wasm, config, &[]);\n\n debug!(\"Wasmtime returned: {:?}\", wasmtime_vals);\n\n\n\n // Match a spec interpreter value against a Wasmtime value. Eventually this\n\n // should support references and `v128` (TODO).\n\n fn matches(spec_val: &wasm_spec_interpreter::Value, wasmtime_val: &wasmtime::Val) -> bool {\n", "file_path": "crates/fuzzing/src/oracles.rs", "rank": 65, "score": 380670.41534035595 }, { "content": "/// Generates all the Rust source files used in Cranelift from the meta-language.\n\npub fn generate(isas: &[isa::Isa], out_dir: &str, crate_dir: &Path) -> Result<(), error::Error> {\n\n // Create all the definitions:\n\n // - common definitions.\n\n let mut shared_defs = shared::define();\n\n\n\n gen_settings::generate(\n\n &shared_defs.settings,\n\n gen_settings::ParentGroup::None,\n\n \"settings.rs\",\n\n &out_dir,\n\n )?;\n\n gen_types::generate(\"types.rs\", &out_dir)?;\n\n\n\n // - per ISA definitions.\n\n let target_isas = isa::define(isas, &mut shared_defs);\n\n\n\n // At this point, all definitions are done.\n\n let all_formats = shared_defs.verify_instruction_formats();\n\n\n\n // Generate all the code.\n", "file_path": "cranelift/codegen/meta/src/lib.rs", "rank": 66, "score": 380326.3513556041 }, { "content": "pub fn instantiate(data: &[u8], bin_name: &str, workspace: Option<&Path>) -> anyhow::Result<()> {\n\n run(data, bin_name, workspace, false)\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime/cap_std_sync.rs", "rank": 67, "score": 380001.457333393 }, { "content": "/// Prints:\n\n/// ; error: [ERROR BODY]\n\nfn print_error(w: &mut dyn Write, err: VerifierError) -> fmt::Result {\n\n writeln!(w, \"; error: {}\", err.to_string())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 68, "score": 375935.88420056645 }, { "content": "/// Performs differential execution between Wasmtime and V8.\n\n///\n\n/// This will instantiate the `wasm` provided, which should have no host\n\n/// imports, and then run it in Wasmtime with the `config` specified and V8 with\n\n/// default settings. 
The first export is executed and if memory is exported\n\n/// it's compared as well.\n\n///\n\n/// Note that it's the caller's responsibility to ensure that the `wasm`\n\n/// doesn't infinitely loop as no protections are done in v8 to prevent this\n\n/// from happening.\n\npub fn differential_v8_execution(wasm: &[u8], config: &crate::generators::Config) -> Option<()> {\n\n // Wasmtime setup\n\n crate::init_fuzzing();\n\n log_wasm(wasm);\n\n let (wasmtime_module, mut wasmtime_store) = super::differential_store(wasm, config);\n\n log::trace!(\"compiled module with wasmtime\");\n\n\n\n // V8 setup\n\n let mut isolate = isolate();\n\n let mut scope = v8::HandleScope::new(&mut *isolate);\n\n let context = v8::Context::new(&mut scope);\n\n let global = context.global(&mut scope);\n\n let mut scope = v8::ContextScope::new(&mut scope, context);\n\n\n\n // V8: compile module\n\n let buf = v8::ArrayBuffer::new_backing_store_from_boxed_slice(wasm.into());\n\n let buf = v8::SharedRef::from(buf);\n\n let name = v8::String::new(&mut scope, \"WASM_BINARY\").unwrap();\n\n let buf = v8::ArrayBuffer::with_backing_store(&mut scope, &buf);\n\n global.set(&mut scope, name.into(), buf.into());\n", "file_path": "crates/fuzzing/src/oracles/v8.rs", "rank": 69, "score": 375844.2099092582 }, { "content": "fn exit_wasm<T>(store: &mut StoreContextMut<'_, T>, prev_stack: Option<usize>) {\n\n // If we don't have a previous stack pointer to restore, then there's no\n\n // cleanup we need to perform here.\n\n let prev_stack = match prev_stack {\n\n Some(stack) => stack,\n\n None => return,\n\n };\n\n\n\n // Only if we're restoring a top-level value do we clear the stack canary\n\n // value. Otherwise our purpose here might be restoring a recursive stack\n\n // limit but leaving the active canary in place.\n\n if prev_stack == usize::max_value() {\n\n store.0.externref_activations_table().set_stack_canary(None);\n\n }\n\n\n\n // see docs above for why this uses `Relaxed`\n\n store.0.interrupts().stack_limit.store(prev_stack, Relaxed);\n\n}\n\n\n\n/// A trait implemented for types which can be returned from closures passed to\n", "file_path": "crates/wasmtime/src/func.rs", "rank": 70, "score": 369903.11105452495 }, { "content": "// Generated as internal constructor for term pabsd.\n\npub fn constructor_pabsd<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1092.\n\n let expr0_0 = SseOpcode::Pabsd;\n\n let expr1_0 = constructor_xmm_unary_rm_r(ctx, &expr0_0, pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 71, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term pabsb.\n\npub fn constructor_pabsb<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1082.\n\n let expr0_0 = SseOpcode::Pabsb;\n\n let expr1_0 = constructor_xmm_unary_rm_r(ctx, &expr0_0, pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 72, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term pmovzxbw.\n\npub fn constructor_pmovzxbw<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1077.\n\n let expr0_0 = SseOpcode::Pmovzxbw;\n\n let expr1_0 = constructor_xmm_unary_rm_r(ctx, &expr0_0, 
pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 73, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term lo_reg.\n\npub fn constructor_lo_reg<C: Context>(ctx: &mut C, arg0: Value) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/prelude.isle line 95.\n\n let expr0_0 = C::put_in_regs(ctx, pattern0_0);\n\n let expr1_0: usize = 0;\n\n let expr2_0 = C::value_regs_get(ctx, expr0_0, expr1_0);\n\n return Some(expr2_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 74, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term lower.\n\npub fn constructor_lower<C: Context>(ctx: &mut C, arg0: Inst) -> Option<ValueRegs> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = C::inst_data(ctx, pattern0_0);\n\n match &pattern1_0 {\n\n &InstructionData::UnaryIeee32 {\n\n opcode: ref pattern2_0,\n\n imm: pattern2_1,\n\n } => {\n\n if let &Opcode::F32const = &pattern2_0 {\n\n let pattern4_0 = C::u64_from_ieee32(ctx, pattern2_1);\n\n // Rule at src/isa/x64/lower.isle line 46.\n\n let expr0_0: Type = F32;\n\n let expr1_0 = constructor_imm(ctx, expr0_0, pattern4_0)?;\n\n let expr2_0 = C::value_reg(ctx, expr1_0);\n\n return Some(expr2_0);\n\n }\n\n }\n\n &InstructionData::UnaryIeee64 {\n\n opcode: ref pattern2_0,\n\n imm: pattern2_1,\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 75, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term pmovsxbw.\n\npub fn constructor_pmovsxbw<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1072.\n\n let expr0_0 = SseOpcode::Pmovsxbw;\n\n let expr1_0 = constructor_xmm_unary_rm_r(ctx, &expr0_0, pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 76, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term lo_reg.\n\npub fn constructor_lo_reg<C: Context>(ctx: &mut C, arg0: Value) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/prelude.isle line 95.\n\n let expr0_0 = C::put_in_regs(ctx, pattern0_0);\n\n let expr1_0: usize = 0;\n\n let expr2_0 = C::value_regs_get(ctx, expr0_0, expr1_0);\n\n return Some(expr2_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 77, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term lower.\n\npub fn constructor_lower<C: Context>(ctx: &mut C, arg0: Inst) -> Option<ValueRegs> {\n\n let pattern0_0 = arg0;\n\n if let Some(pattern1_0) = C::first_result(ctx, pattern0_0) {\n\n let pattern2_0 = C::value_type(ctx, pattern1_0);\n\n if pattern2_0 == I64 {\n\n let pattern4_0 = C::inst_data(ctx, pattern0_0);\n\n if let &InstructionData::Binary {\n\n opcode: ref pattern5_0,\n\n args: ref pattern5_1,\n\n } = &pattern4_0\n\n {\n\n match &pattern5_0 {\n\n &Opcode::Umulhi => {\n\n let (pattern7_0, pattern7_1) = C::unpack_value_array_2(ctx, &pattern5_1);\n\n // Rule at src/isa/aarch64/lower.isle line 389.\n\n let expr0_0 = ALUOp::UMulH;\n\n let expr1_0 = C::put_in_reg(ctx, pattern7_0);\n\n let expr2_0 = C::put_in_reg(ctx, pattern7_1);\n\n let expr3_0 = constructor_alu_rrr(ctx, &expr0_0, expr1_0, expr2_0)?;\n\n let expr4_0 = C::value_reg(ctx, expr3_0);\n", "file_path": 
"cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 78, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term vpabsq.\n\npub fn constructor_vpabsq<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1104.\n\n let expr0_0 = Avx512Opcode::Vpabsq;\n\n let expr1_0 = constructor_xmm_unary_rm_r_evex(ctx, &expr0_0, pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 79, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term pabsw.\n\npub fn constructor_pabsw<C: Context>(ctx: &mut C, arg0: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/inst.isle line 1087.\n\n let expr0_0 = SseOpcode::Pabsw;\n\n let expr1_0 = constructor_xmm_unary_rm_r(ctx, &expr0_0, pattern0_0)?;\n\n return Some(expr1_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 80, "score": 368886.85041784844 }, { "content": "// Generated as internal constructor for term i128_not.\n\npub fn constructor_i128_not<C: Context>(ctx: &mut C, arg0: Value) -> Option<ValueRegs> {\n\n let pattern0_0 = arg0;\n\n // Rule at src/isa/x64/lower.isle line 1018.\n\n let expr0_0 = C::put_in_regs(ctx, pattern0_0);\n\n let expr1_0: usize = 0;\n\n let expr2_0 = C::value_regs_get(ctx, expr0_0, expr1_0);\n\n let expr3_0: usize = 1;\n\n let expr4_0 = C::value_regs_get(ctx, expr0_0, expr3_0);\n\n let expr5_0: Type = I64;\n\n let expr6_0 = constructor_not(ctx, expr5_0, expr2_0)?;\n\n let expr7_0: Type = I64;\n\n let expr8_0 = constructor_not(ctx, expr7_0, expr4_0)?;\n\n let expr9_0 = C::value_regs(ctx, expr6_0, expr8_0);\n\n return Some(expr9_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 81, "score": 368886.85041784844 }, { "content": "/// Construct a sequence of dummy values for the given types.\n\npub fn dummy_values(val_tys: impl IntoIterator<Item = ValType>) -> Vec<Val> {\n\n val_tys.into_iter().map(dummy_value).collect()\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 82, "score": 366697.8534743453 }, { "content": "/// Format a floating point number in a way that is reasonably human-readable, and that can be\n\n/// converted back to binary without any rounding issues. The hexadecimal formatting of normal and\n\n/// subnormal numbers is compatible with C99 and the `printf \"%a\"` format specifier. 
The NaN and Inf\n\n/// formats are not supported by C99.\n\n///\n\n/// The encoding parameters are:\n\n///\n\n/// w - exponent field width in bits\n\n/// t - trailing significand field width in bits\n\n///\n\nfn format_float(bits: u64, w: u8, t: u8, f: &mut Formatter) -> fmt::Result {\n\n debug_assert!(w > 0 && w <= 16, \"Invalid exponent range\");\n\n debug_assert!(1 + w + t <= 64, \"Too large IEEE format for u64\");\n\n debug_assert!((t + w + 1).is_power_of_two(), \"Unexpected IEEE format size\");\n\n\n\n let max_e_bits = (1u64 << w) - 1;\n\n let t_bits = bits & ((1u64 << t) - 1); // Trailing significand.\n\n let e_bits = (bits >> t) & max_e_bits; // Biased exponent.\n\n let sign_bit = (bits >> (w + t)) & 1;\n\n\n\n let bias: i32 = (1 << (w - 1)) - 1;\n\n let e = e_bits as i32 - bias; // Unbiased exponent.\n\n let emin = 1 - bias; // Minimum exponent.\n\n\n\n // How many hexadecimal digits are needed for the trailing significand?\n\n let digits = (t + 3) / 4;\n\n // Trailing significand left-aligned in `digits` hexadecimal digits.\n\n let left_t_bits = t_bits << (4 * digits - t);\n\n\n\n // All formats share the leading sign.\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 83, "score": 366005.9386449135 }, { "content": "/// Construct a dummy function for the given function type\n\npub fn dummy_func<T>(store: &mut Store<T>, ty: FuncType) -> Func {\n\n Func::new(store, ty.clone(), move |_, _, results| {\n\n for (ret_ty, result) in ty.results().zip(results) {\n\n *result = dummy_value(ret_ty);\n\n }\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 84, "score": 365421.65897816076 }, { "content": "/// Construct a dummy global for the given global type.\n\npub fn dummy_global<T>(store: &mut Store<T>, ty: GlobalType) -> Global {\n\n let val = dummy_value(ty.content().clone());\n\n Global::new(store, ty, val).unwrap()\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 85, "score": 365421.65897816076 }, { "content": "// Generated as internal constructor for term put_in_reg_sext64.\n\npub fn constructor_put_in_reg_sext64<C: Context>(ctx: &mut C, arg0: Value) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = C::value_type(ctx, pattern0_0);\n\n if pattern1_0 == I64 {\n\n // Rule at src/isa/aarch64/inst.isle line 1534.\n\n let expr0_0 = C::put_in_reg(ctx, pattern0_0);\n\n return Some(expr0_0);\n\n }\n\n if let Some(pattern2_0) = C::fits_in_32(ctx, pattern1_0) {\n\n // Rule at src/isa/aarch64/inst.isle line 1527.\n\n let expr0_0: Type = I32;\n\n let expr1_0 = C::temp_writable_reg(ctx, expr0_0);\n\n let expr2_0 = C::put_in_reg(ctx, pattern0_0);\n\n let expr3_0: bool = true;\n\n let expr4_0 = C::ty_bits(ctx, pattern2_0);\n\n let expr5_0: u8 = 64;\n\n let expr6_0 = MInst::Extend {\n\n rd: expr1_0,\n\n rn: expr2_0,\n\n signed: expr3_0,\n\n from_bits: expr4_0,\n\n to_bits: expr5_0,\n\n };\n\n let expr7_0 = C::emit(ctx, &expr6_0);\n\n let expr8_0 = C::writable_reg_to_reg(ctx, expr1_0);\n\n return Some(expr8_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 86, "score": 365196.60204514384 }, { "content": "// Generated as internal constructor for term put_in_reg_zext64.\n\npub fn constructor_put_in_reg_zext64<C: Context>(ctx: &mut C, arg0: Value) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = C::value_type(ctx, pattern0_0);\n\n if pattern1_0 == I64 {\n\n // Rule at src/isa/aarch64/inst.isle line 1545.\n\n let expr0_0 = 
C::put_in_reg(ctx, pattern0_0);\n\n return Some(expr0_0);\n\n }\n\n if let Some(pattern2_0) = C::fits_in_32(ctx, pattern1_0) {\n\n // Rule at src/isa/aarch64/inst.isle line 1538.\n\n let expr0_0: Type = I32;\n\n let expr1_0 = C::temp_writable_reg(ctx, expr0_0);\n\n let expr2_0 = C::put_in_reg(ctx, pattern0_0);\n\n let expr3_0: bool = false;\n\n let expr4_0 = C::ty_bits(ctx, pattern2_0);\n\n let expr5_0: u8 = 64;\n\n let expr6_0 = MInst::Extend {\n\n rd: expr1_0,\n\n rn: expr2_0,\n\n signed: expr3_0,\n\n from_bits: expr4_0,\n\n to_bits: expr5_0,\n\n };\n\n let expr7_0 = C::emit(ctx, &expr6_0);\n\n let expr8_0 = C::writable_reg_to_reg(ctx, expr1_0);\n\n return Some(expr8_0);\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 87, "score": 365196.60204514384 }, { "content": "/// Returns the range of `inner` within `outer`, such that `outer[range]` is the\n\n/// same as `inner`.\n\n///\n\n/// This method requires that `inner` is a sub-slice of `outer`, and if that\n\n/// isn't true then this method will panic.\n\npub fn subslice_range(inner: &[u8], outer: &[u8]) -> Range<usize> {\n\n if inner.len() == 0 {\n\n return 0..0;\n\n }\n\n\n\n assert!(outer.as_ptr() <= inner.as_ptr());\n\n assert!((&inner[inner.len() - 1] as *const _) <= (&outer[outer.len() - 1] as *const _));\n\n\n\n let start = inner.as_ptr() as usize - outer.as_ptr() as usize;\n\n start..start + inner.len()\n\n}\n\n\n", "file_path": "crates/jit/src/instantiate.rs", "rank": 88, "score": 362402.13678274036 }, { "content": "// Generated as internal constructor for term operand_size_bits.\n\npub fn constructor_operand_size_bits<C: Context>(ctx: &mut C, arg0: &OperandSize) -> Option<u16> {\n\n let pattern0_0 = arg0;\n\n match pattern0_0 {\n\n &OperandSize::Size8 => {\n\n // Rule at src/isa/x64/inst.isle line 86.\n\n let expr0_0: u16 = 8;\n\n return Some(expr0_0);\n\n }\n\n &OperandSize::Size16 => {\n\n // Rule at src/isa/x64/inst.isle line 87.\n\n let expr0_0: u16 = 16;\n\n return Some(expr0_0);\n\n }\n\n &OperandSize::Size32 => {\n\n // Rule at src/isa/x64/inst.isle line 88.\n\n let expr0_0: u16 = 32;\n\n return Some(expr0_0);\n\n }\n\n &OperandSize::Size64 => {\n\n // Rule at src/isa/x64/inst.isle line 89.\n\n let expr0_0: u16 = 64;\n\n return Some(expr0_0);\n\n }\n\n _ => {}\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 89, "score": 361633.59244931187 }, { "content": "fn serialize(engine: &Engine, wat: &str) -> Result<Vec<u8>> {\n\n let module = Module::new(&engine, wat)?;\n\n Ok(module.serialize()?)\n\n}\n\n\n\nunsafe fn deserialize_and_instantiate(store: &mut Store<()>, buffer: &[u8]) -> Result<Instance> {\n\n let module = Module::deserialize(store.engine(), buffer)?;\n\n Ok(Instance::new(store, &module, &[])?)\n\n}\n\n\n", "file_path": "tests/all/module_serialize.rs", "rank": 90, "score": 361133.2984011928 }, { "content": "pub fn stdout() -> Stdout {\n\n Stdout(wasi_cap_std_sync::stdio::stdout())\n\n}\n\n\n\npub struct Stderr(wasi_cap_std_sync::stdio::Stderr);\n\n\n", "file_path": "crates/wasi-common/tokio/src/file.rs", "rank": 91, "score": 360506.2319567801 }, { "content": "pub fn stdin() -> Stdin {\n\n Stdin(wasi_cap_std_sync::stdio::stdin())\n\n}\n\n\n\npub struct Stdout(wasi_cap_std_sync::stdio::Stdout);\n\n\n", "file_path": "crates/wasi-common/tokio/src/file.rs", "rank": 92, "score": 360504.58540599776 }, { "content": "pub fn stderr() -> Stderr {\n\n 
Stderr(wasi_cap_std_sync::stdio::stderr())\n\n}\n\n\n\nmacro_rules! wasi_file_impl {\n\n ($ty:ty) => {\n\n #[wiggle::async_trait]\n\n impl WasiFile for $ty {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n async fn datasync(&self) -> Result<(), Error> {\n\n block_on_dummy_executor(|| self.0.datasync())\n\n }\n\n async fn sync(&self) -> Result<(), Error> {\n\n block_on_dummy_executor(|| self.0.sync())\n\n }\n\n async fn get_filetype(&self) -> Result<FileType, Error> {\n\n block_on_dummy_executor(|| self.0.get_filetype())\n\n }\n", "file_path": "crates/wasi-common/tokio/src/file.rs", "rank": 93, "score": 360501.27644475776 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "cranelift/wasm/src/code_translator.rs", "rank": 94, "score": 357540.6000303073 }, { "content": "/// Build a filechecker using the directives in the file preamble and the function's comments.\n\npub fn build_filechecker(context: &Context) -> anyhow::Result<Checker> {\n\n let mut builder = CheckerBuilder::new();\n\n // Preamble comments apply to all functions.\n\n for comment in context.preamble_comments {\n\n builder\n\n .directive(comment.text)\n\n .context(\"filecheck directive failed\")?;\n\n }\n\n for comment in &context.details.comments {\n\n builder\n\n .directive(comment.text)\n\n .context(\"filecheck directive failed\")?;\n\n }\n\n Ok(builder.finish())\n\n}\n", "file_path": "cranelift/filetests/src/subtest.rs", "rank": 95, "score": 357489.4627638584 }, { "content": "#[test]\n\nfn instantiate() -> Result<(), Error> {\n\n let mut store = Store::<State>::default();\n\n store.call_hook(State::call_hook);\n\n\n\n let m = Module::new(store.engine(), \"(module)\")?;\n\n Instance::new(&mut store, &m, &[])?;\n\n assert_eq!(store.data().calls_into_wasm, 0);\n\n assert_eq!(store.data().calls_into_host, 0);\n\n\n\n let m = Module::new(store.engine(), \"(module (func) (start 0))\")?;\n\n Instance::new(&mut store, &m, &[])?;\n\n assert_eq!(store.data().calls_into_wasm, 1);\n\n assert_eq!(store.data().calls_into_host, 0);\n\n\n\n Ok(())\n\n}\n\n\n\n#[tokio::test]\n\nasync fn instantiate_async() -> Result<(), Error> {\n\n let mut config = Config::new();\n", "file_path": "tests/all/call_hook.rs", "rank": 96, "score": 351800.01245311636 }, { "content": "// Generated as internal constructor for term adc64.\n\npub fn constructor_adc64<C: Context>(ctx: &mut C, arg0: Reg, arg1: Reg) -> Option<ConsumesFlags> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = arg1;\n\n // Rule at src/isa/aarch64/inst.isle line 1442.\n\n let expr0_0: Type = I64;\n\n let expr1_0 = C::temp_writable_reg(ctx, expr0_0);\n\n let expr2_0 = ALUOp::Adc64;\n\n let expr3_0 = MInst::AluRRR {\n\n alu_op: expr2_0,\n\n rd: expr1_0,\n\n rn: pattern0_0,\n\n rm: pattern1_0,\n\n };\n\n let expr4_0 = C::writable_reg_to_reg(ctx, expr1_0);\n\n let expr5_0 = ConsumesFlags::ConsumesFlags {\n\n inst: expr3_0,\n\n result: expr4_0,\n\n };\n\n return Some(expr5_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 97, "score": 350843.1230876753 }, { "content": "// 
Generated as internal constructor for term sbc64.\n\npub fn constructor_sbc64<C: Context>(ctx: &mut C, arg0: Reg, arg1: Reg) -> Option<ConsumesFlags> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = arg1;\n\n // Rule at src/isa/aarch64/inst.isle line 1456.\n\n let expr0_0: Type = I64;\n\n let expr1_0 = C::temp_writable_reg(ctx, expr0_0);\n\n let expr2_0 = ALUOp::Sbc64;\n\n let expr3_0 = MInst::AluRRR {\n\n alu_op: expr2_0,\n\n rd: expr1_0,\n\n rn: pattern0_0,\n\n rm: pattern1_0,\n\n };\n\n let expr4_0 = C::writable_reg_to_reg(ctx, expr1_0);\n\n let expr5_0 = ConsumesFlags::ConsumesFlags {\n\n inst: expr3_0,\n\n result: expr4_0,\n\n };\n\n return Some(expr5_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/aarch64/lower/isle/generated_code.rs", "rank": 98, "score": 350843.1230876753 }, { "content": "// Generated as internal constructor for term paddb.\n\npub fn constructor_paddb<C: Context>(ctx: &mut C, arg0: Reg, arg1: &RegMem) -> Option<Reg> {\n\n let pattern0_0 = arg0;\n\n let pattern1_0 = arg1;\n\n // Rule at src/isa/x64/inst.isle line 759.\n\n let expr0_0: Type = I8X16;\n\n let expr1_0 = SseOpcode::Paddb;\n\n let expr2_0 = constructor_xmm_rm_r(ctx, expr0_0, &expr1_0, pattern0_0, pattern1_0)?;\n\n return Some(expr2_0);\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/lower/isle/generated_code.rs", "rank": 99, "score": 350843.1230876753 } ]
Rust
src/stream/server/ts.rs
burjee/mock-yo-stream
6161822cccb477c4f33217788078463d7cad9c7c
use std::fs::File; use mpeg2ts; use mpeg2ts::{ ts::{TsPacket, TsHeader, TsPayload, Pid, ContinuityCounter}, pes::PesHeader, }; pub struct TransportStream { video_continuity_counter: ContinuityCounter, audio_continuity_counter: ContinuityCounter, packets: Vec<TsPacket>, } impl TransportStream { const PAT_PID: u16 = 0; const PMT_PID: u16 = 256; const VIDEO_PID: u16 = 257; const AUDIO_PID: u16 = 258; const VIDEO_STREAM_ID: u8 = 224; const AUDIO_STREAM_ID: u8 = 192; pub fn new() -> TransportStream { TransportStream { video_continuity_counter: ContinuityCounter::new(), audio_continuity_counter: ContinuityCounter::new(), packets: Vec::new(), } } pub fn write_file(&mut self, filename: &str) { use mpeg2ts::ts::{TsPacketWriter, WriteTsPacket}; let filename = format!("./video/{}", filename); let file = File::create(filename).unwrap(); let packets: Vec<_> = self.packets.drain(..).collect(); let mut writer = TsPacketWriter::new(file); writer.write_ts_packet(&TransportStream::default_pat()).unwrap(); writer.write_ts_packet(&TransportStream::default_pmt()).unwrap(); for packet in &packets { writer.write_ts_packet(packet).unwrap(); } } pub fn push_video(&mut self, timestamp: u64, composition_time: u64, is_keyframe: bool, mut video: Vec<u8>) -> Result<(), ()> { use mpeg2ts::{ ts::{AdaptationField, payload}, es::StreamId, }; let mut header = TransportStream::default_header(TransportStream::VIDEO_PID); header.continuity_counter = self.video_continuity_counter; let packet = { let data = { let bytes: Vec<u8> = if video.len() < 153 { video.drain(..).collect() } else { video.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let pcr = mpeg2ts::time::ClockReference::new(timestamp * 90).unwrap(); let adaptation_field = if is_keyframe { Some(AdaptationField { discontinuity_indicator: false, random_access_indicator: true, es_priority_indicator: false, pcr: Some(pcr), opcr: None, splice_countdown: None, transport_private_data: Vec::new(), extension: None, }) } else { None }; let pts = mpeg2ts::time::Timestamp::new((timestamp + composition_time) * 90).unwrap(); let dts = mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap(); TsPacket { header: header.clone(), adaptation_field, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::VIDEO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(pts), dts: Some(dts), escr: None, }, pes_packet_len: 0, data, })), } }; self.packets.push(packet); header.continuity_counter.increment(); while video.len() > 0 { let raw = { let bytes: Vec<u8> = if video.len() < payload::Bytes::MAX_SIZE { video.drain(..).collect() } else { video.drain(..payload::Bytes::MAX_SIZE).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; self.packets.push(packet); header.continuity_counter.increment(); } self.video_continuity_counter = header.continuity_counter; Ok(()) } pub fn push_audio(&mut self, timestamp: u64, mut audio: Vec<u8>) { use mpeg2ts::{ts::payload, es::StreamId}; let data = { let bytes: Vec<u8> = if audio.len() < 153 { audio.drain(..).collect() } else { audio.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let mut header = TransportStream::default_header(TransportStream::AUDIO_PID); header.continuity_counter = self.audio_continuity_counter; let packet = TsPacket { header: header.clone(), 
adaptation_field: None, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::AUDIO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap()), dts: None, escr: None, }, pes_packet_len: 0, data, })), }; self.packets.push(packet); header.continuity_counter.increment(); while audio.len() > 0 { let raw = { let bytes: Vec<u8> = if audio.len() < payload::Bytes::MAX_SIZE { audio.drain(..).collect() } else { audio.drain(..payload::Bytes::MAX_SIZE).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; self.packets.push(packet); header.continuity_counter.increment(); } self.audio_continuity_counter = header.continuity_counter; } pub fn default_header(pid: u16) -> TsHeader { use mpeg2ts::ts::TransportScramblingControl; TsHeader { transport_error_indicator: false, transport_priority: false, pid: Pid::new(pid).unwrap(), transport_scrambling_control: TransportScramblingControl::NotScrambled, continuity_counter: ContinuityCounter::new(), } } pub fn default_pat() -> TsPacket { use mpeg2ts::ts::{VersionNumber, payload::Pat, ProgramAssociation}; TsPacket { header: TransportStream::default_header(TransportStream::PAT_PID), adaptation_field: None, payload: Some(TsPayload::Pat(Pat { transport_stream_id: 1, version_number: VersionNumber::default(), table: vec![ProgramAssociation { program_num: 1, program_map_pid: Pid::new(TransportStream::PMT_PID).unwrap(), }], })), } } pub fn default_pmt() -> TsPacket { use mpeg2ts::{ ts::{VersionNumber, payload::Pmt, EsInfo}, es::StreamType, }; TsPacket { header: TransportStream::default_header(TransportStream::PMT_PID), adaptation_field: None, payload: Some(TsPayload::Pmt(Pmt { program_num: 1, pcr_pid: Some(Pid::new(TransportStream::VIDEO_PID).unwrap()), version_number: VersionNumber::default(), table: vec![ EsInfo { stream_type: StreamType::H264, elementary_pid: Pid::new(TransportStream::VIDEO_PID).unwrap(), descriptors: vec![], }, EsInfo { stream_type: StreamType::AdtsAac, elementary_pid: Pid::new(TransportStream::AUDIO_PID).unwrap(), descriptors: vec![], }, ], })), } } }
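The file_code above is an MPEG-TS muxer: it writes a PAT and PMT, then packetizes H.264 video and AAC audio into PES-carrying TS packets, converting millisecond timestamps to the 90 kHz MPEG-TS clock by multiplying by 90 (for video, the PTS adds the FLV composition-time offset, while the DTS and the PCR placed on keyframe packets use the raw decode timestamp). A minimal sketch of that conversion, reusing only the mpeg2ts calls already present in this source — the helper names are ours, not part of the record:

use mpeg2ts::time::{ClockReference, Timestamp};

// Hypothetical helper: convert a millisecond timestamp to the 90 kHz units
// expected by MPEG-TS PTS/DTS fields (1 ms = 90 ticks), mirroring what
// push_video and push_audio do above.
fn ms_to_pts(ms: u64) -> Timestamp {
    Timestamp::new(ms * 90).unwrap()
}

// Hypothetical helper: the PCR attached to keyframe packets uses the same scale.
fn ms_to_pcr(ms: u64) -> ClockReference {
    ClockReference::new(ms * 90).unwrap()
}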
use std::fs::File; use mpeg2ts; use mpeg2ts::{ ts::{TsPacket, TsHeader, TsPayload, Pid, ContinuityCounter}, pes::PesHeader, }; pub struct TransportStream { video_continuity_counter: ContinuityCounter, audio_continuity_counter: ContinuityCounter, packets: Vec<TsPacket>, } impl TransportStream { const PAT_PID: u16 = 0; const PMT_PID: u16 = 256; const VIDEO_PID: u16 = 257; const AUDIO_PID: u16 = 258; const VIDEO_STREAM_ID: u8 = 224; const AUDIO_STREAM_ID: u8 = 192; pub fn new() -> TransportStream { TransportStream { video_continuity_counter: ContinuityCounter::new(), audio_continuity_counter: ContinuityCounter::new(), packets: Vec::new(), } } pub fn write_file(&mut self, filename: &str) { use mpeg2ts::ts::{TsPacketWriter, WriteTsPacket}; let filename = format!("./video/{}", filename); let file = File:
e::H264, elementary_pid: Pid::new(TransportStream::VIDEO_PID).unwrap(), descriptors: vec![], }, EsInfo { stream_type: StreamType::AdtsAac, elementary_pid: Pid::new(TransportStream::AUDIO_PID).unwrap(), descriptors: vec![], }, ], })), } } }
:create(filename).unwrap(); let packets: Vec<_> = self.packets.drain(..).collect(); let mut writer = TsPacketWriter::new(file); writer.write_ts_packet(&TransportStream::default_pat()).unwrap(); writer.write_ts_packet(&TransportStream::default_pmt()).unwrap(); for packet in &packets { writer.write_ts_packet(packet).unwrap(); } } pub fn push_video(&mut self, timestamp: u64, composition_time: u64, is_keyframe: bool, mut video: Vec<u8>) -> Result<(), ()> { use mpeg2ts::{ ts::{AdaptationField, payload}, es::StreamId, }; let mut header = TransportStream::default_header(TransportStream::VIDEO_PID); header.continuity_counter = self.video_continuity_counter; let packet = { let data = { let bytes: Vec<u8> = if video.len() < 153 { video.drain(..).collect() } else { video.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let pcr = mpeg2ts::time::ClockReference::new(timestamp * 90).unwrap(); let adaptation_field = if is_keyframe { Some(AdaptationField { discontinuity_indicator: false, random_access_indicator: true, es_priority_indicator: false, pcr: Some(pcr), opcr: None, splice_countdown: None, transport_private_data: Vec::new(), extension: None, }) } else { None }; let pts = mpeg2ts::time::Timestamp::new((timestamp + composition_time) * 90).unwrap(); let dts = mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap(); TsPacket { header: header.clone(), adaptation_field, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::VIDEO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(pts), dts: Some(dts), escr: None, }, pes_packet_len: 0, data, })), } }; self.packets.push(packet); header.continuity_counter.increment(); while video.len() > 0 { let raw = { let bytes: Vec<u8> = if video.len() < payload::Bytes::MAX_SIZE { video.drain(..).collect() } else { video.drain(..payload::Bytes::MAX_SIZE).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; self.packets.push(packet); header.continuity_counter.increment(); } self.video_continuity_counter = header.continuity_counter; Ok(()) } pub fn push_audio(&mut self, timestamp: u64, mut audio: Vec<u8>) { use mpeg2ts::{ts::payload, es::StreamId}; let data = { let bytes: Vec<u8> = if audio.len() < 153 { audio.drain(..).collect() } else { audio.drain(..153).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let mut header = TransportStream::default_header(TransportStream::AUDIO_PID); header.continuity_counter = self.audio_continuity_counter; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Pes(payload::Pes { header: PesHeader { stream_id: StreamId::new(TransportStream::AUDIO_STREAM_ID), priority: false, data_alignment_indicator: false, copyright: false, original_or_copy: false, pts: Some(mpeg2ts::time::Timestamp::new(timestamp * 90).unwrap()), dts: None, escr: None, }, pes_packet_len: 0, data, })), }; self.packets.push(packet); header.continuity_counter.increment(); while audio.len() > 0 { let raw = { let bytes: Vec<u8> = if audio.len() < payload::Bytes::MAX_SIZE { audio.drain(..).collect() } else { audio.drain(..payload::Bytes::MAX_SIZE).collect() }; mpeg2ts::ts::payload::Bytes::new(&bytes[..]).unwrap() }; let packet = TsPacket { header: header.clone(), adaptation_field: None, payload: Some(TsPayload::Raw(raw)), }; 
self.packets.push(packet); header.continuity_counter.increment(); } self.audio_continuity_counter = header.continuity_counter; } pub fn default_header(pid: u16) -> TsHeader { use mpeg2ts::ts::TransportScramblingControl; TsHeader { transport_error_indicator: false, transport_priority: false, pid: Pid::new(pid).unwrap(), transport_scrambling_control: TransportScramblingControl::NotScrambled, continuity_counter: ContinuityCounter::new(), } } pub fn default_pat() -> TsPacket { use mpeg2ts::ts::{VersionNumber, payload::Pat, ProgramAssociation}; TsPacket { header: TransportStream::default_header(TransportStream::PAT_PID), adaptation_field: None, payload: Some(TsPayload::Pat(Pat { transport_stream_id: 1, version_number: VersionNumber::default(), table: vec![ProgramAssociation { program_num: 1, program_map_pid: Pid::new(TransportStream::PMT_PID).unwrap(), }], })), } } pub fn default_pmt() -> TsPacket { use mpeg2ts::{ ts::{VersionNumber, payload::Pmt, EsInfo}, es::StreamType, }; TsPacket { header: TransportStream::default_header(TransportStream::PMT_PID), adaptation_field: None, payload: Some(TsPayload::Pmt(Pmt { program_num: 1, pcr_pid: Some(Pid::new(TransportStream::VIDEO_PID).unwrap()), version_number: VersionNumber::default(), table: vec![ EsInfo { stream_type: StreamTyp
random
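The prefix, suffix, and middle fields above are a split of the same source under the random strategy: the prefix stops mid-token at `File:`, the middle resumes with `:create(filename)`, and the suffix carries the tail beginning `e::H264`. Assuming these records follow the usual fill-in-the-middle convention in which the three pieces concatenate back to file_code, a minimal consistency check — struct and field names here are illustrative, not part of the dataset — would be:

// Hypothetical record shape for one row of this dataset; only the fields
// needed for the check are included.
struct FimRecord {
    file_code: String,
    prefix: String,
    middle: String,
    suffix: String,
}

// Assumed invariant: prefix + middle + suffix reconstructs the original file.
fn reassembles(r: &FimRecord) -> bool {
    format!("{}{}{}", r.prefix, r.middle, r.suffix) == r.file_code
}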
[ { "content": "fn get_data_type(data_type: DataType) -> u8 {\n\n match data_type {\n\n DataType::Video => 0x09,\n\n DataType::Audio => 0x08,\n\n }\n\n}\n\n\n\n// --------------------\n\n// Flv File:\n\n// --------------------\n\n// Flv Header\n\n// Previous Tag Size 0\n\n// Tag 1\n\n// Previous Tag Size 1\n\n// ...\n\n// Tag N\n\n// Previous Tag Size N\n\n\n\n// ------------------------\n\n// Flv Tag\n\n// ------------------| --- |\n\n// Tag Type | u8 | 0x08=audio 0x09=video\n\n// Data Size | u24 | Data欄位的長度\n\n// Timestamp | u24 | 時間戳記(毫秒) 第一個tag的相對值\n\n// TimestampExtended | u8 | 時間戳記延伸 使時間戳記欄位變為U32 為第一個Byte\n\n// StreamID | u24 | 始終為 0\n\n// Data | [] | 資料內容\n", "file_path": "src/stream/server/flv.rs", "rank": 0, "score": 48900.642770635466 }, { "content": "fn file_not_found() -> Response<Body> {\n\n Response::builder().status(StatusCode::NOT_FOUND).body(\"404 NOT FOUND\".into()).unwrap()\n\n}\n\n\n\nasync fn file_response(path: &str) -> Result<Response<Body>, hyper::Error> {\n\n if let Ok(file) = File::open(path).await {\n\n let stream = FramedRead::new(file, BytesCodec::new());\n\n let body = Body::wrap_stream(stream);\n\n return Ok(Response::builder().header(\"Access-Control-Allow-Origin\", \"*\").body(body).unwrap());\n\n }\n\n Ok(file_not_found())\n\n}\n", "file_path": "src/media.rs", "rank": 1, "score": 48318.07585064928 }, { "content": "struct ClientMessage {\n\n pub id: usize,\n\n pub message: OwnedMessage,\n\n}\n\n\n\npub struct ChatServer {}\n\n\n\nimpl ChatServer {\n\n pub fn start(playlist: Arc<Mutex<PlayList>>) {\n\n let address = \"0.0.0.0:4343\";\n\n let server = Server::bind(address).unwrap();\n\n let (tx, rx) = mpsc::channel();\n\n let connections_map = Arc::new(Mutex::new(Slab::new()));\n\n let connections = Arc::new(Mutex::new(HashSet::new()));\n\n handle_message(connections_map.clone(), connections.clone(), rx);\n\n handle_status(playlist.clone(), connections_map.clone(), connections.clone());\n\n\n\n thread::spawn(move || {\n\n for request in server.filter_map(Result::ok) {\n\n if !request.protocols().contains(&String::from(\"yo-websocket\")) {\n", "file_path": "src/chat.rs", "rank": 2, "score": 35221.057577988395 }, { "content": "fn handle_message(connections_map: Arc<Mutex<Slab<Sender>>>, connections: Arc<Mutex<HashSet<usize>>>, rx: mpsc::Receiver<ClientMessage>) {\n\n thread::spawn(move || loop {\n\n match rx.recv() {\n\n Err(mpsc::RecvError) => {\n\n println!(\"chat channel closed!\");\n\n return;\n\n }\n\n Ok(client_message) => {\n\n let mut map = connections_map.lock().unwrap();\n\n let mut ids = connections.lock().unwrap();\n\n match client_message.message {\n\n OwnedMessage::Close(_) => {\n\n let message = OwnedMessage::Close(None);\n\n let sender = map.get_mut(client_message.id).unwrap();\n\n sender.send_message(&message).unwrap();\n\n map.remove(client_message.id);\n\n ids.remove(&client_message.id);\n\n println!(\"chat client disconnected!\");\n\n }\n\n OwnedMessage::Ping(ping) => {\n", "file_path": "src/chat.rs", "rank": 3, "score": 15921.352087550576 }, { "content": "fn handle_status(playlist: Arc<Mutex<PlayList>>, connections_map: Arc<Mutex<Slab<Sender>>>, connections: Arc<Mutex<HashSet<usize>>>) {\n\n thread::spawn(move || {\n\n let rx = {\n\n let playlist = playlist.lock().unwrap();\n\n playlist.rx.clone()\n\n };\n\n loop {\n\n match rx.lock().unwrap().recv() {\n\n Ok(server_message) => match server_message {\n\n ServerMessage::Live => {\n\n let mut map = connections_map.lock().unwrap();\n\n let ids = connections.lock().unwrap();\n\n for &id in 
&*ids {\n\n let sender = map.get_mut(id).unwrap();\n\n let message = OwnedMessage::Text(String::from(\"server@;live\"));\n\n sender.send_message(&message).unwrap();\n\n }\n\n println!(\"live!\");\n\n }\n\n ServerMessage::Off => println!(\"off!\"),\n\n },\n\n Err(mpsc::RecvError) => {\n\n println!(\"chat status channel closed!\");\n\n return;\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/chat.rs", "rank": 4, "score": 15921.352087550576 }, { "content": "use bytes::{Bytes, Buf};\n\n\n\n// Sound Format | u4 10 = AAC\n\n// Sound Rate | u2 AAC: always 3\n\n// Sound Size | u1\n\n// Sound Type | u1 AAC: always 1\n\n// AAC Packet Type | u8 0 = sequence header\n\n// Data | [u8]\n\npub struct FlvAudio {\n\n pub is_sequence_header: bool,\n\n pub data: Bytes,\n\n}\n\n\n\nimpl FlvAudio {\n\n pub fn read(mut data: Bytes) -> FlvAudio {\n\n let header = data.get_u16();\n\n let is_sequence_header = (header & 0xff) == 0;\n\n\n\n FlvAudio { is_sequence_header, data }\n\n }\n\n}\n", "file_path": "src/stream/server/flv/audio.rs", "rank": 12, "score": 9.82855494566596 }, { "content": "use bytes::{Bytes, Buf};\n\n\n\n// FLV Data - Normal\n\n// Field | Type\n\n// -------------------- | ---\n\n// Frame Type | u4\n\n// Codec ID | u4\n\n// AVC Packet Type | u8\n\n// Composition Time | i24\n\n// Body | [u8]\n\npub struct FlvVideo {\n\n pub is_keyframe: bool,\n\n pub is_sequence_header: bool,\n\n pub composition_time: u64,\n\n pub data: Bytes,\n\n}\n\n\n\nimpl FlvVideo {\n\n pub fn read(mut data: Bytes) -> FlvVideo {\n\n let byte0 = data.get_u8();\n", "file_path": "src/stream/server/flv/video.rs", "rank": 13, "score": 9.393677582457634 }, { "content": " pub profile_compatability: u8,\n\n pub level_indication: u8,\n\n pub nalu_size: u8,\n\n pub sps: Vec<Nalu>,\n\n pub pps: Vec<Nalu>,\n\n}\n\n\n\nimpl NaluConfig {\n\n pub fn new() -> NaluConfig {\n\n NaluConfig {\n\n version: 0,\n\n profile_indication: 0,\n\n profile_compatability: 0,\n\n level_indication: 0,\n\n nalu_size: 0,\n\n sps: Vec::new(),\n\n pps: Vec::new(),\n\n }\n\n }\n\n\n", "file_path": "src/stream/server/nalu.rs", "rank": 14, "score": 9.34242288527408 }, { "content": "use bytes::{Bytes, Buf};\n\n\n\n// Flv Data - Audio Sequence_Header\n\n// ------------------------| ----\n\n// Object Type | u5\n\n// Frequency Index | u4\n\n// Channel Configuration | u4\n\n// AOT Specific Config\n\n// Frame Length Flag | u1\n\n// Depends On Core Coder | u1\n\n// Extension Flag | u1\n\npub struct AdtsConfig {\n\n pub object_type: u8,\n\n pub sampling_frequency_index: u8,\n\n pub channel_configuration: u8,\n\n}\n\n\n\nimpl AdtsConfig {\n\n pub fn new() -> AdtsConfig {\n\n AdtsConfig {\n", "file_path": "src/stream/server/adts.rs", "rank": 15, "score": 9.111147489362482 }, { "content": "use bytes::{Bytes, Buf};\n\n\n\n// Flv Data - Video Sequence_Header\n\n// ------------------------| ----\n\n// Version | u8\n\n// Profile Indication | u8\n\n// Profile Compatability | u8\n\n// Level Indication | u8\n\n// Reserved | u6\n\n// NALU Length | u2\n\n// Reserved | u3\n\n// SPS Count | u5\n\n// SPS Length | u16\n\n// SPS | u[]\n\n// PPS Count | u8\n\n// PPS Length | u16\n\n// PPS | u[]\n\npub struct NaluConfig {\n\n pub version: u8,\n\n pub profile_indication: u8,\n", "file_path": "src/stream/server/nalu.rs", "rank": 16, "score": 8.96689520098597 }, { "content": "use std::sync::{Arc, Mutex};\n\nuse tokio::fs::File;\n\nuse tokio_util::codec::{BytesCodec, FramedRead};\n\nuse hyper::service::{make_service_fn, service_fn};\n\nuse hyper::{Body, Method, Request, Response, Server, 
StatusCode};\n\nuse super::playlist::PlayList;\n\n\n\npub struct MediaServer {}\n\nimpl MediaServer {\n\n pub async fn start(playlist: Arc<Mutex<PlayList>>) {\n\n let address = \"0.0.0.0:1337\".parse().unwrap();\n\n let make_service = make_service_fn(move |_| {\n\n let playlist = playlist.clone();\n\n async { Ok::<_, hyper::Error>(service_fn(move |request| handle_request(request, playlist.clone()))) }\n\n });\n\n let server = Server::bind(&address).serve(make_service);\n\n println!(\"media server on http://{}\", address);\n\n if let Err(e) = server.await {\n\n println!(\"media server error: {}\", e);\n\n }\n", "file_path": "src/media.rs", "rank": 17, "score": 8.937792925097561 }, { "content": "mod connection;\n\nmod server;\n\n\n\nuse std::sync::{Arc, Mutex};\n\nuse std::net::TcpListener;\n\nuse std::thread;\n\nuse connection::Connection;\n\nuse super::playlist::PlayList;\n\n\n\npub struct StreamServer {}\n\n\n\nimpl StreamServer {\n\n pub fn start(playlist: Arc<Mutex<PlayList>>) {\n\n let address = \"0.0.0.0:1935\";\n\n let listener = TcpListener::bind(address).unwrap();\n\n println!(\"stream server on rtmp://{}\", address);\n\n\n\n thread::spawn(move || {\n\n for stream in listener.incoming() {\n\n Connection::new(stream.unwrap(), playlist.clone());\n\n println!(\"new stream connection!\");\n\n }\n\n });\n\n }\n\n}\n", "file_path": "src/stream.rs", "rank": 18, "score": 7.700591282439599 }, { "content": "mod audio;\n\nmod video;\n\n\n\nuse std::io::prelude::*;\n\nuse std::io::BufWriter;\n\nuse std::fs::OpenOptions;\n\nuse bytes::Bytes;\n\nuse video::FlvVideo;\n\nuse audio::FlvAudio;\n\n\n\npub enum DataType {\n\n Video,\n\n Audio,\n\n}\n\n\n\n// https://www.adobe.com/content/dam/acom/en/devnet/flv/video_file_format_spec_v10.pdf\n\npub struct Flv {\n\n bytes: Vec<u8>,\n\n file_path: String,\n\n}\n", "file_path": "src/stream/server/flv.rs", "rank": 19, "score": 7.585309841795915 }, { "content": "use std::sync::{mpsc, Arc, Mutex};\n\nuse super::chat::ServerMessage;\n\n\n\npub struct PlayList {\n\n pub sequence: usize,\n\n pub m3u8: String,\n\n pub ts: Vec<(u32, String)>,\n\n pub timestamp: Vec<u32>,\n\n pub live: bool,\n\n pub tx: mpsc::Sender<ServerMessage>,\n\n pub rx: Arc<Mutex<mpsc::Receiver<ServerMessage>>>,\n\n}\n\n\n\nimpl PlayList {\n\n const COUNT: usize = 2;\n\n\n\n pub fn new() -> PlayList {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n PlayList {\n", "file_path": "src/playlist.rs", "rank": 20, "score": 7.536028493393052 }, { "content": "\n\nimpl Flv {\n\n const HEADER: &'static [u8] = b\"FLV\\x01\\x05\\x00\\x00\\x00\\x09\";\n\n\n\n pub fn read_video(data: Bytes) -> FlvVideo {\n\n FlvVideo::read(data)\n\n }\n\n\n\n pub fn read_audio(data: Bytes) -> FlvAudio {\n\n FlvAudio::read(data)\n\n }\n\n\n\n pub fn new() -> Flv {\n\n Flv { bytes: vec![], file_path: String::from(\"\") }\n\n }\n\n\n\n pub fn init_file(&mut self, file_path: String) {\n\n self.file_path = file_path;\n\n self.bytes.extend(Flv::HEADER);\n\n self.bytes.extend(b\"\\x00\\x00\\x00\\x00\"); // pre_tag_size\n", "file_path": "src/stream/server/flv.rs", "rank": 21, "score": 7.454920514569383 }, { "content": " pub fn set(&mut self, mut data: Bytes) {\n\n self.version = data.get_u8();\n\n self.profile_indication = data.get_u8();\n\n self.profile_compatability = data.get_u8();\n\n self.level_indication = data.get_u8();\n\n self.nalu_size = (data.get_u8() & 0b11) + 1;\n\n\n\n let sps_count = data.get_u8() & 0b11111;\n\n let mut sps = Vec::new();\n\n for _ in 0..sps_count {\n\n let sps_length = data.get_u16() as usize;\n\n let 
sps_temp = data.slice(..sps_length);\n\n data.advance(sps_length);\n\n sps.push(Nalu::read_unit(sps_temp));\n\n }\n\n\n\n let pps_count = data.get_u8();\n\n let mut pps = Vec::new();\n\n for _ in 0..pps_count {\n\n let pps_length = data.get_u16() as usize;\n", "file_path": "src/stream/server/nalu.rs", "rank": 22, "score": 7.285098795399532 }, { "content": "use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::io::{Read, Write};\n\nuse std::net::TcpStream;\n\nuse std::thread;\n\nuse super::server::{Server, ServerResult};\n\nuse super::PlayList;\n\n\n\npub struct Connection {\n\n socket: TcpStream,\n\n handshake: Handshake,\n\n handshake_completed: bool,\n\n server: Server,\n\n}\n\n\n\nimpl Connection {\n\n const BUFFER_SIZE: usize = 4096;\n\n\n\n pub fn new(socket: TcpStream, playlist: Arc<Mutex<PlayList>>) {\n\n // let mut socket = socket.try_clone().unwrap();\n", "file_path": "src/stream/connection.rs", "rank": 23, "score": 7.170384395542109 }, { "content": " pub data: Bytes, // RBSP\n\n}\n\n\n\nimpl Nalu {\n\n const INTER_DELIMITER: &'static [u8] = &[0x00, 0x00, 0x01];\n\n const BEGIN_DELIMITER: &'static [u8] = &[0x00, 0x00, 0x00, 0x01];\n\n const NALU_DELIMITER: &'static [u8] = &[0x00, 0x00, 0x00, 0x01, 0x09, 0x00];\n\n\n\n fn to_vec(&self) -> Vec<u8> {\n\n let mut v = Vec::with_capacity(self.data.len() + 1);\n\n\n\n let header = (self.ref_idc << 5) | (self.unit_type);\n\n v.push(header);\n\n v.extend(self.data.clone());\n\n v\n\n }\n\n\n\n pub fn read(mut data: Bytes, nalu_size: u8) -> Vec<Nalu> {\n\n let nalu_size = nalu_size as usize;\n\n let mut nal_units = Vec::new();\n", "file_path": "src/stream/server/nalu.rs", "rank": 26, "score": 6.8093610857163895 }, { "content": "mod adts;\n\nmod flv;\n\nmod nalu;\n\nmod ts;\n\n\n\nuse rml_rtmp::chunk_io::Packet;\n\nuse rml_rtmp::sessions::{ServerSession, ServerSessionConfig, ServerSessionEvent, ServerSessionResult};\n\nuse rml_rtmp::time::RtmpTimestamp;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::{fs, thread};\n\nuse bytes::Bytes;\n\nuse ts::TransportStream;\n\nuse flv::{Flv, DataType};\n\nuse nalu::{Nalu, NaluConfig};\n\nuse adts::{Adts, AdtsConfig};\n\nuse super::PlayList;\n\n\n\npub enum ServerResult {\n\n Disconnect,\n\n Response { packet: Packet },\n", "file_path": "src/stream/server.rs", "rank": 27, "score": 6.516898443017591 }, { "content": " object_type: 0,\n\n sampling_frequency_index: 0,\n\n channel_configuration: 0,\n\n }\n\n }\n\n\n\n pub fn set(&mut self, mut data: Bytes) {\n\n let byte0 = data.get_u8();\n\n let byte1 = data.get_u8();\n\n\n\n self.object_type = (byte0 & 0xF8) >> 3;\n\n self.sampling_frequency_index = ((byte0 & 0x07) << 1) | (byte1 >> 7);\n\n self.channel_configuration = (byte1 >> 3) & 0x0F;\n\n }\n\n}\n\n\n\npub struct Adts {}\n\n\n\nimpl Adts {\n\n const SYNCWORD: &'static [u8] = &[0xff, 0xf1];\n", "file_path": "src/stream/server/adts.rs", "rank": 28, "score": 6.169688192669039 }, { "content": " let tag_size_byte1 = ((pre_tag_size >> 16) & 0xff) as u8;\n\n let tag_size_byte2 = ((pre_tag_size >> 8) & 0xff) as u8;\n\n let tag_size_byte3 = (pre_tag_size & 0xff) as u8;\n\n\n\n let pre_tag_size = vec![tag_size_byte0, tag_size_byte1, tag_size_byte2, tag_size_byte3];\n\n\n\n self.bytes.extend(&tag[..]);\n\n self.bytes.extend(&data[..]);\n\n self.bytes.extend(&pre_tag_size[..]);\n\n }\n\n\n\n pub fn write_file(&mut self) {\n\n let file = OpenOptions::new().create(true).write(true).append(true).open(&self.file_path).unwrap();\n\n let mut buf = 
BufWriter::new(file);\n\n\n\n buf.write_all(&self.bytes[..]).unwrap();\n\n buf.flush().unwrap();\n\n self.bytes.clear();\n\n }\n\n}\n\n\n", "file_path": "src/stream/server/flv.rs", "rank": 29, "score": 5.89895839777695 }, { "content": "}\n\n\n\npub struct Server {\n\n flv: Flv,\n\n ts: TransportStream,\n\n video_config: NaluConfig,\n\n audio_config: AdtsConfig,\n\n has_keyframe: bool,\n\n session: Option<ServerSession>,\n\n playlist: Arc<Mutex<PlayList>>,\n\n next_write: u32,\n\n}\n\n\n\nimpl Server {\n\n const WRITE_DURATION: u32 = 2000;\n\n\n\n pub fn new(playlist: Arc<Mutex<PlayList>>) -> Server {\n\n Server {\n\n flv: Flv::new(),\n\n ts: TransportStream::new(),\n", "file_path": "src/stream/server.rs", "rank": 30, "score": 5.885046082412643 }, { "content": "\n\n for result in server_results.into_iter() {\n\n match result {\n\n ServerResult::Response { packet } => self.write(packet.bytes),\n\n ServerResult::Disconnect => {\n\n self.server.end_stream();\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn write(&mut self, bytes: Vec<u8>) {\n\n match self.socket.write(&bytes) {\n\n Ok(_) => (),\n\n Err(error) => {\n\n println!(\"Error writing to socket: {:?}\", error);\n\n }\n\n }\n", "file_path": "src/stream/connection.rs", "rank": 32, "score": 5.662148732850403 }, { "content": "// -----------\n\n// 0\t 未使用\n\n// 1\t 非關鍵幀\n\n// 2\t 片分區A\n\n// 3\t 片分區B\n\n// 4\t 片分區C\n\n// 5\t 關鍵幀\n\n// 6\t 補充增強訊息單元(SEI)\n\n// 7\t SPS序列參數集\n\n// 8\t PPS圖像參數集\n\n// 9\t 分解符\n\n// 10\t 序列结束\n\n// 11\t 碼流结束\n\n// 12\t 填充\n\n// 13~23 保留\n\n// 24~31 未使用\n\n\n\npub struct Nalu {\n\n pub ref_idc: u8,\n\n pub unit_type: u8,\n", "file_path": "src/stream/server/nalu.rs", "rank": 34, "score": 4.9603302655510095 }, { "content": "use slab::Slab;\n\nuse std::collections::HashSet;\n\nuse std::sync::{mpsc, Arc, Mutex};\n\nuse std::thread;\n\nuse websocket::sync::Server;\n\nuse websocket::OwnedMessage;\n\nuse super::playlist::PlayList;\n\n\n\npub enum ServerMessage {\n\n Off,\n\n Live,\n\n}\n\n\n", "file_path": "src/chat.rs", "rank": 35, "score": 4.855155978812847 }, { "content": " video_config: NaluConfig::new(),\n\n audio_config: AdtsConfig::new(),\n\n has_keyframe: false,\n\n session: None,\n\n playlist,\n\n next_write: Server::WRITE_DURATION,\n\n }\n\n }\n\n\n\n pub fn handle_handshake_bytes(&mut self, bytes: &[u8]) -> Result<Vec<ServerResult>, String> {\n\n let mut server_results = Vec::new();\n\n let config = ServerSessionConfig::new();\n\n let (session, initial_results) = match ServerSession::new(config) {\n\n Ok(results) => results,\n\n Err(error) => return Err(error.to_string()),\n\n };\n\n\n\n self.session = Some(session);\n\n self.handle_session_results(initial_results, &mut server_results);\n\n match self.handle_bytes(bytes) {\n", "file_path": "src/stream/server.rs", "rank": 36, "score": 4.846253844717317 }, { "content": " }\n\n\n\n pub fn push(&mut self, data_type: DataType, timestamp: u32, is_keyframe: bool, data: Bytes) {\n\n if is_keyframe {\n\n self.write_file();\n\n }\n\n\n\n let data_type = get_data_type(data_type);\n\n let data_len = data.len();\n\n let len_byte0 = (data_len >> 16) as u8;\n\n let len_byte1 = ((data_len >> 8) & 0xff) as u8;\n\n let len_byte2 = (data_len & 0xff) as u8;\n\n\n\n let time_byte0 = (timestamp >> 16) as u8;\n\n let time_byte1 = ((timestamp >> 8) & 0xff) as u8;\n\n let time_byte2 = (timestamp & 0xff) as u8;\n\n\n\n let tag = vec![data_type, len_byte0, len_byte1, len_byte2, time_byte0, time_byte1, time_byte2, 0, 0, 0, 0];\n\n let pre_tag_size = tag.len() + data.len();\n\n let 
tag_size_byte0 = (pre_tag_size >> 24) as u8;\n", "file_path": "src/stream/server/flv.rs", "rank": 37, "score": 4.809763533760009 }, { "content": "mod chat;\n\nmod media;\n\nmod playlist;\n\nmod stream;\n\n\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let playlist = Arc::new(Mutex::new(playlist::PlayList::new()));\n\n stream::StreamServer::start(playlist.clone());\n\n chat::ChatServer::start(playlist.clone());\n\n media::MediaServer::start(playlist.clone()).await;\n\n}\n", "file_path": "src/main.rs", "rank": 38, "score": 4.068200448503841 }, { "content": "\n\n let profile = 0x40;\n\n let sampling_frequency_index = adts_config.sampling_frequency_index << 2;\n\n let channel_configuration0 = (adts_config.channel_configuration & 0x07) >> 2;\n\n es.push(profile | sampling_frequency_index | channel_configuration0);\n\n\n\n let channel_configuration1 = (adts_config.channel_configuration & 0x03) << 6;\n\n let frame_length = (7 + data.len()) as u16;\n\n let frame_length0 = ((frame_length & 0x1FFF) >> 11) as u8;\n\n es.push(channel_configuration1 | frame_length0);\n\n\n\n let frame_length1 = ((frame_length & 0x7FF) << 5) as u16;\n\n let frame_length2 = frame_length1 | 0b0000_0000_0001_1111;\n\n es.extend(&[(frame_length2 >> 8) as u8, (frame_length2 & 0xff) as u8]);\n\n\n\n es.push(0b1111_1100);\n\n es.extend(data);\n\n\n\n es\n\n }\n\n}\n", "file_path": "src/stream/server/adts.rs", "rank": 39, "score": 4.028095432544385 }, { "content": " // 轉成es時有nalu header, 固定爲0x00000001(幀開始)或0x000001(幀中)\n\n // 轉成es時, pes和es之間需加入type=9的nalu, 關鍵幀前必須加入type=7和type=8的nalu, 這些nalu彼此相鄰。\n\n // Pes Header | nalu(0x09) | 隨便(u8) | nalu(其他) | 內容 | nalu(0x67) | sps | nalu(0x68) | pps | nalu(0x65) | keyframe |\n\n // Pes Header | nalu(0x09) | 隨便(u8) | nalu(其他) | 內容 | nalu(0x41) | 內容 |\n\n pub fn to_es_layer(nalu_config: &NaluConfig, data: Vec<Nalu>) -> Vec<u8> {\n\n let mut es = Vec::new();\n\n let mut is_delimit = false;\n\n let mut is_keyframe_delimit = false;\n\n\n\n for nalu in data {\n\n match nalu.unit_type {\n\n 1 | 6 => {\n\n if !is_delimit {\n\n es.extend(Nalu::NALU_DELIMITER);\n\n is_delimit = true;\n\n }\n\n }\n\n 5 => {\n\n if !is_delimit {\n\n es.extend(Nalu::NALU_DELIMITER);\n", "file_path": "src/stream/server/nalu.rs", "rank": 40, "score": 3.8378022256406954 }, { "content": " sequence: 0,\n\n m3u8: String::from(\"\"),\n\n ts: vec![],\n\n timestamp: vec![0],\n\n live: false,\n\n tx,\n\n rx: Arc::new(Mutex::new(rx)),\n\n }\n\n }\n\n\n\n pub fn push(&mut self, timestamp: u32, filename: String, end: bool) -> u64 {\n\n let mut timestamp = timestamp / 1000 + 1;\n\n let mut duration = timestamp;\n\n if let Some(t) = self.timestamp.last() {\n\n if end {\n\n if let Some(d) = self.ts.last() {\n\n timestamp = d.0 + t;\n\n duration = d.0;\n\n }\n\n } else {\n", "file_path": "src/playlist.rs", "rank": 41, "score": 3.784082721360818 }, { "content": " Ok(results) => server_results.extend(results),\n\n Err(error) => {\n\n println!(\"Handshake bytes the following server error: {}\", error);\n\n return Err(error.to_string());\n\n }\n\n }\n\n Ok(server_results)\n\n }\n\n\n\n pub fn handle_bytes(&mut self, bytes: &[u8]) -> Result<Vec<ServerResult>, String> {\n\n let mut server_results = Vec::new();\n\n let session_results = match self.session.as_mut().unwrap().handle_input(bytes) {\n\n Ok(results) => results,\n\n Err(error) => return Err(error.to_string()),\n\n };\n\n\n\n self.handle_session_results(session_results, &mut server_results);\n\n Ok(server_results)\n\n }\n\n\n", "file_path": 
"src/stream/server.rs", "rank": 43, "score": 3.673598161274659 }, { "content": " is_delimit = true;\n\n }\n\n\n\n if !is_keyframe_delimit {\n\n let nalu = nalu_config.sps.first().unwrap();\n\n let sps: Vec<u8> = nalu.to_vec();\n\n es.extend(Nalu::BEGIN_DELIMITER);\n\n es.extend(sps);\n\n\n\n let nalu = nalu_config.pps.first().unwrap();\n\n let pps: Vec<u8> = nalu.to_vec();\n\n es.extend(Nalu::BEGIN_DELIMITER);\n\n es.extend(pps);\n\n\n\n is_keyframe_delimit = true;\n\n }\n\n }\n\n _ => continue,\n\n }\n\n\n\n es.extend(Self::INTER_DELIMITER);\n\n es.extend(nalu.to_vec());\n\n }\n\n es\n\n }\n\n}\n", "file_path": "src/stream/server/nalu.rs", "rank": 44, "score": 3.536244719829984 }, { "content": " self.audio_config.set(audio.data.clone());\n\n return;\n\n }\n\n\n\n let es = Adts::to_es_layer(&self.audio_config, audio.data.to_vec());\n\n self.ts.push_audio(timestamp.value as u64, es);\n\n }\n\n\n\n pub fn end_stream(&mut self) {\n\n // self.flv.write_file();\n\n self.ts.write_file(\"0.ts\");\n\n\n\n let duration = {\n\n let mut playlist = self.playlist.lock().unwrap();\n\n playlist.push(0, \"0.ts\".to_string(), true) * 1000 + 1000\n\n };\n\n\n\n let playlist = self.playlist.clone();\n\n thread::spawn(move || {\n\n let duration = std::time::Duration::from_millis(duration);\n\n std::thread::sleep(duration);\n\n playlist.lock().unwrap().live = false;\n\n });\n\n }\n\n}\n", "file_path": "src/stream/server.rs", "rank": 45, "score": 3.259378643960442 }, { "content": " let pps_temp = data.slice(..pps_length);\n\n data.advance(pps_length);\n\n pps.push(Nalu::read_unit(pps_temp));\n\n }\n\n\n\n self.sps = sps;\n\n self.pps = pps;\n\n }\n\n}\n\n\n\n// FLV Data Body\n\n// ----------| --\n\n// Nalu Type | u8\n\n// RBSP | []\n\n\n\n// FLV Data Body Nalu Type\n\n// -----| ---|\n\n// F\t| u1 |\tforbidden zero bit, h.264 必須為零\n\n// NRI\t| u2 |\tnal ref idc, 值0~3, I幀/sps/pps為3, P幀為2, B幀為0\n\n// Type\t| u5 |\t参考下表\n", "file_path": "src/stream/server/nalu.rs", "rank": 46, "score": 2.8725914441729494 }, { "content": "\n\n while data.has_remaining() {\n\n let nalu_length = data.get_uint(nalu_size) as usize;\n\n let nalu_data = data.slice(..nalu_length);\n\n let nal_unit = Nalu::read_unit(nalu_data);\n\n data.advance(nalu_length);\n\n nal_units.push(nal_unit);\n\n }\n\n\n\n nal_units\n\n }\n\n\n\n pub fn read_unit(mut data: Bytes) -> Nalu {\n\n let nalu = data.get_u8();\n\n let ref_idc = (nalu >> 5) & 0x03;\n\n let unit_type = nalu & 0x1f;\n\n\n\n Nalu { ref_idc, unit_type, data }\n\n }\n\n\n", "file_path": "src/stream/server/nalu.rs", "rank": 47, "score": 2.5416506842800066 }, { "content": " request.reject().unwrap();\n\n continue;\n\n }\n\n\n\n let client = request.use_protocol(\"yo-websocket\").accept().unwrap();\n\n let (mut receiver, sender) = client.split().unwrap();\n\n let mut map = connections_map.lock().unwrap();\n\n let mut ids = connections.lock().unwrap();\n\n let id = map.insert(sender);\n\n ids.insert(id);\n\n\n\n let tx_copy = mpsc::Sender::clone(&tx);\n\n thread::spawn(move || {\n\n for message in receiver.incoming_messages() {\n\n match message {\n\n Err(_) => return,\n\n Ok(message) => tx_copy.send(ClientMessage { id, message }).unwrap(),\n\n }\n\n }\n\n });\n\n println!(\"new chat connection!\");\n\n }\n\n });\n\n println!(\"chat server on ws://{}\", address);\n\n }\n\n}\n\n\n", "file_path": "src/chat.rs", "rank": 48, "score": 2.46431094764067 }, { "content": "\n\n // Syncword \t u12 固定爲0xfff\n\n // Id \t u1 0為MPEG-4, 1為MPEG-2\n\n // Layer \t u2 固定爲00\n\n // Protection Absent \t u1 固定爲1\n\n // 
Profile \t u2 值: 0~3, 1為aac\n\n // Sampling Frequency Index \t u4 表示採樣率, 0: 96000 Hz, 1: 88200 Hz, 2: 64000 Hz, 3:48000 Hz, 4: 44100 Hz, 5: 32000 Hz, 6: 24000 Hz, 7: 22050 Hz, 8: 16000 Hz, 9: 12000 Hz, 10: 11025 Hz, 11: 8000 Hz, 12: 7350 Hz\n\n // Private Bit \t u1 固定爲0\n\n // Channel Configuration \t u3 值: 0~7, 1: 1 channel: front-center, 2: 2 channels: front-left, front-right, 3: 3 channels: front-center, front-left, front-right, 4: 4 channels: front-center, front-left, front-right, back-center\n\n // Original Copy \t u1 固定爲0\n\n // Home \t u1 固定爲0\n\n // Copyright Identification Bit \t u1 固定爲0\n\n // Copyright Identification Start \t u1 固定爲0\n\n // Aac Frame Length \t u13 含adts header在內的數據總長度\n\n // Adts Buffer Fullness \t u11 固定爲0x7ff\n\n // Number Of Raw Data Blocks In Frame \tu2 固定爲00\n\n pub fn to_es_layer(adts_config: &AdtsConfig, data: Vec<u8>) -> Vec<u8> {\n\n let mut es = Vec::with_capacity(7 + data.len());\n\n\n\n es.extend(Adts::SYNCWORD);\n", "file_path": "src/stream/server/adts.rs", "rank": 49, "score": 2.1543815516546196 }, { "content": " thread::spawn(|| {\n\n let mut connection = Connection {\n\n socket: socket,\n\n handshake: Handshake::new(PeerType::Server),\n\n handshake_completed: false,\n\n server: Server::new(playlist),\n\n };\n\n connection.start_socket_reader();\n\n });\n\n }\n\n\n\n fn start_socket_reader(&mut self) {\n\n let mut buffer = [0; Connection::BUFFER_SIZE];\n\n loop {\n\n let result = match self.socket.read(&mut buffer) {\n\n Ok(0) => {\n\n self.server.end_stream();\n\n return;\n\n }\n\n Ok(count) => {\n", "file_path": "src/stream/connection.rs", "rank": 50, "score": 2.1343512595250766 }, { "content": " }\n\n self.sequence += 1;\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.sequence = 0;\n\n self.m3u8 = String::from(\"\");\n\n self.ts.clear();\n\n self.timestamp = vec![0];\n\n }\n\n}\n", "file_path": "src/playlist.rs", "rank": 51, "score": 2.1295168989697957 }, { "content": " let byte1 = data.get_u8();\n\n\n\n let is_keyframe = (byte0 >> 4) == 1;\n\n let is_sequence_header = byte1 == 0;\n\n let composition_time = data.get_uint(3);\n\n\n\n FlvVideo {\n\n is_keyframe,\n\n is_sequence_header,\n\n composition_time,\n\n data,\n\n }\n\n }\n\n}\n", "file_path": "src/stream/server/flv/video.rs", "rank": 52, "score": 1.9584777106958384 }, { "content": " fn handle_session_results(&mut self, session_results: Vec<ServerSessionResult>, server_results: &mut Vec<ServerResult>) {\n\n for result in session_results {\n\n match result {\n\n ServerSessionResult::OutboundResponse(packet) => server_results.push(ServerResult::Response { packet }),\n\n ServerSessionResult::RaisedEvent(event) => self.handle_event(event, server_results),\n\n r => println!(\"Server result received: {:?}\", r),\n\n }\n\n }\n\n }\n\n\n\n fn handle_event(&mut self, event: ServerSessionEvent, server_results: &mut Vec<ServerResult>) {\n\n match event {\n\n ServerSessionEvent::ConnectionRequested { request_id, app_name } => {\n\n self.handle_connection_requested(request_id, app_name, server_results);\n\n }\n\n ServerSessionEvent::PublishStreamRequested {\n\n request_id,\n\n app_name,\n\n stream_key,\n\n mode: _,\n", "file_path": "src/stream/server.rs", "rank": 53, "score": 1.7053929961319292 }, { "content": " duration = timestamp - t;\n\n }\n\n }\n\n\n\n self.ts.push((duration, filename));\n\n self.timestamp.push(timestamp);\n\n self.update(end);\n\n duration as u64\n\n }\n\n\n\n pub fn update(&mut self, end: bool) {\n\n if self.ts.len() >= PlayList::COUNT {\n\n if self.ts.len() == 
PlayList::COUNT + 1 && !end {\n\n self.ts.remove(0);\n\n self.timestamp.remove(0);\n\n }\n\n let mut target_duration = 0;\n\n let mut list = String::from(\"\");\n\n for ts in &self.ts {\n\n list = format!(\"{}#EXTINF:{}.0000\\r\\n\", list, ts.0);\n", "file_path": "src/playlist.rs", "rank": 54, "score": 1.4267317119219134 }, { "content": " }\n\n\n\n fn handshake(&mut self, bytes: &[u8]) -> Result<Vec<ServerResult>, String> {\n\n let result = match self.handshake.process_bytes(bytes) {\n\n Ok(result) => result,\n\n Err(error) => {\n\n println!(\"Handshake error: {:?}\", error);\n\n return Err(error.to_string());\n\n }\n\n };\n\n\n\n match result {\n\n HandshakeProcessResult::InProgress { response_bytes } => {\n\n if response_bytes.len() > 0 {\n\n self.write(response_bytes);\n\n }\n\n Ok(vec![])\n\n }\n\n\n\n HandshakeProcessResult::Completed { response_bytes, remaining_bytes } => {\n", "file_path": "src/stream/connection.rs", "rank": 55, "score": 1.4173769369572171 }, { "content": " let filename = format!(\"{}.ts\", timestamp.value);\n\n self.ts.write_file(&filename);\n\n self.next_write = timestamp.value + Server::WRITE_DURATION;\n\n playlist.push(timestamp.value, filename, false);\n\n }\n\n }\n\n\n\n let nalu = Nalu::read(video.data, self.video_config.nalu_size);\n\n let es = Nalu::to_es_layer(&self.video_config, nalu);\n\n self.ts.push_video(timestamp.value as u64, video.composition_time, video.is_keyframe, es).unwrap();\n\n }\n\n\n\n fn handle_audio(&mut self, timestamp: RtmpTimestamp, data: Bytes) {\n\n let audio = Flv::read_audio(data.clone());\n\n if !(self.has_keyframe || audio.is_sequence_header) {\n\n return;\n\n }\n\n // self.flv.push(DataType::Audio, timestamp.value, false, data.clone());\n\n\n\n if audio.is_sequence_header {\n", "file_path": "src/stream/server.rs", "rank": 56, "score": 1.2237250692865707 }, { "content": " }\n\n }\n\n\n\n fn handle_connection_requested(&mut self, request_id: u32, app_name: String, server_results: &mut Vec<ServerResult>) {\n\n println!(\"Connection requested connection to app '{}'\", app_name);\n\n\n\n let accept_result = self.session.as_mut().unwrap().accept_request(request_id);\n\n match accept_result {\n\n Ok(results) => self.handle_session_results(results, server_results),\n\n Err(error) => {\n\n println!(\"Error occurred accepting connection request: {:?}\", error);\n\n server_results.push(ServerResult::Disconnect);\n\n }\n\n }\n\n }\n\n\n\n fn handle_publish_requested(&mut self, request_id: u32, app_name: String, stream_key: String, server_results: &mut Vec<ServerResult>) {\n\n println!(\"Publish requested on app '{}' and stream key '{}'\", app_name, stream_key);\n\n // self.flv.init_file(String::from(\"./video.flv\"));\n\n\n", "file_path": "src/stream/server.rs", "rank": 57, "score": 1.1321310888374359 } ]
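The last context snippets above quote a bit-field table for the 7-byte ADTS header (src/stream/server/adts.rs). A minimal Rust sketch of that packing follows; the function name and its parameters are hypothetical and only illustrate the layout given in the quoted comments (12-bit syncword 0xfff, 13-bit frame length that includes the header, buffer fullness fixed at 0x7ff).

// Hypothetical sketch: pack the 7-byte ADTS header described in the quoted table.
fn adts_header(profile: u8, freq_index: u8, channels: u8, frame_len: u16) -> [u8; 7] {
    let len = frame_len as u32; // assumed to already include the 7 header bytes
    [
        0xFF,                                    // syncword, high 8 bits
        0xF1,                                    // syncword low 4 bits, ID=0 (MPEG-4), layer=00, protection_absent=1
        ((profile & 0x03) << 6)                  // profile (2 bits)
            | ((freq_index & 0x0F) << 2)         // sampling frequency index (4 bits)
            | ((channels >> 2) & 0x01),          // private bit = 0, channel configuration MSB
        ((channels & 0x03) << 6)                 // channel configuration low 2 bits; original/home/copyright bits = 0
            | ((len >> 11) as u8 & 0x03),        // frame length, top 2 of 13 bits
        (len >> 3) as u8,                        // frame length, middle 8 bits
        (((len & 0x07) as u8) << 5) | 0x1F,      // frame length low 3 bits + buffer fullness high 5 bits (0x7ff)
        0xFC,                                    // buffer fullness low 6 bits + number_of_raw_data_blocks = 00
    ]
}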
Rust
src/options/pane_options.rs
AntonGepting/tmux-interface
7a1dea0ad658e2cb8743311480d207ad1d196a48
use crate::{Error, Switch}; use crate::{SetOption, ShowOptions}; use std::fmt; use std::str::FromStr; pub const ALLOW_RENAME: usize = 1 << 0; pub const ALTERNATE_SCREEN: usize = 1 << 1; pub const REMAIN_ON_EXIT: usize = 1 << 2; pub const WINDOW_ACTIVE_STYLE: usize = 1 << 3; pub const WINDOW_STYLE: usize = 1 << 4; pub const PANE_OPTIONS_NONE: usize = 0; pub const PANE_OPTIONS_ALL: usize = ALLOW_RENAME | ALTERNATE_SCREEN | REMAIN_ON_EXIT | WINDOW_ACTIVE_STYLE | WINDOW_STYLE; pub const PANE_OPTIONS_NUM: usize = 5; pub const PANE_OPTIONS: [( &str, fn(p: &mut PaneOptions, i: Option<usize>, s: &str), fn(p: &PaneOptions) -> Option<String>, usize, ); PANE_OPTIONS_NUM] = [ #[cfg(feature = "tmux_3_0")] ( "allow-rename", |p, _, s| p.allow_rename = s.parse().ok(), |p| p.allow_rename.as_ref().map(|v| v.to_string()), ALLOW_RENAME, ), #[cfg(feature = "tmux_3_0")] ( "alternate-screen", |p, _, s| p.alternate_screen = s.parse().ok(), |p| p.alternate_screen.as_ref().map(|v| v.to_string()), ALTERNATE_SCREEN, ), #[cfg(feature = "tmux_3_0")] ( "remain-on-exit", |p, _, s| p.remain_on_exit = s.parse().ok(), |p| p.remain_on_exit.as_ref().map(|v| v.to_string()), REMAIN_ON_EXIT, ), #[cfg(feature = "tmux_3_0")] ( "window-active-style", |p, _, s| p.window_active_style = Some(s.to_string()), |p| { p.window_active_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_ACTIVE_STYLE, ), #[cfg(feature = "tmux_3_0")] ( "window-style", |p, _, s| p.window_style = Some(s.to_string()), |p| { p.window_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_STYLE, ), ]; #[derive(Default, PartialEq, Clone, Debug)] pub struct PaneOptions { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<String>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<String>, } impl PaneOptions { pub fn get_all() -> Result<Self, Error> { let s = ShowOptions::new().global().output()?.to_string(); s.parse() } pub fn get(bitflags: usize) -> Result<Self, Error> { let selected_option = PANE_OPTIONS .iter() .filter(|t| bitflags == t.3) .map(|t| format!("{}", t.0)) .collect::<Vec<String>>() .join(" "); let s = ShowOptions::new() .pane() .option(&selected_option) .output()? 
.to_string(); s.parse() } pub fn set(&self, bitflags: usize) -> Result<(), Error> { for selected_option in PANE_OPTIONS.iter().filter(|t| bitflags & t.3 == t.3) { if let Some(selected_value) = selected_option.2(&self) { SetOption::new() .pane() .option(selected_option.0) .value(&selected_value) .output()?; } } Ok(()) } } impl FromStr for PaneOptions { type Err = Error; fn from_str(options: &str) -> Result<Self, Self::Err> { let mut pane_options: PaneOptions = Default::default(); let mut v: Vec<&str>; let mut arr: Vec<&str>; for option in options.lines() { v = option.trim().splitn(2, ' ').collect(); arr = v[0].split(|c| c == '[' || c == ']').collect(); for pane_var in PANE_OPTIONS.iter() { if pane_var.0 == arr[0] { pane_var.1( &mut pane_options, arr.get(1).and_then(|i| i.parse::<usize>().ok()), v.get(1).unwrap_or(&""), ) } } } Ok(pane_options) } } impl fmt::Display for PaneOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for var in PANE_OPTIONS.iter() { if let Some(ref v) = var.2(self) { write!(f, "{} {}\n", var.0, v)?; } } Ok(()) } } #[derive(Default, Debug)] pub struct PaneOptionsBuilder<'a> { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<&'a str>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<&'a str>, } impl<'a> PaneOptionsBuilder<'a> { pub fn new() -> Self { Default::default() } #[cfg(feature = "tmux_3_0")] pub fn allow_rename(&mut self, allow_rename: Switch) -> &mut Self { self.allow_rename = Some(allow_rename); self } #[cfg(feature = "tmux_3_0")] pub fn alternate_screen(&mut self, alternate_screen: Switch) -> &mut Self { self.alternate_screen = Some(alternate_screen); self } #[cfg(feature = "tmux_3_0")] pub fn remain_on_exit(&mut self, remain_on_exit: Switch) -> &mut Self { self.remain_on_exit = Some(remain_on_exit); self } #[cfg(feature = "tmux_3_0")] pub fn window_active_style(&mut self, window_active_style: &'a str) -> &mut Self { self.window_active_style = Some(window_active_style); self } #[cfg(feature = "tmux_3_0")] pub fn window_style(&mut self, window_style: &'a str) -> &mut Self { self.window_style = Some(window_style); self } pub fn build(&self) -> PaneOptions { PaneOptions { #[cfg(feature = "tmux_3_0")] allow_rename: self.allow_rename.clone(), #[cfg(feature = "tmux_3_0")] alternate_screen: self.alternate_screen.clone(), #[cfg(feature = "tmux_3_0")] remain_on_exit: self.remain_on_exit.clone(), #[cfg(feature = "tmux_3_0")] window_active_style: self.window_active_style.map(|s| s.to_string()), #[cfg(feature = "tmux_3_0")] window_style: self.window_style.map(|s| s.to_string()), } } }
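The file_code column above is the full src/options/pane_options.rs from tmux-interface. A short, hypothetical usage sketch follows; it assumes, as the crate's tests in the context items below suggest, that PaneOptions, PaneOptionsBuilder, Switch and the ALLOW_RENAME flag are re-exported at the crate root, and that a tmux server built with the tmux_3_0 feature set is available.

// Hypothetical usage sketch of the API quoted above: build a PaneOptions value,
// write one option to tmux, then read it back.
use tmux_interface::{Error, PaneOptions, PaneOptionsBuilder, Switch, ALLOW_RENAME};

fn toggle_allow_rename() -> Result<(), Error> {
    // set() calls SetOption::new().pane().option(..).value(..) for every field selected by the bitflag
    PaneOptionsBuilder::new()
        .allow_rename(Switch::On)
        .build()
        .set(ALLOW_RENAME)?;

    // get() calls ShowOptions::new().pane().option(..) and parses the output back into PaneOptions
    let options = PaneOptions::get(ALLOW_RENAME)?;
    assert_eq!(options.allow_rename, Some(Switch::On));
    Ok(())
}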
use crate::{Error, Switch}; use crate::{SetOption, ShowOptions}; use std::fmt; use std::str::FromStr; pub const ALLOW_RENAME: usize = 1 << 0; pub const ALTERNATE_SCREEN: usize = 1 << 1; pub const REMAIN_ON_EXIT: usize = 1 << 2; pub const WINDOW_ACTIVE_STYLE: usize = 1 << 3; pub const WINDOW_STYLE: usize = 1 << 4; pub const PANE_OPTIONS_NONE: usize = 0; pub const PANE_OPTIONS_ALL: usize = ALLOW_RENAME | ALTERNATE_SCREEN | REMAIN_ON_EXIT | WINDOW_ACTIVE_STYLE | WINDOW_STYLE; pub const PANE_OPTIONS_NUM: usize = 5; pub const PANE_OPTIONS: [( &str, fn(p: &mut PaneOptions, i: Option<usize>, s: &str), fn(p: &PaneOptions) -> Option<String>, usize, ); PANE_OPTIONS_NUM] = [ #[cfg(feature = "tmux_3_0")] ( "allow-rename", |p, _, s| p.allow_rename = s.parse().ok(), |p| p.allow_rename.as_ref().map(|v| v.to_string()), ALLOW_RENAME, ), #[cfg(feature = "tmux_3_0")] ( "alternate-screen", |p, _, s| p.alternate_screen = s.parse().ok(), |p| p.alternate_screen.as_ref().map(|v| v.to_string()), ALTERNATE_SCREEN, ), #[cfg(feature = "tmux_3_0")] ( "remain-on-exit", |p, _, s| p.remain_on_exit = s.parse().ok(), |p| p.remain_on_exit.as_ref().map(|v| v.to_string()), REMAIN_ON_EXIT, ), #[cfg(feature = "tmux_3_0")] ( "window-active-style", |p, _, s| p.window_active_style = Some(s.to_string()), |p| { p.window_active_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_ACTIVE_STYLE, ), #[cfg(feature = "tmux_3_0")] ( "window-style", |p, _, s| p.window_style = Some(s.to_string()), |p| { p.window_style .as_ref() .map(|v| format!("\"{}\"", v.to_string())) }, WINDOW_STYLE, ), ]; #[derive(Default, PartialEq, Clone, Debug)] pub struct PaneOptions { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<String>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<String>, } impl PaneOptions { pub fn get_all() -> Result<Self, Error> { let s = ShowOptions::new().global().output()?.to_string(); s.parse() } pub fn get(bitflags: usize) -> Result<Self, Error> { let selected_option = PANE_OPTIONS .iter() .filter(|t| bitflags == t.3) .
pub fn set(&self, bitflags: usize) -> Result<(), Error> { for selected_option in PANE_OPTIONS.iter().filter(|t| bitflags & t.3 == t.3) { if let Some(selected_value) = selected_option.2(&self) { SetOption::new() .pane() .option(selected_option.0) .value(&selected_value) .output()?; } } Ok(()) } } impl FromStr for PaneOptions { type Err = Error; fn from_str(options: &str) -> Result<Self, Self::Err> { let mut pane_options: PaneOptions = Default::default(); let mut v: Vec<&str>; let mut arr: Vec<&str>; for option in options.lines() { v = option.trim().splitn(2, ' ').collect(); arr = v[0].split(|c| c == '[' || c == ']').collect(); for pane_var in PANE_OPTIONS.iter() { if pane_var.0 == arr[0] { pane_var.1( &mut pane_options, arr.get(1).and_then(|i| i.parse::<usize>().ok()), v.get(1).unwrap_or(&""), ) } } } Ok(pane_options) } } impl fmt::Display for PaneOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for var in PANE_OPTIONS.iter() { if let Some(ref v) = var.2(self) { write!(f, "{} {}\n", var.0, v)?; } } Ok(()) } } #[derive(Default, Debug)] pub struct PaneOptionsBuilder<'a> { #[cfg(feature = "tmux_3_0")] pub allow_rename: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub alternate_screen: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub remain_on_exit: Option<Switch>, #[cfg(feature = "tmux_3_0")] pub window_active_style: Option<&'a str>, #[cfg(feature = "tmux_3_0")] pub window_style: Option<&'a str>, } impl<'a> PaneOptionsBuilder<'a> { pub fn new() -> Self { Default::default() } #[cfg(feature = "tmux_3_0")] pub fn allow_rename(&mut self, allow_rename: Switch) -> &mut Self { self.allow_rename = Some(allow_rename); self } #[cfg(feature = "tmux_3_0")] pub fn alternate_screen(&mut self, alternate_screen: Switch) -> &mut Self { self.alternate_screen = Some(alternate_screen); self } #[cfg(feature = "tmux_3_0")] pub fn remain_on_exit(&mut self, remain_on_exit: Switch) -> &mut Self { self.remain_on_exit = Some(remain_on_exit); self } #[cfg(feature = "tmux_3_0")] pub fn window_active_style(&mut self, window_active_style: &'a str) -> &mut Self { self.window_active_style = Some(window_active_style); self } #[cfg(feature = "tmux_3_0")] pub fn window_style(&mut self, window_style: &'a str) -> &mut Self { self.window_style = Some(window_style); self } pub fn build(&self) -> PaneOptions { PaneOptions { #[cfg(feature = "tmux_3_0")] allow_rename: self.allow_rename.clone(), #[cfg(feature = "tmux_3_0")] alternate_screen: self.alternate_screen.clone(), #[cfg(feature = "tmux_3_0")] remain_on_exit: self.remain_on_exit.clone(), #[cfg(feature = "tmux_3_0")] window_active_style: self.window_active_style.map(|s| s.to_string()), #[cfg(feature = "tmux_3_0")] window_style: self.window_style.map(|s| s.to_string()), } } }
map(|t| format!("{}", t.0)) .collect::<Vec<String>>() .join(" "); let s = ShowOptions::new() .pane() .option(&selected_option) .output()? .to_string(); s.parse() }
function_block-function_prefix_line
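For reference, joining the prefix and middle columns above (the function_block-function_prefix_line strategy splits the method mid-call-chain) reassembles the completed PaneOptions::get exactly as it appears in the file_code column:

// Reassembled from the prefix and middle fields of this row; nothing here is new code.
pub fn get(bitflags: usize) -> Result<Self, Error> {
    let selected_option = PANE_OPTIONS
        .iter()
        .filter(|t| bitflags == t.3)
        .map(|t| format!("{}", t.0))
        .collect::<Vec<String>>()
        .join(" ");
    let s = ShowOptions::new()
        .pane()
        .option(&selected_option)
        .output()?
        .to_string();
    s.parse()
}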
[ { "content": "#[test]\n\nfn from_str() {\n\n use crate::Version;\n\n\n\n let examples = &[\n\n \"tmux next-3.2\",\n\n \"tmux 3.1b\",\n\n \"tmux 3.1a\",\n\n \"tmux 3.1\",\n\n \"tmux 3.0a\",\n\n \"tmux 3.0\",\n\n \"tmux 2.9a\",\n\n \"tmux 2.9\",\n\n \"tmux 2.8\",\n\n \"tmux 2.7-rc\",\n\n \"tmux 2.6\",\n\n \"tmux 2.5\",\n\n \"tmux 2.4\",\n\n \"tmux 2.3\",\n\n \"tmux 2.2\",\n\n \"tmux 2.1\",\n", "file_path": "src/version_tests.rs", "rank": 0, "score": 100880.82894676762 }, { "content": "#[test]\n\nfn switch_client() {\n\n use crate::{SwitchClient, TargetSession};\n\n use std::borrow::Cow;\n\n\n\n // Structure to switch the current session for client `target-client` to `target-session`\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux switch-client [-ElnprZ] [-c target-client] [-t target-session] [-T key-table]\n\n // (alias: switchc)\n\n // ```\n\n //\n\n // tmux ^2.1:\n\n // ```text\n\n // tmux switch-client [-Elnpr] [-c target-client] [-t target-session] [-T key-table]\n\n // (alias: switchc)\n\n // ```\n\n //\n", "file_path": "src/commands/clients_and_sessions/switch_client_tests.rs", "rank": 1, "score": 99130.65015429015 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{SESSION_OPTIONS_ALL, SESSION_OPTIONS_NONE};\n\n let bitflags =\n\n // 80______________64_63_____________________________32_31______________________________0\n\n 0b_011111111111111111__11111111111111111111111111111111__11111111111111111111111111111111;\n\n //println!(\"{:b}\", SESSION_OPTIONS_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, SESSION_OPTIONS_ALL);\n\n assert_eq!(0, SESSION_OPTIONS_NONE);\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 2, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{SESSION_ALL, SESSION_NONE};\n\n let bitflags =\n\n // 31______23_____1615_____________0\n\n 0b_0000000011111111_1111111111111111;\n\n //println!(\"{:b}\", SESSION_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, SESSION_ALL);\n\n assert_eq!(0, SESSION_NONE);\n\n}\n\n\n", "file_path": "src/variables/session/session_tests.rs", "rank": 3, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{WINDOW_ALL, WINDOW_NONE};\n\n let bitflags =\n\n // 35___31_____________1615_____________0\n\n 0b_1111_1111111111111111_1111111111111111;\n\n //println!(\"{:b}\", WINDOW_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, WINDOW_ALL);\n\n assert_eq!(0, WINDOW_NONE);\n\n}\n\n\n\n//let window_str = \"1557947146'0'1'0'''*'1'64'@0'4'0'3484,177x64,0,0{88x64,0,0,3,88x64,89,0,18}'0'bash'''2'0'0''3484,177x64,0,0{88x64,0,0,3,88x64,89,0,18}'177'0\";\n", "file_path": "src/variables/window/window_tests.rs", "rank": 4, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{WINDOW_OPTIONS_ALL, WINDOW_OPTIONS_NONE};\n\n let bitflags =\n\n // 69___64_63____________________________32_31_____________________________0\n\n 0b_0111111_11111111111111111111111111111111_11111111111111111111111111111111;\n\n //println!(\"{:b}\", WINDOW_OPTIONS_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, WINDOW_OPTIONS_ALL);\n\n assert_eq!(0, WINDOW_OPTIONS_NONE);\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 5, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{SERVER_OPTIONS_ALL, SERVER_OPTIONS_NONE};\n\n let bitflags =\n\n // 14____8_7______0\n\n 0b_1111111_11111111;\n\n 
//println!(\"{:b}\", SERVER_OPTIONS_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, SERVER_OPTIONS_ALL);\n\n assert_eq!(0, SERVER_OPTIONS_NONE);\n\n}\n\n\n", "file_path": "src/options/server_options_tests.rs", "rank": 6, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflags() {\n\n use crate::{PANE_ALL, PANE_NONE};\n\n let bitflags =\n\n // _31____________16_15_____________0\n\n 0b_11111111111111111_1111111111111111;\n\n //println!(\"{:b}\", PANE_ALL);\n\n //println!(\"{:b}\", &bitflags);\n\n assert_eq!(bitflags, PANE_ALL);\n\n assert_eq!(0, PANE_NONE);\n\n}\n\n\n", "file_path": "src/variables/pane/pane_tests.rs", "rank": 7, "score": 94643.83587945614 }, { "content": "#[test]\n\nfn bitflag_operations() {\n\n let a = 0b001;\n\n let b = 0b010;\n\n let c = 0b100;\n\n let d = a | b;\n\n assert!(d & a == a);\n\n assert!(d & c != c);\n\n}\n\n\n\n//#[test]\n\n//fn make_fmt_string() {\n\n//use crate::pane::get_fmt_string;\n\n//use crate::Pane;\n\n//let _a = get_fmt_string(Pane::PANE_ACTIVE | Pane::PANE_AT_LEFT);\n\n//}\n\n\n", "file_path": "src/variables/pane/pane_tests.rs", "rank": 8, "score": 91860.48257523747 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::WindowOptions;\n\n\n\n let _window_options = WindowOptions {\n\n ..Default::default()\n\n };\n\n //dbg!(window_options);\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 9, "score": 89267.10840397955 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::ServerOptions;\n\n\n\n let _server_options = ServerOptions {\n\n ..Default::default()\n\n };\n\n //dbg!(_server_options);\n\n}\n\n\n", "file_path": "src/options/server_options_tests.rs", "rank": 10, "score": 89267.10840397955 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::SessionOptions;\n\n\n\n let _session_options = SessionOptions {\n\n ..Default::default()\n\n };\n\n //dbg!(_session_options);\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 11, "score": 89267.10840397955 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::Pane;\n\n\n\n let _pane = Pane {\n\n ..Default::default()\n\n };\n\n //dbg!(_pane);\n\n}\n\n\n", "file_path": "src/variables/pane/pane_tests.rs", "rank": 12, "score": 89267.10840397955 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::Window;\n\n\n\n let _window = Window {\n\n ..Default::default()\n\n };\n\n //dbg!(_window);\n\n}\n\n\n", "file_path": "src/variables/window/window_tests.rs", "rank": 13, "score": 89267.10840397955 }, { "content": "#[test]\n\nfn show_generated_struct() {\n\n use crate::Session;\n\n\n\n let _session = Session {\n\n ..Default::default()\n\n };\n\n //dbg!(_session);\n\n}\n\n\n", "file_path": "src/variables/session/session_tests.rs", "rank": 14, "score": 89267.10840397955 }, { "content": "// trait used for bin_args and cmd_args\n\npub trait Args<'a> {\n\n fn push_param<S: Into<Cow<'a, str>>>(&mut self, param: S);\n\n fn push_option<S, U>(&mut self, key: S, option: U)\n\n where\n\n S: Into<Cow<'a, str>>,\n\n U: Into<Cow<'a, str>>;\n\n fn push_flag<S: Into<Cow<'a, str>>>(&mut self, flag: S);\n\n}\n\n\n\n// trait used for bin_args and cmd_args\n\nimpl<'a> Args<'a> for Option<Vec<Cow<'a, str>>> {\n\n fn push_param<S: Into<Cow<'a, str>>>(&mut self, param: S) {\n\n self.get_or_insert(Vec::new()).push(param.into());\n\n }\n\n\n\n fn push_option<S, U>(&mut self, key: S, option: U)\n\n where\n\n S: Into<Cow<'a, str>>,\n\n U: Into<Cow<'a, str>>,\n\n {\n\n 
self.get_or_insert(Vec::new())\n\n .extend_from_slice(&[key.into(), option.into()]);\n\n }\n\n\n\n fn push_flag<S: Into<Cow<'a, str>>>(&mut self, flag: S) {\n\n self.push_param(flag.into());\n\n }\n\n}\n", "file_path": "src/commands/tmux_command.rs", "rank": 15, "score": 61317.34887740546 }, { "content": "#[test]\n\nfn issue1() {\n\n use tmux_interface::{TargetSession, TmuxCommand};\n\n\n\n let tmux = TmuxCommand::new();\n\n\n\n let target_session = TargetSession::Raw(\"test_ti\").to_string();\n\n\n\n tmux.new_session()\n\n .detached()\n\n .session_name(&target_session)\n\n .output()\n\n .unwrap();\n\n // do not wait for user input, because test is running on Travis CI\n\n tmux.send_keys().key(\"exit\").key(\"C-m\").output().unwrap();\n\n tmux.attach_session()\n\n .target_session(&target_session)\n\n .output()\n\n .unwrap();\n\n //assert!(output.status.success());\n\n tmux.kill_session()\n\n .target_session(&target_session)\n\n .output()\n\n .unwrap();\n\n}\n", "file_path": "tests/issue1.rs", "rank": 16, "score": 59092.73705103417 }, { "content": "#[test]\n\nfn issue2() {\n\n let sessions = Sessions::get(SESSION_ALL).unwrap();\n\n\n\n for s in sessions {\n\n let session_name = s.name.unwrap().to_string();\n\n let parent_session = TargetSession::new(&session_name);\n\n let ws = Windows::get(&parent_session, WINDOW_ALL).unwrap();\n\n println!(\"Session {}\", session_name);\n\n for w in ws {\n\n println!(\"{:?}\", w);\n\n }\n\n }\n\n}\n", "file_path": "tests/issue2.rs", "rank": 17, "score": 59092.73705103417 }, { "content": "#[test]\n\nfn has_session() {\n\n use crate::tmux_interface::TmuxCommand;\n\n\n\n let tmux = TmuxCommand::new();\n\n //tmux.tmux = Some(\"./tests/tmux_mock.sh\");\n\n let session_name = \"test_has_session\";\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n let has_session = tmux\n\n .has_session()\n\n .target_session(session_name)\n\n .output()\n\n .unwrap();\n\n assert_eq!(has_session.0.status.success(), true);\n\n tmux.kill_session()\n\n .target_session(session_name)\n\n .output()\n\n .unwrap();\n\n}\n\n\n\n//#[test]\n\n//fn kill_server() {\n\n//unimplemented!();\n\n//}\n\n\n", "file_path": "tests/tmux_interface.rs", "rank": 18, "score": 57363.123465581695 }, { "content": "#[test]\n\nfn example2() {\n\n use tmux_interface::{HasSession, KillSession, NewSession};\n\n\n\n NewSession::new()\n\n .detached()\n\n .session_name(\"example_2\")\n\n .output()\n\n .unwrap();\n\n HasSession::new()\n\n .target_session(\"example_2\")\n\n .output()\n\n .unwrap();\n\n KillSession::new()\n\n .target_session(\"example_2\")\n\n .output()\n\n .unwrap();\n\n}\n\n\n\n//#[test]\n\n//fn example3() {\n\n//use tmux_interface::{TargetSession, TmuxInterface};\n\n\n\n//let mut tmux = TmuxInterface::new();\n\n//let id = tmux.new_session(None).unwrap();\n\n//tmux.kill_session(None, None, Some(&TargetSession::Id(id)))\n\n//.unwrap();\n\n//}\n", "file_path": "tests/readme_examples.rs", "rank": 19, "score": 57363.123465581695 }, { "content": "#[test]\n\nfn example1() {\n\n use tmux_interface::TmuxCommand;\n\n\n\n let tmux = TmuxCommand::new();\n\n\n\n tmux.new_session()\n\n .detached()\n\n .session_name(\"example_1\")\n\n .output()\n\n .unwrap();\n\n tmux.has_session()\n\n .target_session(\"example_1\")\n\n .output()\n\n .unwrap();\n\n tmux.kill_session()\n\n .target_session(\"example_1\")\n\n .output()\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/readme_examples.rs", "rank": 20, "score": 57363.123465581695 }, { "content": "#[test]\n\nfn 
kill_session() {\n\n use crate::tmux_interface::TmuxCommand;\n\n\n\n let session_name = \"test_kill_session\";\n\n\n\n let tmux = TmuxCommand::new();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n tmux.kill_session()\n\n .target_session(session_name)\n\n .output()\n\n .unwrap();\n\n}\n\n\n\n// NOTE: comment out, bash scripts moved out from tests directory\n\n//#[test]\n\n//fn callback() {\n", "file_path": "tests/tmux_interface.rs", "rank": 21, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn new_session() {\n\n use crate::tmux_interface::TmuxCommand;\n\n\n\n let tmux = TmuxCommand::new();\n\n\n\n let session_name = \"test_new_session\";\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n tmux.kill_session()\n\n .target_session(session_name)\n\n .output()\n\n .unwrap();\n\n}\n\n\n\n//#[test]\n\n//fn refresh_client() {\n\n//unimplemented!();\n\n//}\n\n\n", "file_path": "tests/tmux_interface.rs", "rank": 22, "score": 55762.51903335482 }, { "content": "#[cfg(feature = \"tmux_1_6\")]\n\n#[test]\n\nfn get_panes() {\n\n use tmux_interface::PANE_ALL;\n\n use tmux_interface::{Panes, TargetSession, TargetWindowExt, TmuxCommand};\n\n use tmux_interface::{SessionOptionsBuilder, BASE_INDEX};\n\n\n\n const TARGET_SESSION: &str = \"test_get_panes\";\n\n const WINDOW_INDEX: usize = 1;\n\n\n\n let target_session = TargetSession::Raw(TARGET_SESSION);\n\n let target_session_str = target_session.to_string();\n\n let target_window = TargetWindowExt::id(Some(&target_session), WINDOW_INDEX);\n\n\n\n let tmux = TmuxCommand::new();\n\n\n\n SessionOptionsBuilder::new()\n\n .base_index(WINDOW_INDEX)\n\n .build()\n\n .set(BASE_INDEX)\n\n .unwrap();\n\n\n", "file_path": "tests/panes_tests.rs", "rank": 23, "score": 55762.51903335482 }, { "content": "#[cfg(feature = \"tmux_1_6\")]\n\n#[test]\n\nfn get_sessions() {\n\n use tmux_interface::SESSION_ALL;\n\n use tmux_interface::{Sessions, TargetSession, TmuxCommand};\n\n\n\n const TARGET_SESSION: &str = \"test_get_sessions\";\n\n let target_session = TargetSession::Raw(TARGET_SESSION).to_string();\n\n\n\n let tmux = TmuxCommand::new();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(&target_session)\n\n .output()\n\n .unwrap();\n\n let has_session = tmux\n\n .has_session()\n\n .target_session(&target_session)\n\n .output()\n\n .unwrap();\n\n assert_eq!(has_session.0.status.success(), true);\n\n\n\n let _sessions = Sessions::get(SESSION_ALL).unwrap();\n\n\n\n tmux.kill_session()\n\n .target_session(&target_session)\n\n .output()\n\n .unwrap();\n\n}\n", "file_path": "tests/sessions_tests.rs", "rank": 24, "score": 55762.51903335482 }, { "content": "#[cfg(feature = \"tmux_1_6\")]\n\n#[test]\n\nfn get_windows() {\n\n use tmux_interface::WINDOW_ALL;\n\n use tmux_interface::{NewSession, TargetSession, TmuxCommand, Windows};\n\n\n\n const TARGET_SESSION: &str = \"test_get_windows\";\n\n let target_session = TargetSession::Raw(TARGET_SESSION);\n\n let target_session_str = target_session.to_string();\n\n\n\n let tmux = TmuxCommand::new();\n\n\n\n //NewSession::new()\n\n //.detached()\n\n //.session_name(TARGET_SESSION)\n\n //.output()\n\n //.unwrap();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(&target_session_str)\n\n .output()\n\n .unwrap();\n", "file_path": "tests/windows_tests.rs", "rank": 25, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn list_sessions() {\n\n use crate::tmux_interface::TmuxCommand;\n\n\n\n let session_name = 
\"test_list_sessions\";\n\n let tmux = TmuxCommand::new();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n tmux.list_sessions().output().unwrap();\n\n tmux.kill_session()\n\n .target_session(session_name)\n\n .output()\n\n .unwrap();\n\n}\n\n\n\n//#[test]\n\n//fn lock_client() {\n\n//unimplemented!();\n\n//}\n\n\n\n//#[test]\n\n//fn lock_session() {\n\n//unimplemented!();\n\n//}\n\n\n", "file_path": "tests/tmux_interface.rs", "rank": 26, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn send_keys() {\n\n use crate::tmux_interface::{TargetPane, TmuxCommand};\n\n\n\n let session_name = \"test_send_keys\";\n\n\n\n let tmux = TmuxCommand::new();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n\n\n let target_pane = TargetPane::Raw(\"test_send_keys:^.0\").to_string();\n\n #[cfg(feature = \"tmux_1_6\")]\n\n tmux.send_keys()\n\n .target_pane(&target_pane)\n\n .key(\"top\")\n\n .key(\"C-m\")\n\n .output()\n\n .unwrap();\n", "file_path": "tests/tmux_interface.rs", "rank": 27, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn tmux() {\n\n use crate::TmuxCommand;\n\n use std::borrow::Cow;\n\n\n\n // This structure is used to store execution parameters of `tmux`, including binary\n\n // name. Full description of fields can be found using `man tmux`.\n\n // [man tmux](http://man7.org/linux/man-pages/man1/tmux.1.html#DESCRIPTION)\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.1:\n\n // ```text\n\n // tmux [-2CluvV] [-c shell-command] [-f file] [-L socket-name] [-S socket-path] [command [flags]]\n\n // ```\n\n //\n\n // tmux ^1.9:\n\n // ```text\n\n // tmux [-2lCquvV] [-c shell-command] [-f file] [-L socket-name] [-S socket-path] [command [flags]]\n\n // ```\n\n //\n", "file_path": "src/commands/tmux_tests.rs", "rank": 28, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn rename_session() {\n\n use crate::tmux_interface::TmuxCommand;\n\n\n\n let session_name = \"test_rename_session\";\n\n let new_name = \"rename_test_session\";\n\n let tmux = TmuxCommand::new();\n\n tmux.new_session()\n\n .detached()\n\n .session_name(session_name)\n\n .output()\n\n .unwrap();\n\n tmux.rename_session()\n\n .target_session(session_name)\n\n .new_name(new_name)\n\n .output()\n\n .unwrap();\n\n let has_session = tmux\n\n .has_session()\n\n .target_session(new_name)\n\n .output()\n", "file_path": "tests/tmux_interface.rs", "rank": 29, "score": 55762.51903335482 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::WindowOptions;\n\n\n\n let window_options_str = r#\"\n\n aggressive-resize off\n\n automatic-rename on\n\n automatic-rename-format \"\"\n\n clock-mode-colour colour135\n\n clock-mode-style 24\n\n force-height 0\n\n force-width 0\n\n main-pane-height 24\n\n main-pane-width 80\n\n mode-keys vi\n\n mode-style fg=colour196,bg=colour238,bright\n\n monitor-activity off\n\n monitor-bell on\n\n monitor-silence 0\n\n other-pane-height 0\n\n other-pane-width 0\n", "file_path": "src/options/window_options_tests.rs", "rank": 30, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn if_shell() {\n\n use crate::{IfShell, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Structure for conditional commands executing\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.0:\n\n // ```text\n\n // tmux if-shell [-bF] [-t target-pane] shell-command command [command]\n\n // (alias: if)\n\n // ```\n\n //\n\n // tmux ^1.8:\n\n // ```text\n\n // tmux if-shell [-b] [-t target-pane] shell-command command 
[command]\n\n // (alias: if)\n\n // ```\n\n //\n", "file_path": "src/commands/miscellaneous/if_shell_tests.rs", "rank": 31, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn get() {\n\n use crate::WindowOptions;\n\n let _window_options = WindowOptions::get_all().unwrap();\n\n //dbg!(session_options);\n\n //assert_eq!(\n\n //window_options.window_status_separator,\n\n //Some(\" \".to_string())\n\n //);\n\n}\n", "file_path": "src/options/window_options_tests.rs", "rank": 32, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn to_string() {\n\n use crate::SessionOptionsBuilder;\n\n\n\n let _session_options = SessionOptionsBuilder::new()\n\n .default_shell(\"asdfasdfasdf\")\n\n .build();\n\n //dbg!(&session_options.to_string());\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 33, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n #[cfg(feature = \"tmux_2_6\")]\n\n use crate::SetClipboard;\n\n use crate::{ServerOptions, ServerOptionsBuilder, Switch};\n\n\n\n let mut builder = ServerOptionsBuilder::new();\n\n builder.buffer_limit(50);\n\n #[cfg(feature = \"tmux_2_1\")]\n\n builder.default_terminal(\"\\\"screen-256color\\\"\");\n\n #[cfg(feature = \"tmux_2_7\")]\n\n builder.exit_empty(Switch::On);\n\n #[cfg(feature = \"tmux_2_4\")]\n\n builder.command_alias(vec![\n\n \"\\\"split-pane=split-window\\\"\",\n\n \"\\\"splitp=split-window\\\"\",\n\n ]);\n\n let server_options_default = builder.build();\n\n\n\n // test int, string, enum, vec\n\n let server_options_str = r#\"\n", "file_path": "src/options/server_options_tests.rs", "rank": 34, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn get() {\n\n use crate::SessionOptions;\n\n let _session_options = SessionOptions::get_all().unwrap();\n\n //dbg!(session_options);\n\n}\n", "file_path": "src/options/session_options_tests.rs", "rank": 35, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::Pane;\n\n use crate::PANE_ALL;\n\n\n\n //let pane_str = \"1'1'1'1'1'63'bash'/home/user'0''1'64'%0'0'0'0'0'0'0''1945'0'176'''0'8,16,24,32,40,48,56,64,72,80,88,96,104,112,120,128,136,144,152,160,168,176'asus'0'/dev/pts/2'177\";\n\n\n\n let pane_vec = vec![\n\n // pane_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1\",\n\n // pane_at_bottom\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"0\",\n\n // pane_at_left\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"1\",\n\n // pane_at_right\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"1\",\n\n // pane_at_top\n", "file_path": "src/variables/pane/pane_tests.rs", "rank": 36, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn get() {\n\n use crate::PaneOptions;\n\n let _pane_options = PaneOptions::get_all().unwrap();\n\n //dbg!(pane_options);\n\n}\n", "file_path": "src/options/pane_options_tests.rs", "rank": 37, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse2() {\n\n use crate::{Windows, WINDOW_ALL};\n\n\n\n //let windows_str = \"\n\n //1559064235'0'0'0''''1'64'@0'1'0'c3bd,177x64,0,0,0'0'bash'''1'0'3''c3bd,177x64,0,0,0'177'0\\n\\\n\n //1559064235'0'0'0''''1'64'@1'2'0'8b65,177x64,0,0[177x46,0,0,1,177x17,0,47,4]'0'vim'''2'0'2''8b65,177x64,0,0[177x46,0,0,1,177x17,0,47,4]'177'0\\n\\\n\n //1559064235'0'0'0'''-'1'64'@2'3'1'7966,177x64,0,0[177x52,0,0,2,177x11,0,53,3]'0'vim'''2'0'1''7966,177x64,0,0[177x52,0,0,2,177x11,0,53,3]'177'0\\n\\\n\n //1559064235'0'1'0'''*'1'64'@4'4'0'c3c3,177x64,0,0,6'0'bash'''1'0'0''c3c3,177x64,0,0,6'177'0\";\n\n\n\n let window0_vec = vec![\n\n // window_active\n\n #[cfg(feature = 
\"tmux_1_6\")]\n\n \"1559064235\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n", "file_path": "src/variables/window/windows_tests.rs", "rank": 38, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse2() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 39, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn to_string() {\n\n use crate::ServerOptionsBuilder;\n\n\n\n let server_options = ServerOptionsBuilder::new().buffer_limit(50).build();\n\n assert_eq!(server_options.to_string(), \"buffer-limit 50\\n\");\n\n}\n\n\n", "file_path": "src/options/server_options_tests.rs", "rank": 40, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse5() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 41, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse1() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 42, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse3() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 43, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::SessionOptions;\n\n\n\n let session_options_str = r#\"\n\n activity-action other\n\n assume-paste-time 1\n\n base-index 1\n\n bell-action none\n\n default-command \"\"\n\n default-shell \"/usr/bin/fish\"\n\n destroy-unattached off\n\n detach-on-destroy on\n\n display-panes-active-colour red\n\n display-panes-colour blue\n\n display-panes-time 1000\n\n display-time 750\n\n history-limit 2000\n\n key-table \"root\"\n\n 
lock-after-time 0\n\n lock-command \"lock -np\"\n", "file_path": "src/options/session_options_tests.rs", "rank": 44, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse2() {\n\n use crate::Session;\n\n #[cfg(feature = \"tmux_2_1\")]\n\n use crate::SESSION_ACTIVITY;\n\n #[cfg(feature = \"tmux_1_6\")]\n\n use crate::SESSION_CREATED;\n\n #[cfg(feature = \"tmux_2_1\")]\n\n use crate::SESSION_LAST_ATTACHED;\n\n use std::time::Duration;\n\n\n\n let session_str = \"1557947146:1557947146:1557947146\";\n\n #[cfg(feature = \"tmux_1_6\")]\n\n let session_bitflag = SESSION_CREATED;\n\n #[cfg(feature = \"tmux_2_1\")]\n\n let session_bitflag = SESSION_ACTIVITY | SESSION_CREATED | SESSION_LAST_ATTACHED;\n\n let session = Session::from_str(session_str, session_bitflag).unwrap();\n\n let origin = Session {\n\n #[cfg(feature = \"tmux_2_1\")]\n\n activity: Some(Duration::from_secs(1557947146)),\n\n #[cfg(all(feature = \"tmux_2_1\", not(feature = \"tmux_2_2\")))]\n", "file_path": "src/variables/session/session_tests.rs", "rank": 45, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::Session;\n\n #[cfg(feature = \"tmux_2_5\")]\n\n use crate::SessionStack;\n\n use crate::SESSION_ALL;\n\n use std::time::Duration;\n\n\n\n let session_vec = vec![\n\n // session_activity\n\n #[cfg(feature = \"tmux_2_1\")]\n\n \"1557947146\",\n\n // session_activity_string\n\n #[cfg(all(feature = \"tmux_2_1\", not(feature = \"tmux_2_2\")))]\n\n \"\",\n\n // session_alerts\n\n #[cfg(feature = \"tmux_2_1\")]\n\n \"\",\n\n // session_attached\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1\",\n", "file_path": "src/variables/session/session_tests.rs", "rank": 46, "score": 54277.008816668924 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_0_8\")]\n\nfn status() {\n\n use crate::Status;\n\n assert_eq!(Status::On.to_string(), \"on\");\n\n assert_eq!(Status::Off.to_string(), \"off\");\n\n #[cfg(feature = \"tmux_2_9\")]\n\n assert_eq!(Status::_2.to_string(), \"2\");\n\n #[cfg(feature = \"tmux_2_9\")]\n\n assert_eq!(Status::_3.to_string(), \"3\");\n\n #[cfg(feature = \"tmux_2_9\")]\n\n assert_eq!(Status::_4.to_string(), \"4\");\n\n #[cfg(feature = \"tmux_2_9\")]\n\n assert_eq!(Status::_5.to_string(), \"5\");\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 47, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse6() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 48, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse2() {\n\n use crate::Pane;\n\n use crate::PANE_ALL;\n\n\n\n let origin = Pane {\n\n #[cfg(feature = \"tmux_1_6\")]\n\n active: Some(true),\n\n #[cfg(feature = \"tmux_2_6\")]\n\n at_bottom: Some(true),\n\n #[cfg(feature = \"tmux_2_6\")]\n\n at_left: Some(true),\n\n #[cfg(feature = \"tmux_2_6\")]\n\n at_right: Some(true),\n\n #[cfg(feature = \"tmux_2_6\")]\n\n at_top: Some(true),\n\n #[cfg(feature = \"tmux_2_0\")]\n\n bottom: Some(63),\n\n #[cfg(feature = \"tmux_1_8\")]\n\n current_command: Some(\"bash\".to_string()),\n\n #[cfg(feature = \"tmux_1_7\")]\n", 
"file_path": "src/variables/pane/pane_tests.rs", "rank": 49, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::{Panes, PANE_ALL};\n\n\n\n //\"1'1'1'1'1'63'bash'/home/user'0''1'64'%0'0'0'0'0'0'0''1945'0'176'''0'8,16,24,\\\n\n // 32,40,48,56,64,72,80,88,96,104,112,120,128,136,144,152,160,168,176'asus'0'\\\n\n // /dev/pts/2'177\\n\"\n\n\n\n let pane0_vec = vec![\n\n // pane_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1\",\n\n // pane_at_bottom\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"1\",\n\n // pane_at_left\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"1\",\n\n // pane_at_right\n\n #[cfg(feature = \"tmux_2_6\")]\n\n \"1\",\n", "file_path": "src/variables/pane/panes_tests.rs", "rank": 50, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn to_string() {\n\n //use crate::{Switch, WindowOptionsBuilder};\n\n\n\n //let window_options = WindowOptionsBuilder::new().allow_rename(Switch::Off).build();\n\n //dbg!(&window_options.to_string());\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 51, "score": 54277.008816668924 }, { "content": "#[cfg(feature = \"tmux_1_0\")]\n\nfn create_insert_vec(\n\n v: Option<&mut Vec<String>>,\n\n i: Option<usize>,\n\n s: &str,\n\n) -> Option<Vec<String>> {\n\n if let Some(v) = v {\n\n if let Some(i) = i {\n\n v.insert(i, s.to_string());\n\n return Some(v.to_vec());\n\n };\n\n } else {\n\n let mut v = Vec::new();\n\n if let Some(i) = i {\n\n v.insert(i, s.to_string());\n\n return Some(v);\n\n };\n\n };\n\n None\n\n}\n", "file_path": "src/options/mod.rs", "rank": 52, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::PaneOptions;\n\n\n\n let pane_options_str = r#\"\n\n allow-rename off\n\n alternate-screen on\n\n remain-on-exit off\n\n window-active-style fg=colour253,bg=colour235\n\n window-style fg=colour247,bg=colour238\n\n \"#;\n\n let _pane_options = pane_options_str.parse::<PaneOptions>().unwrap();\n\n //dbg!(&pane_options);\n\n}\n\n\n", "file_path": "src/options/pane_options_tests.rs", "rank": 53, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn get_all() {\n\n use crate::ServerOptions;\n\n let _server_options = ServerOptions::get_all().unwrap();\n\n}\n\n\n", "file_path": "src/options/server_options_tests.rs", "rank": 54, "score": 54277.008816668924 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_0_8\")]\n\nfn activity() {\n\n use crate::Activity;\n\n assert_eq!(Activity::On.to_string(), \"on\");\n\n assert_eq!(Activity::Off.to_string(), \"off\");\n\n #[cfg(feature = \"tmux_2_6\")]\n\n assert_eq!(Activity::Both.to_string(), \"both\");\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 55, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn bool() {\n\n //let c = \"1\".parse::<bool>().unwrap();\n\n //assert_eq!(c, true);\n\n let a = \"1\".parse::<usize>().unwrap();\n\n let b = if a == 1 { true } else { false };\n\n assert_eq!(b, true);\n\n}\n\n\n", "file_path": "src/variables/pane/pane_tests.rs", "rank": 56, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::{Sessions, SESSION_ALL};\n\n\n\n //\"1557947146::1:1557947146:1::::0::0:$0:1557947146:0:0:3,2,1:3\\n\\\n\n //1557947146::1:1557947146:1::::0::0:$0:1557947146:0:0:3,2,1:3\";\n\n let session1_vec = vec![\n\n // session_activity\n\n #[cfg(feature = \"tmux_2_1\")]\n\n \"1557947146\",\n\n // session_activity_string\n\n #[cfg(all(feature = \"tmux_2_1\", not(feature = \"tmux_2_2\")))]\n\n \"\",\n\n // session_alerts\n\n #[cfg(feature = 
\"tmux_2_1\")]\n\n \"\",\n\n // session_attached\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1\",\n\n // session_attached_list\n\n #[cfg(feature = \"tmux_3_1\")]\n", "file_path": "src/variables/session/sessions_tests.rs", "rank": 57, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::{Windows, WINDOW_ALL};\n\n\n\n //let windows_str = \"\n\n // 1559064235'0'0'0''''1'64'@0'1'0'c3bd,177x64,0,0,0'0'bash'''1'0'2''c3bd,177x64,0,0,0'177'0\\n\\\n\n // 1559064235'0'0'0''''1'64'@1'1'0'c3bd,177x64,0,0,0'0'bash'''1'0'2''c3bd,177x64,0,0,0'177'0\";\n\n let window0_vec = vec![\n\n // window 0\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1559064235\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/windows_tests.rs", "rank": 58, "score": 54277.008816668924 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_0_8\")]\n\nfn action() {\n\n use crate::Action;\n\n assert_eq!(Action::Any.to_string(), \"any\");\n\n assert_eq!(Action::None.to_string(), \"none\");\n\n assert_eq!(Action::Current.to_string(), \"current\");\n\n #[cfg(feature = \"tmux_2_1\")]\n\n assert_eq!(Action::Other.to_string(), \"other\");\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 59, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse() {\n\n //use crate::Layouts;\n\n //use crate::LayoutType;\n\n //use crate::Layout;\n\n\n\n //let \"bcde,178x64,0,0[177x32,0,0{88x32,0,0,1,44x32,89,0,4,43x32,134,0,5},177x31,0,33{88x31,0,33,2,88x31,89,33,3}]\"\n\n //let l = Layouts::parse(\"8b65,177x64,0,0[177x46,0,0,1,177x17,0,47,4]\").unwrap();\n\n //let l_orig = Layouts {\n\n //style: LayoutType::TopBottom,\n\n //checksum: \"8b65\".to_string(),\n\n //parent: Layout {\n\n //x: 177,\n\n //y: 64,\n\n //x_off: 0,\n\n //y_off: 0,\n\n //id: None\n\n //},\n\n //layouts: None\n\n //};\n\n //assert_eq!(l_orig, l);\n", "file_path": "src/variables/layout/layout_tests.rs", "rank": 60, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn parse4() {\n\n use crate::Window;\n\n use crate::WINDOW_ALL;\n\n\n\n let window_vec = vec![\n\n // window_active\n\n #[cfg(feature = \"tmux_1_6\")]\n\n \"1557947146\",\n\n // window_active_clients\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_clients_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n\n // window_active_sessions_list\n\n #[cfg(feature = \"tmux_3_1\")]\n\n \"\",\n", "file_path": "src/variables/window/window_tests.rs", "rank": 61, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn wait_for() {\n\n use crate::WaitFor;\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^1.9:\n\n // ```text\n\n // tmux wait-for [-L | -S | -U] channel\n\n // (alias: wait)\n\n // ```\n\n //\n\n // tmux ^1.8:\n\n // ```text\n\n // tmux wait-for -LSU channel\n\n // (alias: wait)\n\n // ```\n\n let mut wait_for = WaitFor::new();\n\n #[cfg(feature = \"tmux_1_8\")]\n\n wait_for.locked();\n", "file_path": "src/commands/miscellaneous/wait_for_tests.rs", "rank": 62, "score": 54277.008816668924 }, { "content": "#[test]\n\nfn to_string() {\n\n use crate::{PaneOptionsBuilder, Switch};\n\n\n\n let _pane_options = PaneOptionsBuilder::new().allow_rename(Switch::Off).build();\n\n //dbg!(&pane_options.to_string());\n\n}\n\n\n", 
"file_path": "src/options/pane_options_tests.rs", "rank": 63, "score": 54277.008816668924 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_1_7\")]\n\nfn status_position() {\n\n use crate::StatusPosition;\n\n assert_eq!(StatusPosition::Top.to_string(), \"top\");\n\n assert_eq!(StatusPosition::Bottom.to_string(), \"bottom\");\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 64, "score": 52894.60958166559 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_2_9\")]\n\nfn window_size() {\n\n use crate::WindowSize;\n\n assert_eq!(WindowSize::Largest.to_string(), \"largest\");\n\n assert_eq!(WindowSize::Smallest.to_string(), \"smallest\");\n\n assert_eq!(WindowSize::Manual.to_string(), \"manual\");\n\n #[cfg(feature = \"tmux_3_1\")]\n\n assert_eq!(WindowSize::Latest.to_string(), \"latest\");\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 65, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn get_single() {\n\n use crate::ServerOptions;\n\n use crate::ESCAPE_TIME;\n\n #[cfg(feature = \"tmux_1_7\")]\n\n let _server_options = ServerOptions::get(ESCAPE_TIME).unwrap();\n\n //assert_eq!(server_options.escape_time, Some(500));\n\n}\n\n\n\n//#[test]\n\n//fn set_single() {\n\n//use crate::common::server_options::ESCAPE_TIME;\n\n//use crate::{ServerOptions, ServerOptionsBuilder};\n\n\n\n//let server_options = ServerOptionsBuilder::new().escape_time(600).build();\n\n//server_options.set(ESCAPE_TIME).unwrap();\n\n//let server_options = ServerOptions::get(ESCAPE_TIME).unwrap();\n\n//assert_eq!(server_options.escape_time, Some(600));\n\n\n\n//let server_options = ServerOptionsBuilder::new().escape_time(500).build();\n\n//server_options.set(ESCAPE_TIME).unwrap();\n\n//}\n", "file_path": "src/options/server_options_tests.rs", "rank": 66, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn set_clipboard() {\n\n use crate::SetClipboard;\n\n\n\n assert_eq!(SetClipboard::On.to_string(), \"on\");\n\n assert_eq!(SetClipboard::Off.to_string(), \"off\");\n\n #[cfg(feature = \"tmux_2_6\")]\n\n assert_eq!(SetClipboard::External.to_string(), \"external\");\n\n}\n\n\n", "file_path": "src/options/server_options_tests.rs", "rank": 67, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn calc() {\n\n use crate::LayoutChecksum;\n\n\n\n let checksum_orig = usize::from_str_radix(\"e211\", 16).unwrap();\n\n let checksum = LayoutChecksum::calc(\"177x64,0,0,22\");\n\n assert_eq!(checksum_orig, checksum);\n\n\n\n let checksum_orig = usize::from_str_radix(\"d964\", 16).unwrap();\n\n let checksum = LayoutChecksum::calc(\"177x64,0,0[177x48,0,0,1,177x15,0,49,2]\");\n\n assert_eq!(checksum_orig, checksum);\n\n}\n", "file_path": "src/variables/layout/layout_checksum_tests.rs", "rank": 68, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn confirm_before() {\n\n use crate::ConfirmBefore;\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux confirm-before [-p prompt] [-t target-client] command\n\n // (alias: confirm)\n\n // ```\n\n //\n\n // tmux ^0.9:\n\n // ```text\n\n // tmux confirm-before [-t target-client] command\n\n // (alias: confirm)\n\n // ```\n\n let mut confirm_before = ConfirmBefore::new();\n\n #[cfg(feature = \"tmux_1_5\")]\n\n confirm_before.prompt(\"1\");\n", "file_path": "src/commands/status_line/confirm_before_tests.rs", "rank": 69, "score": 52894.60958166559 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_1_0\")]\n\nfn status_position() {\n\n use crate::ClockModeStyle;\n\n 
assert_eq!(ClockModeStyle::_12.to_string(), \"12\");\n\n assert_eq!(ClockModeStyle::_24.to_string(), \"24\");\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 70, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn parse() {\n\n use crate::LayoutCell;\n\n use crate::LayoutType;\n\n\n\n let layout_orig = LayoutCell::new(176, 64, 1, 3, Some(2), LayoutType::WindowPane, None);\n\n let layout_cell: LayoutCell = \"176x64,1,3,2\".parse().unwrap();\n\n assert_eq!(layout_cell, layout_orig);\n\n\n\n let pane2 = LayoutCell::new(176, 32, 3, 4, Some(2), LayoutType::WindowPane, None);\n\n let pane3 = LayoutCell::new(177, 31, 5, 6, Some(3), LayoutType::WindowPane, None);\n\n let layout_orig = LayoutCell::new(\n\n 178,\n\n 64,\n\n 1,\n\n 2,\n\n None,\n\n LayoutType::TopBottom,\n\n Some(vec![pane2, pane3]),\n\n );\n\n let layout_cell: LayoutCell = \"178x64,1,2[176x32,3,4,2,177x31,5,6,3]\".parse().unwrap();\n", "file_path": "src/variables/layout/layout_cell_tests.rs", "rank": 71, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn has_session() {\n\n use crate::HasSession;\n\n use std::borrow::Cow;\n\n\n\n // Report if the specified session exist\n\n //\n\n // # Manual\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux has-session [-t target-session]\n\n // (alias: has)\n\n // ```\n\n let mut has_session = HasSession::new();\n\n #[cfg(feature = \"tmux_0_8\")]\n\n has_session.target_session(\"1\");\n\n\n\n let mut s = Vec::new();\n\n #[cfg(feature = \"tmux_0_8\")]\n\n s.extend_from_slice(&[\"-t\", \"1\"]);\n", "file_path": "src/commands/clients_and_sessions/has_session_tests.rs", "rank": 72, "score": 52894.60958166559 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_1_0\")]\n\nfn status_justify() {\n\n use crate::StatusJustify;\n\n assert_eq!(StatusJustify::Left.to_string(), \"left\");\n\n assert_eq!(StatusJustify::Centre.to_string(), \"centre\");\n\n assert_eq!(StatusJustify::Right.to_string(), \"right\");\n\n}\n\n\n", "file_path": "src/options/session_options_tests.rs", "rank": 73, "score": 52894.60958166559 }, { "content": "#[test]\n\nfn show_hooks() {\n\n use crate::{ShowHooks, TargetSession};\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^2.2:\n\n // ```text\n\n // tmux show-hooks [-g] [-t target-session]\n\n // ```\n\n let target_session = TargetSession::Raw(\"1\").to_string();\n\n\n\n let mut show_hooks = ShowHooks::new();\n\n #[cfg(feature = \"tmux_2_2\")]\n\n show_hooks.global();\n\n #[cfg(feature = \"tmux_2_2\")]\n\n show_hooks.target_session(&target_session);\n\n\n\n let cmd = \"show-hooks\";\n\n\n", "file_path": "src/commands/hooks/show_hooks_tests.rs", "rank": 74, "score": 51604.94583708429 }, { "content": "#[test]\n\n#[cfg(feature = \"tmux_2_3\")]\n\nfn pane_border_status() {\n\n use crate::PaneBorderStatus;\n\n assert_eq!(PaneBorderStatus::Off.to_string(), \"off\");\n\n assert_eq!(PaneBorderStatus::Top.to_string(), \"top\");\n\n assert_eq!(PaneBorderStatus::Bottom.to_string(), \"bottom\");\n\n}\n\n\n", "file_path": "src/options/window_options_tests.rs", "rank": 75, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn lock_server() {\n\n use crate::LockServer;\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux lock-server\n\n // (alias: lock)\n\n // ```\n\n let lock_server = LockServer::new();\n\n\n\n #[cfg(not(feature = \"cmd_alias\"))]\n\n let cmd = \"lock-server\";\n\n #[cfg(feature = \"cmd_alias\")]\n\n let cmd = \"lock\";\n\n\n\n assert_eq!(lock_server.0.bin, 
Cow::Borrowed(\"tmux\"));\n\n assert_eq!(lock_server.0.bin_args, None);\n\n assert_eq!(lock_server.0.cmd, Some(Cow::Borrowed(cmd)));\n\n assert_eq!(lock_server.0.cmd_args, None);\n\n}\n", "file_path": "src/commands/miscellaneous/lock_server_tests.rs", "rank": 76, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn set_option() {\n\n use crate::{SetOption, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Structure for setting a pane/window/session/server option\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.0:\n\n // ```text\n\n // tmux set-option [-aFgopqsuw] [-t target-pane] option value\n\n // (alias: set)\n\n // ```\n\n //\n\n // tmux ^2.6:\n\n // ```text\n\n // tmux set-option [-aFgoqsuw] [-t target-session | target-window] option value\n\n // (alias: set)\n\n // ```\n\n //\n", "file_path": "src/commands/options/set_option_tests.rs", "rank": 77, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn paste_buffer() {\n\n use crate::{PasteBuffer, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Structure for inserting the contents of a paste buffer into the specified pane\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.7:\n\n // ```text\n\n // tmux paste-buffer [-dpr] [-b buffer-name] [-s separator] [-t target-pane]\n\n // (alias: pasteb)\n\n // ```\n\n //\n\n // tmux ^1.3:\n\n // ```text\n\n // tmux paste-buffer [-dr] [-b buffer-index] [-s separator] [-t target-window]\n\n // (alias: pasteb)\n\n // ```\n\n //\n", "file_path": "src/commands/buffers/paste_buffer_tests.rs", "rank": 78, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn run_shell() {\n\n use crate::{RunShell, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^1.8:\n\n // ```text\n\n // tmux run-shell [-b] [-t target-pane] shell-command\n\n // (alias: run)\n\n // ```\n\n //\n\n // tmux ^1.2:\n\n // ```text\n\n // tmux run-shell shell-command\n\n // (alias: run)\n\n // ```\n\n //\n\n // tmux ^1.1:\n\n // ```text\n", "file_path": "src/commands/miscellaneous/run_shell_tests.rs", "rank": 79, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn show_buffer() {\n\n use crate::ShowBuffer;\n\n use std::borrow::Cow;\n\n\n\n // Display the contents of the specified buffer.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux show-buffer [-b buffer-name]\n\n // (alias: showb)\n\n // ```\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux show-buffer [-b buffer-index] [-t target-session]\n\n // (alias: showb)\n\n // ```\n\n let mut show_buffer = ShowBuffer::new();\n", "file_path": "src/commands/buffers/show_buffer_tests.rs", "rank": 80, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn load_buffer() {\n\n use crate::LoadBuffer;\n\n use std::borrow::Cow;\n\n\n\n // Load the contents of the specified paste buffer from path.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.0:\n\n // ```text\n\n // tmux load-buffer [-b buffer-name] path\n\n // (alias: loadb)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux load-buffer [-b buffer-index] path\n\n // (alias: loadb)\n\n // ```\n\n //\n", "file_path": "src/commands/buffers/load_buffer_tests.rs", "rank": 81, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn save_buffer() {\n\n use crate::SaveBuffer;\n\n use std::borrow::Cow;\n\n\n\n // Save the contents of the specified paste buffer to path.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.0:\n\n // ```text\n\n // tmux save-buffer [-a] [-b buffer-name] path\n\n // (alias: saveb)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux save-buffer [-a] 
[-b buffer-index] path\n\n // (alias: saveb)\n\n // ```\n\n //\n", "file_path": "src/commands/buffers/save_buffer_tests.rs", "rank": 82, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn delete_buffer() {\n\n use crate::{DeleteBuffer, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Delete the buffer named buffer-name, or the most recently added automatically named buffer\n\n // if not specified.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.0:\n\n // ```text\n\n // tmux delete-buffer [-b buffer-name]\n\n // (alias: deleteb)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux delete-buffer [-b buffer-index]\n\n // (alias: deleteb)\n\n // ```\n", "file_path": "src/commands/buffers/delete_buffer_tests.rs", "rank": 83, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn clear_history() {\n\n use crate::{ClearHistory, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Remove and free the history for the specified pane.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.0:\n\n // ```text\n\n // tmux clear-history [-t target-pane]\n\n // (alias: clearhist)\n\n // ```\n\n //\n\n // tmux ^0.9:\n\n // ```text\n\n // tmux clear-history [-p pane-index] [-t target-window]\n\n // (alias: clearhist)\n\n // ```\n\n let target_pane = TargetPane::Raw(\"1\").to_string();\n", "file_path": "src/commands/buffers/clear_history_tests.rs", "rank": 84, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn set_hook() {\n\n use crate::{SetHook, TargetSession};\n\n use std::borrow::Cow;\n\n\n\n // Structure for setting or unsetting hook `hook-name` to command.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.0:\n\n // ```text\n\n // tmux set-hook [-agRu] [-t target-session] hook-name command\n\n // ```\n\n //\n\n // tmux ^2.8:\n\n // ```text\n\n // tmux set-hook [-gRu] [-t target-session] hook-name command\n\n // ```\n\n //\n\n // tmux ^2.4:\n\n // ```text\n", "file_path": "src/commands/hooks/set_hook_tests.rs", "rank": 85, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn list_buffers() {\n\n use crate::ListBuffers;\n\n use std::borrow::Cow;\n\n\n\n // List the global buffers.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.7:\n\n // ```text\n\n // tmux list-buffers [-F format]\n\n // (alias: lsb)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux list-buffers\n\n // (alias: lsb)\n\n // ```\n\n //\n", "file_path": "src/commands/buffers/list_buffers_tests.rs", "rank": 86, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn clock_mode() {\n\n use crate::ClockMode;\n\n use std::borrow::Cow;\n\n\n\n // # Manual\n\n //\n\n // tmux ^1.0:\n\n // ```text\n\n // tmux clock-mode [-t target-pane]\n\n // ```\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux clock-mode [-t target-window]\n\n // ```\n\n let mut clock_mode = ClockMode::new();\n\n #[cfg(feature = \"tmux_1_0\")]\n\n clock_mode.target_pane(\"1\");\n\n #[cfg(all(feature = \"tmux_0_8\", not(feature = \"tmux_1_0\")))]\n\n clock_mode.target_window(\"2\");\n", "file_path": "src/commands/miscellaneous/clock_mode_tests.rs", "rank": 87, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn show_options() {\n\n use crate::{ShowOptions, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Structure for showing options\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.0:\n\n // ```text\n\n // tmux show-options [-AgHpqsvw] [-t target-pane] [option]\n\n // (alias: show)\n\n // ```\n\n //\n\n // tmux ^1.8:\n\n // ```text\n\n // tmux show-options [-gqsvw] [-t target-session | target-window] [option]\n\n // (alias: show)\n\n // ```\n\n //\n", 
"file_path": "src/commands/options/show_options_tests.rs", "rank": 88, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn set_buffer() {\n\n use crate::SetBuffer;\n\n use std::borrow::Cow;\n\n\n\n // Set the contents of the specified buffer to data.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.0:\n\n // ```text\n\n // tmux set-buffer [-a] [-b buffer-name] [-n new-buffer-name] data\n\n // (alias: setb)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux set-buffer [-b buffer-index] data\n\n // (alias: setb)\n\n // ```\n\n //\n", "file_path": "src/commands/buffers/set_buffer_tests.rs", "rank": 89, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn choose_buffer() {\n\n use crate::{ChooseBuffer, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Stucture for putting a pane into buffer mode\n\n //\n\n // # Manual\n\n //\n\n // tmux X.X:\n\n // ```text\n\n // tmux choose-buffer [-NZr] [-F format] [-f filter] [-O sort-order] [-t target-pane] [template]\n\n // ```\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux choose-buffer [-NZr] [-F format] [-f filter] [-O sort-order] [-t target-pane] [template]\n\n // ```\n\n //\n\n // tmux ^2.7:\n\n // ```text\n", "file_path": "src/commands/buffers/choose_buffer_tests.rs", "rank": 90, "score": 51604.94583708429 }, { "content": "#[test]\n\nfn select_layout() {\n\n use crate::{SelectLayot, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Choose a specific layout for a window\n\n //\n\n // # Manual\n\n //\n\n // tmux ^2.7:\n\n // ```text\n\n // tmux select-layout [-Enop] [-t target-pane] [layout-name]\n\n // (alias: selectl)\n\n // ```\n\n //\n\n // tmux ^2.1:\n\n // ```text\n\n // tmux select-layout [-nop] [-t target-pane] [layout-name]\n\n // (alias: selectl)\n\n // ```\n\n //\n", "file_path": "src/commands/windows_and_panes/select_layout_tests.rs", "rank": 91, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn respawn_window() {\n\n use crate::{RespawnWindow, TargetWindow};\n\n use std::borrow::Cow;\n\n\n\n // Reactivate a window in which the command has exited\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.0:\n\n // ```text\n\n // tmux respawn-window [-k] [-c start-directory] [-e environment] [-t target-window]\n\n // [shell-command]\n\n // (alias: respawnw)\n\n //\n\n // tmux ^2.6:\n\n // ```text\n\n // tmux respawn-window [-k] [-c start-directory] [-t target-window]\n\n // [shell-command]\n\n // (alias: respawnw)\n\n //\n", "file_path": "src/commands/windows_and_panes/respawn_window_tests.rs", "rank": 92, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn swap_pane() {\n\n use crate::{SwapPane, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Swap two panes\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux swap-pane [-dDUZ] [-s src-pane] [-t dst-pane]\n\n // (alias: swapp)\n\n // ```\n\n //\n\n // tmux ^1.0:\n\n // ```text\n\n // tmux swap-pane [-dDU] [-s src-pane] [-t dst-pane]\n\n // (alias: swapp)\n\n // ```\n\n //\n", "file_path": "src/commands/windows_and_panes/swap_pane_tests.rs", "rank": 93, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn unlink_window() {\n\n use crate::{TargetWindow, UnlinkWindow};\n\n use std::borrow::Cow;\n\n\n\n // Unlink `target-window`\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.0:\n\n // ```text\n\n // tmux unlink-window [-k] [-t target-window]\n\n // (alias: unlinkw)\n\n // ```\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux unlink-window [-t target-window]\n\n // (alias: unlinkw)\n\n // ```\n\n let target_window = 
TargetWindow::Raw(\"1\").to_string();\n", "file_path": "src/commands/windows_and_panes/unlink_window_tests.rs", "rank": 94, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn select_pane() {\n\n use crate::{SelectPane, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Make pane `target-pane` the active pane in window `target-window`\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux select-pane [-DdeLlMmRUZ] [-T title] [-t target-pane]\n\n // (alias: selectp)\n\n // ```\n\n //\n\n // tmux ^2.6:\n\n // ```text\n\n // tmux select-pane [-DdeLlMmRU] [-T title] [-t target-pane]\n\n // (alias: selectp)\n\n // ```\n\n //\n", "file_path": "src/commands/windows_and_panes/select_pane_tests.rs", "rank": 95, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn swap_window() {\n\n use crate::{SwapWindow, TargetWindow};\n\n use std::borrow::Cow;\n\n\n\n // This is similar to link-window, except the source and destination windows are swapped\n\n //\n\n // # Manual\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux swap-window [-d] [-s src-window] [-t dst-window]\n\n // (alias: swapw)\n\n // ```\n\n let src_window = TargetWindow::Raw(\"1\").to_string();\n\n let dst_window = TargetWindow::Raw(\"2\").to_string();\n\n\n\n let mut swap_window = SwapWindow::new();\n\n #[cfg(feature = \"tmux_0_8\")]\n\n swap_window.detached();\n\n #[cfg(feature = \"tmux_0_8\")]\n", "file_path": "src/commands/windows_and_panes/swap_window_tests.rs", "rank": 96, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn split_window() {\n\n use crate::{PaneSize, SplitWindow, TargetPane};\n\n use std::borrow::Cow;\n\n\n\n // Create a new pane by splitting target-pane\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux split-window [-bdfhIvP] [-c start-directory] [-e environment] [-l size] [-t target-pane]\n\n // [shell-command] [-F format]\n\n // (alias: splitw)\n\n // ```\n\n //\n\n // tmux ^3.0:\n\n // ```text\n\n // tmux split-window [-bdfhIvP] [-c start-directory] [-e environment] [-l size | -p percentage]\n\n // [-t target-pane] [shell-command] [-F format]\n\n // (alias: splitw)\n", "file_path": "src/commands/windows_and_panes/split_window_tests.rs", "rank": 97, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn select_window() {\n\n use crate::{SelectWindow, TargetWindow};\n\n use std::borrow::Cow;\n\n\n\n // Select the window at target-window.\n\n //\n\n // # Manual\n\n //\n\n // tmux ^1.8:\n\n // ```text\n\n // tmux select-window [-lnpT] [-t target-window]\n\n // (alias: selectw)\n\n // ```\n\n //\n\n // tmux ^1.5:\n\n // ```text\n\n // tmux select-window [-lnp] [-t target-window]\n\n // (alias: selectw)\n\n // ```\n\n //\n", "file_path": "src/commands/windows_and_panes/select_window_tests.rs", "rank": 98, "score": 50398.98895407624 }, { "content": "#[test]\n\nfn rotate_window() {\n\n use crate::{RotateWindow, TargetWindow};\n\n use std::borrow::Cow;\n\n\n\n // Rotate the positions of the panes within a window\n\n //\n\n // # Manual\n\n //\n\n // tmux ^3.1:\n\n // ```text\n\n // tmux rotate-window [-DUZ] [-t target-window]\n\n // (alias: rotatew)\n\n // ```\n\n //\n\n // tmux ^0.8:\n\n // ```text\n\n // tmux rotate-window [-DU] [-t target-window]\n\n // (alias: rotatew)\n\n // ```\n\n let target_window = TargetWindow::Raw(\"1\").to_string();\n", "file_path": "src/commands/windows_and_panes/rotate_window_tests.rs", "rank": 99, "score": 50398.98895407624 } ]
Rust
crates/fluvio-storage/src/range_map.rs
bohlmannc/fluvio
b5a3105600b6886c55d76707d369fa59f5d9673b
use std::cmp::max; use std::cmp::min; use std::collections::BTreeMap; use std::ops::Bound::Included; use std::ffi::OsStr; use tracing::debug; use tracing::trace; use tracing::error; use dataplane::Offset; use crate::segment::ReadSegment; use crate::StorageError; use crate::config::ConfigOption; use crate::util::log_path_get_offset; #[derive(Debug)] pub(crate) struct SegmentList { segments: BTreeMap<Offset, ReadSegment>, min_offset: Offset, max_offset: Offset, } impl SegmentList { pub fn new() -> Self { SegmentList { segments: BTreeMap::new(), max_offset: 0, min_offset: -1, } } pub async fn from_dir( option: &ConfigOption, ) -> Result<(SegmentList, Option<Offset>), StorageError> { let dirs = option.base_dir.read_dir()?; debug!("reading segments at: {:#?}", dirs); let files: Vec<_> = dirs.filter_map(|entry| entry.ok()).collect(); let mut offsets: Vec<Offset> = vec![]; for entry in files { if let Ok(metadata) = entry.metadata() { if metadata.is_file() { let path = entry.path(); trace!("scanning file: {:#?}", path); if path.extension() == Some(OsStr::new("log")) { if let Ok(offset) = log_path_get_offset(&path) { trace!("detected valid log: {}", offset); offsets.push(offset); /* match Segment::open(offset,option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}",err) } } else { debug!("not log, skipping: {:#?}",path); */ } } } } } offsets.sort_unstable(); let last_offset = offsets.pop(); let mut segments = Self::new(); for offset in offsets { match ReadSegment::open_unknown(offset, option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}", err), } } Ok((segments, last_offset)) } #[allow(dead_code)] pub fn len(&self) -> usize { self.segments.len() } pub fn min_offset(&self) -> Offset { self.min_offset } pub fn add_segment(&mut self, segment: ReadSegment) { debug!( base_offset = segment.get_base_offset(), end_offset = segment.get_end_offset(), "inserting" ); self.max_offset = max(self.max_offset, segment.get_end_offset()); self.min_offset = if self.min_offset < 0 { segment.get_base_offset() } else { min(self.min_offset, segment.get_base_offset()) }; self.segments.insert(segment.get_base_offset(), segment); } #[allow(dead_code)] pub fn get_segment(&self, offset: Offset) -> Option<&ReadSegment> { self.segments.get(&offset) } pub fn find_segment(&self, offset: Offset) -> Option<(&Offset, &ReadSegment)> { if offset < self.min_offset { None } else if offset == self.min_offset { (&self.segments) .range((Included(offset), Included(offset))) .next_back() } else if offset >= self.max_offset { None } else { let range = ( Included(offset - self.max_offset + self.min_offset + 1), Included(offset), ); (&self.segments).range(range).next_back() } } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::path::PathBuf; use flv_util::fixture::ensure_new_dir; use dataplane::fixture::create_batch; use dataplane::Offset; use crate::StorageError; use crate::segment::MutableSegment; use crate::segment::ReadSegment; use crate::config::ConfigOption; use super::SegmentList; async fn create_segment( option: &ConfigOption, start: Offset, end_offset: Offset, ) -> Result<ReadSegment, StorageError> { let mut mut_segment = MutableSegment::create(start, option).await?; mut_segment.write_batch(&mut create_batch()).await?; mut_segment.set_end_offset(end_offset); let segment = mut_segment.convert_to_segment().await?; Ok(segment) } fn default_option(base_dir: PathBuf) -> ConfigOption { ConfigOption { segment_max_bytes: 
100, base_dir, index_max_bytes: 1000, index_max_interval_bytes: 0, ..Default::default() } } #[fluvio_future::test] async fn test_segment_empty() { let rep_dir = temp_dir().join("segmentlist-read-empty"); ensure_new_dir(&rep_dir).expect("new"); let option = default_option(rep_dir); let (segments, last_segment) = SegmentList::from_dir(&option).await.expect("from"); assert_eq!(segments.len(), 0); assert!(last_segment.is_none()); } #[fluvio_future::test] async fn test_segment_single_base_zero() { let rep_dir = temp_dir().join("segmentlist-single-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(-1).is_none()); assert!(list.find_segment(0).is_some()); assert!(list.find_segment(1).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); assert!(list.find_segment(501).is_none()); } #[fluvio_future::test] async fn test_segment_single_base_some() { let rep_dir = temp_dir().join("segmentlist-single-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(50).is_none()); assert!(list.find_segment(99).is_none()); assert!(list.find_segment(100).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); } #[fluvio_future::test] async fn test_segment_many_zero() { let rep_dir = temp_dir().join("segmentlist-many-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); list.add_segment(create_segment(&option, 500, 2000).await.expect("create")); list.add_segment(create_segment(&option, 2000, 3000).await.expect("create")); list.add_segment(create_segment(&option, 3000, 4000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 4); assert_eq!(list.find_segment(0).expect("segment").0, &0); assert_eq!(list.find_segment(1).expect("segment").0, &0); assert_eq!(list.find_segment(499).expect("segment").0, &0); assert_eq!(list.find_segment(500).expect("segment").0, &500); assert_eq!(list.find_segment(1500).expect("segment").0, &500); assert_eq!(list.find_segment(3000).expect("segment").0, &3000); assert!(list.find_segment(4000).is_none()); assert!(list.find_segment(4001).is_none()); } #[fluvio_future::test] async fn test_segment_many_some() { let rep_dir = temp_dir().join("segmentlist-many-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 600).await.expect("create")); list.add_segment(create_segment(&option, 600, 4000).await.expect("create")); list.add_segment(create_segment(&option, 4000, 9000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 3); assert!(list.find_segment(0).is_none()); assert!(list.find_segment(99).is_none()); assert_eq!(list.find_segment(100).expect("segment").0, &100); assert_eq!(list.find_segment(599).expect("segment").0, &100); assert_eq!(list.find_segment(600).expect("segment").0, &600); assert_eq!(list.find_segment(900).expect("segment").0, &600); 
assert_eq!(list.find_segment(8000).expect("segment").0, &4000); assert!(list.find_segment(9000).is_none()); assert!(list.find_segment(10000).is_none()); } }
use std::cmp::max; use std::cmp::min; use std::collections::BTreeMap; use std::ops::Bound::Included; use std::ffi::OsStr; use tracing::debug; use tracing::trace; use tracing::error; use dataplane::Offset; use crate::segment::ReadSegment; use crate::StorageError; use crate::config::ConfigOption; use crate::util::log_path_get_offset; #[derive(Debug)] pub(crate) struct SegmentList { segments: BTreeMap<Offset, ReadSegment>, min_offset: Offset, max_offset: Offset, } impl SegmentList { pub fn new() -> Self { SegmentList { segments: BTreeMap::new(), max_offset: 0, min_offset: -1, } } pub async fn from_dir( option: &ConfigOption, ) -> Result<(SegmentList, Option<Offset>), StorageError> { let dirs = option.base_dir.read_dir()?; debug!("reading segments at: {:#?}", dirs); let files: Vec<_> = dirs.filter_map(|entry| entry.ok()).collect(); let mut offsets: Vec<Offset> = vec![]; for entry in files { if let Ok(metadata) = entry.metadata() { if metadata.is_file() { let path = entry.path(); trace!("scanning file: {:#?}", path); if path.extension() == Some(OsStr::new("log")) { if let Ok(offset) = log_path_get_offset(&path) { trace!("detected valid log: {}", offset); offsets.push(offset); /* match Segment::open(offset,option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}",err) } } else { debug!("not log, skipping: {:#?}",path); */ } } } } } offsets.sort_unstable(); let last_offset = offsets.pop(); let mut segments = Self::new(); for offset in offsets { match ReadSegment::open_unknown(offset, option).await { Ok(segment) => segments.add_segment(segment), Err(err) => error!("error opening segment: {:#?}", err), } } Ok((segments, last_offset)) } #[allow(dead_code)] pub fn len(&self) -> usize { self.segments.len() } pub fn min_offset(&self) -> Offset { self.min_offset } pub fn add_segment(&mut self, segment: ReadSegment) { debug!( base_offset = segment.get_base_offset(), end_offset = segment.get_end_offset(), "inserting" ); self.max_offset = max(self.max_offset, segment.get_end_offset()); self.min_offset = if self.min_offset < 0 { segment.get_base_offset() } else { min(self.min_offset, segment.get_base_offset()) }; self.segments.insert(segment.get_base_offset(), segment); } #[allow(dead_code)] pub fn get_segment(&self, offset: Offset) -> Option<&ReadSegment> { self.segments.get(&offset) } pub fn find_segment(&self, offset: Offset) -> Option<(&Offset, &ReadSegment)> { if offset < self.min_offset { None } els
} #[cfg(test)] mod tests { use std::env::temp_dir; use std::path::PathBuf; use flv_util::fixture::ensure_new_dir; use dataplane::fixture::create_batch; use dataplane::Offset; use crate::StorageError; use crate::segment::MutableSegment; use crate::segment::ReadSegment; use crate::config::ConfigOption; use super::SegmentList; async fn create_segment( option: &ConfigOption, start: Offset, end_offset: Offset, ) -> Result<ReadSegment, StorageError> { let mut mut_segment = MutableSegment::create(start, option).await?; mut_segment.write_batch(&mut create_batch()).await?; mut_segment.set_end_offset(end_offset); let segment = mut_segment.convert_to_segment().await?; Ok(segment) } fn default_option(base_dir: PathBuf) -> ConfigOption { ConfigOption { segment_max_bytes: 100, base_dir, index_max_bytes: 1000, index_max_interval_bytes: 0, ..Default::default() } } #[fluvio_future::test] async fn test_segment_empty() { let rep_dir = temp_dir().join("segmentlist-read-empty"); ensure_new_dir(&rep_dir).expect("new"); let option = default_option(rep_dir); let (segments, last_segment) = SegmentList::from_dir(&option).await.expect("from"); assert_eq!(segments.len(), 0); assert!(last_segment.is_none()); } #[fluvio_future::test] async fn test_segment_single_base_zero() { let rep_dir = temp_dir().join("segmentlist-single-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(-1).is_none()); assert!(list.find_segment(0).is_some()); assert!(list.find_segment(1).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); assert!(list.find_segment(501).is_none()); } #[fluvio_future::test] async fn test_segment_single_base_some() { let rep_dir = temp_dir().join("segmentlist-single-some"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 500).await.expect("create")); println!("segments: {:#?}", list); assert!(list.find_segment(50).is_none()); assert!(list.find_segment(99).is_none()); assert!(list.find_segment(100).is_some()); assert!(list.find_segment(499).is_some()); assert!(list.find_segment(500).is_none()); } #[fluvio_future::test] async fn test_segment_many_zero() { let rep_dir = temp_dir().join("segmentlist-many-zero"); ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 0, 500).await.expect("create")); list.add_segment(create_segment(&option, 500, 2000).await.expect("create")); list.add_segment(create_segment(&option, 2000, 3000).await.expect("create")); list.add_segment(create_segment(&option, 3000, 4000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 4); assert_eq!(list.find_segment(0).expect("segment").0, &0); assert_eq!(list.find_segment(1).expect("segment").0, &0); assert_eq!(list.find_segment(499).expect("segment").0, &0); assert_eq!(list.find_segment(500).expect("segment").0, &500); assert_eq!(list.find_segment(1500).expect("segment").0, &500); assert_eq!(list.find_segment(3000).expect("segment").0, &3000); assert!(list.find_segment(4000).is_none()); assert!(list.find_segment(4001).is_none()); } #[fluvio_future::test] async fn test_segment_many_some() { let rep_dir = temp_dir().join("segmentlist-many-some"); 
ensure_new_dir(&rep_dir).expect("new"); let mut list = SegmentList::new(); let option = default_option(rep_dir); list.add_segment(create_segment(&option, 100, 600).await.expect("create")); list.add_segment(create_segment(&option, 600, 4000).await.expect("create")); list.add_segment(create_segment(&option, 4000, 9000).await.expect("create")); println!("segments: {:#?}", list); assert_eq!(list.len(), 3); assert!(list.find_segment(0).is_none()); assert!(list.find_segment(99).is_none()); assert_eq!(list.find_segment(100).expect("segment").0, &100); assert_eq!(list.find_segment(599).expect("segment").0, &100); assert_eq!(list.find_segment(600).expect("segment").0, &600); assert_eq!(list.find_segment(900).expect("segment").0, &600); assert_eq!(list.find_segment(8000).expect("segment").0, &4000); assert!(list.find_segment(9000).is_none()); assert!(list.find_segment(10000).is_none()); } }
e if offset == self.min_offset { (&self.segments) .range((Included(offset), Included(offset))) .next_back() } else if offset >= self.max_offset { None } else { let range = ( Included(offset - self.max_offset + self.min_offset + 1), Included(offset), ); (&self.segments).range(range).next_back() } }
function_block-function_prefixed
[ { "content": "pub fn log_path_get_offset<P>(path: P) -> Result<Offset, OffsetError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let log_path = path.as_ref();\n\n\n\n match log_path.file_stem() {\n\n None => Err(OffsetError::InvalidPath),\n\n Some(file_name) => {\n\n if file_name.len() != 20 {\n\n Err(OffsetError::InvalidLogFileName)\n\n } else {\n\n file_name\n\n .to_str()\n\n .unwrap()\n\n .parse()\n\n .map_err(|err: ParseIntError| err.into())\n\n }\n\n }\n\n }\n", "file_path": "crates/fluvio-storage/src/util.rs", "rank": 0, "score": 319185.0550222922 }, { "content": "/// given parent directory, base offset, extension, generate path\n\npub fn generate_file_name<P>(parent_dir: P, base_offset: Offset, extension: &str) -> PathBuf\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut file = parent_dir.as_ref().join(format!(\"{:020}\", base_offset));\n\n file.set_extension(extension);\n\n file\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum OffsetError {\n\n #[error(\"Offset does not exist\")]\n\n NotExistent,\n\n #[error(\"Invalid path\")]\n\n InvalidPath,\n\n #[error(\"Invalid logfile name\")]\n\n InvalidLogFileName,\n\n #[error(\"Failed to parse offset\")]\n\n OffsetParse(#[from] ParseIntError),\n\n}\n\n\n", "file_path": "crates/fluvio-storage/src/util.rs", "rank": 1, "score": 313303.5890796119 }, { "content": "pub fn read_bytes_from_file<P>(path: P) -> Result<Vec<u8>, io::Error>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let file_path = path.as_ref();\n\n info!(\"test file: {}\", file_path.display());\n\n let mut f = File::open(file_path)?;\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer)?;\n\n Ok(buffer)\n\n}\n", "file_path": "crates/fluvio-dataplane-protocol/src/fixture.rs", "rank": 2, "score": 270656.90288472327 }, { "content": "fn read_filter_from_path(filter_path: impl AsRef<Path>) -> Vec<u8> {\n\n let path = filter_path.as_ref();\n\n std::fs::read(path).unwrap_or_else(|_| panic!(\"Unable to read file {}\", path.display()))\n\n}\n\n\n", "file_path": "crates/fluvio-spu/src/services/public/stream_fetch_test.rs", "rank": 3, "score": 268808.0125964046 }, { "content": "fn decode_option_vec_u<T>(array: &mut Option<Vec<u8>>, src: &mut T, len: isize) -> Result<(), Error>\n\nwhere\n\n T: Buf,\n\n{\n\n if len < 0 {\n\n *array = None;\n\n return Ok(());\n\n }\n\n\n\n if len == 0 {\n\n *array = Some(Vec::new());\n\n return Ok(());\n\n }\n\n\n\n let mut buf = src.take(len as usize);\n\n let mut value: Vec<u8> = Vec::new();\n\n value.put(&mut buf);\n\n if value.len() != len as usize {\n\n return Err(Error::new(\n\n ErrorKind::UnexpectedEof,\n", "file_path": "crates/fluvio-protocol/src/core/decoder.rs", "rank": 4, "score": 262803.9579213476 }, { "content": "/// find status matching it,\n\nfn find_status(status: &mut Vec<ReplicaStatus>, spu: SpuId) -> Option<&'_ mut ReplicaStatus> {\n\n status.iter_mut().find(|status| status.spu == spu)\n\n}\n\n\n\n#[derive(Decoder, Encoder, Debug, Clone, PartialEq)]\n\n#[cfg_attr(feature = \"use_serde\", derive(serde::Serialize, serde::Deserialize))]\n\npub enum PartitionResolution {\n\n Offline, // No leader available for serving partition\n\n Online, // Partition is running normally, status contains replica info\n\n LeaderOffline, // Election has failed, no suitable leader has been founded\n\n ElectionLeaderFound, // New leader has been selected\n\n}\n\n\n\nimpl Default for PartitionResolution {\n\n fn default() -> Self {\n\n PartitionResolution::Offline\n\n }\n\n}\n\n\n\n#[derive(Decoder, Encoder, Debug, Clone, PartialEq)]\n", "file_path": 
"crates/fluvio-controlplane-metadata/src/partition/status.rs", "rank": 5, "score": 247013.42740591746 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn home_dir() -> Option<PathBuf> {\n\n None\n\n}\n\n\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\n\n\nuse fluvio_types::defaults::{CLI_CONFIG_PATH};\n\nuse crate::{FluvioConfig, FluvioError};\n\n\n\nuse super::TlsPolicy;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ConfigError {\n\n #[error(transparent)]\n\n ConfigFileError(#[from] IoError),\n\n #[error(\"Failed to deserialize Fluvio config\")]\n\n TomlError(#[from] toml::de::Error),\n\n #[error(\"Config has no active profile\")]\n\n NoActiveProfile,\n", "file_path": "crates/fluvio/src/config/config.rs", "rank": 6, "score": 239723.92850556565 }, { "content": "#[allow(clippy::all)]\n\npub fn generate_message(offset: i64, test_case: &SmokeTestCase) -> Vec<u8> {\n\n let producer_record_size = test_case.option.producer_record_size as usize;\n\n\n\n let mut bytes = Vec::with_capacity(producer_record_size);\n\n\n\n let mut prefix = generate_pre_fix(test_case.environment.topic_name().as_str(), offset)\n\n .as_bytes()\n\n .to_vec();\n\n bytes.append(&mut prefix);\n\n\n\n // then fill int the dummy test data\n\n for _ in 0..producer_record_size {\n\n bytes.push(VALUE);\n\n }\n\n\n\n bytes\n\n}\n\n\n\n/// validate the message for given offset\n", "file_path": "crates/fluvio-test/src/tests/smoke/message.rs", "rank": 7, "score": 239086.26769691036 }, { "content": "pub fn validate_versions(min: i16, max: Option<i16>, field: Option<&str>) -> Option<String> {\n\n match (max, field) {\n\n // Print name in named fields\n\n (Some(max), Some(field)) if min > max => Some(format!(\n\n \"On {}, max version({}) is less than min({}).\",\n\n field, max, min\n\n )),\n\n // No name to print in unnamed fields\n\n (Some(max), None) if min > max => {\n\n Some(format!(\"Max version({}) is less than min({}).\", max, min))\n\n }\n\n (None, Some(field)) if min < 0 => Some(format!(\n\n \"On {} min version({}) must be positive.\",\n\n field, min\n\n )),\n\n (None, None) if min < 0 => Some(format!(\"Min version({}) must be positive.\", min)),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-protocol-derive/src/ast/prop.rs", "rank": 8, "score": 237269.14442918458 }, { "content": "pub fn cert_dir() -> PathBuf {\n\n std::env::current_dir().unwrap().join(\"tls\").join(\"certs\")\n\n}\n\n\n", "file_path": "crates/fluvio-test-util/tls.rs", "rank": 9, "score": 236597.7859985762 }, { "content": "#[smartmodule(array_map)]\n\npub fn my_array_map(_record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_array_map.rs", "rank": 10, "score": 231411.6340122366 }, { "content": "/// Updates the TableModel used to render the TUI table during `TableModel::render()`\n\n/// Attempts to update relevant rows, but appends to table if the primary key doesn't exist\n\n/// Returned String is not intended to be used\n\npub fn format_fancy_table_record(record: &[u8], table_model: &mut TableModel) -> Option<String> {\n\n let maybe_json: serde_json::Value = match serde_json::from_slice(record) {\n\n Ok(value) => value,\n\n Err(e) => {\n\n println!(\"error parsing record as json: {}\", e);\n\n return None;\n\n }\n\n };\n\n\n\n // Handle updates as objects or list of objects\n\n match maybe_json {\n\n serde_json::Value::Object(json_obj) => {\n\n update_table_row(table_model, json_obj).ok()?;\n\n }\n\n serde_json::Value::Array(vec_obj) => 
{\n\n let json_array = flatten_json_array_updates(vec_obj).ok()?;\n\n for json_obj in json_array {\n\n update_table_row(table_model, json_obj).ok()?;\n\n }\n\n }\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 11, "score": 228355.01363373792 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n // Deserialize a JSON array with any kind of values inside\n\n let array: Vec<serde_json::Value> = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Convert each JSON value from the array back into a JSON string\n\n let strings: Vec<String> = array\n\n .into_iter()\n\n .map(|value| serde_json::to_string(&value))\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Create one record from each JSON string to send\n\n let records: Vec<(Option<RecordData>, RecordData)> = strings\n\n .into_iter()\n\n .map(|s| (None, RecordData::from(s)))\n\n .collect();\n\n Ok(records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_array/src/lib.rs", "rank": 12, "score": 225914.68538944886 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n // Deserialize a RedditListing from JSON\n\n let listing: RedditListing = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Create a list of RedditPostData converted back into JSON strings\n\n let posts: Vec<(String, String)> = listing\n\n .data\n\n .children\n\n .into_iter()\n\n .map(|post: RedditPost| {\n\n // Convert each post into (ID, Post JSON)\n\n serde_json::to_string(&post.data).map(|json| (post.data.id, json))\n\n })\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Convert each Post into a Record whose key is the Post's ID\n\n let records = posts\n\n .into_iter()\n\n .map(|(id, post)| (Some(RecordData::from(id)), RecordData::from(post)))\n\n .collect();\n\n Ok(records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_reddit/src/lib.rs", "rank": 13, "score": 225914.68538944886 }, { "content": "#[smartmodule(array_map)]\n\npub fn array_map(record: &Record) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n // Deserialize a JSON object (Map) with any kind of values inside\n\n let object: Map<String, Value> = serde_json::from_slice(record.value.as_ref())?;\n\n\n\n // Convert each JSON value from the array back into a JSON string\n\n let key_value_strings: Vec<(&String, String)> = object\n\n .iter()\n\n .map(|(key, value)| serde_json::to_string(value).map(|value| (key, value)))\n\n .collect::<core::result::Result<_, _>>()?;\n\n\n\n // Create one record from each JSON string to send\n\n let key_value_records: Vec<(Option<RecordData>, RecordData)> = key_value_strings\n\n .into_iter()\n\n .map(|(key, value)| {\n\n (\n\n Some(RecordData::from(key.to_string())),\n\n RecordData::from(value),\n\n )\n\n })\n\n .collect();\n\n Ok(key_value_records)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/array_map_json_object/src/lib.rs", "rank": 14, "score": 225914.6853894489 }, { "content": "#[fluvio_test(async = a)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_async.rs", "rank": 15, "score": 217296.47728081964 }, { "content": "#[fluvio_test(async = false)]\n\npub fn false_case(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_async.rs", "rank": 16, "score": 
214379.85151710868 }, { "content": "// zigzag decoding\n\npub fn varint_decode<T>(buf: &mut T) -> Result<(i64, usize), Error>\n\nwhere\n\n T: Buf,\n\n{\n\n let mut num: i64 = 0;\n\n let mut shift: usize = 0;\n\n\n\n loop {\n\n if buf.remaining() == 0 {\n\n return Err(Error::new(\n\n ErrorKind::UnexpectedEof,\n\n \"varint decoding no more bytes left\",\n\n ));\n\n }\n\n\n\n let b = buf.get_u8();\n\n trace!(\"var byte: {:#X}\", b);\n\n\n\n num |= ((b & 0x7f) as i64) << shift;\n\n shift += 7;\n\n\n\n if b & 0x80 == 0 {\n\n break;\n\n }\n\n }\n\n\n\n Ok(((num >> 1) ^ -(num & 1), shift / 7))\n\n}\n\n\n", "file_path": "crates/fluvio-protocol/src/core/varint.rs", "rank": 17, "score": 213448.54481997772 }, { "content": "#[cfg(unix)]\n\nfn make_executable(file: &mut File) -> std::result::Result<(), IoError> {\n\n use std::os::unix::fs::PermissionsExt;\n\n\n\n // Add u+rwx mode to the existing file permissions, leaving others unchanged\n\n let mut permissions = file.metadata()?.permissions();\n\n let mut mode = permissions.mode();\n\n mode |= 0o700;\n\n permissions.set_mode(mode);\n\n\n\n file.set_permissions(permissions)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 18, "score": 211473.351806705 }, { "content": "/// Collects the metadata of Fluvio extensions installed on the system\n\npub fn subcommand_metadata() -> Result<Vec<SubcommandMetadata>> {\n\n let mut metadata = Vec::new();\n\n\n\n let extensions = crate::install::get_extensions()?;\n\n for path in extensions {\n\n let result = Command::new(&path).arg(\"metadata\").result();\n\n let output = match result {\n\n Ok(out) => out.stdout,\n\n _ => continue,\n\n };\n\n\n\n let json_result = serde_json::from_slice::<FluvioExtensionMetadata>(&output);\n\n if let Ok(meta) = json_result {\n\n let subcommand = SubcommandMetadata { path, meta };\n\n metadata.push(subcommand);\n\n }\n\n }\n\n\n\n Ok(metadata)\n\n}\n", "file_path": "crates/fluvio-cli/src/metadata.rs", "rank": 19, "score": 208107.7148835971 }, { "content": "#[allow(clippy::needless_range_loop)]\n\npub fn validate_message(iter: u32, offset: i64, test_case: &SmokeTestCase, data: &[u8]) {\n\n let prefix_string = generate_pre_fix(test_case.environment.topic_name().as_str(), offset);\n\n let prefix = prefix_string.as_bytes().to_vec();\n\n let prefix_len = prefix.len();\n\n\n\n let producer_record_size = test_case.option.producer_record_size as usize;\n\n\n\n let message_len = producer_record_size + prefix_len;\n\n assert_eq!(\n\n data.len(),\n\n message_len,\n\n \"message should be: {}\",\n\n message_len\n\n );\n\n\n\n // check prefix\n\n for i in 0..prefix_len {\n\n assert!(\n\n data[i] == prefix[i],\n\n \"prefix failed, iter: {}, index: {}, data: {}, prefix: {}, data len: {}, offset: {}, topic: {}\",\n", "file_path": "crates/fluvio-test/src/tests/smoke/message.rs", "rank": 20, "score": 201844.85917019934 }, { "content": "#[fluvio_test(topic = \"test-bug\")]\n\npub fn concurrent(mut test_driver: TestDriver, mut test_case: TestCase) {\n\n println!(\"Testing concurrent consumer and producer\");\n\n let option: ConcurrentTestCase = test_case.into();\n\n\n\n run_block_on(async {\n\n let (sender, receiver) = std::sync::mpsc::channel();\n\n spawn(consumer::consumer_stream(\n\n test_driver.clone(),\n\n option.clone(),\n\n receiver,\n\n ));\n\n producer::producer(&test_driver, option, sender).await;\n\n });\n\n}\n", "file_path": "crates/fluvio-test/src/tests/concurrent/mod.rs", "rank": 21, "score": 201673.5549356286 }, { "content": "#[fluvio_test(name = 
\"consumer\", topic = \"producer-test\")]\n\npub fn run(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let test_case: ConsumerTestCase = test_case.into();\n\n let consumers = test_case.option.consumers;\n\n let partition = test_case.option.partition;\n\n let is_multi = test_case.option.multi_partition;\n\n let raw_offset = test_case.option.offset;\n\n\n\n // We'll assume for now that structopt is handling mutual exclusivity\n\n let offset = if test_case.option.offset_beginning {\n\n Offset::from_beginning(raw_offset as u32)\n\n } else if test_case.option.offset_end {\n\n Offset::from_end(raw_offset as u32)\n\n } else {\n\n Offset::absolute(raw_offset.into()).expect(\"Couldn't create absolute offset\")\n\n };\n\n\n\n println!(\"\\nStarting Consumer test\");\n\n\n\n println!(\"Consumers: {}\", consumers);\n\n println!(\"Starting offset: {:?}\", &offset);\n", "file_path": "crates/fluvio-test/src/tests/consumer.rs", "rank": 22, "score": 201673.5549356286 }, { "content": "#[fluvio_test(name = \"producer\", topic = \"producer-test\")]\n\npub fn run(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let test_case: ProducerTestCase = test_case.into();\n\n let total_records = test_case.option.num_records;\n\n\n\n // If we assign more producers than records to split\n\n // then set # of producers to the # of records\n\n let producers = if total_records > test_case.option.producers {\n\n test_case.option.producers\n\n } else {\n\n println!(\n\n \"More producers than records to split. Reducing number to {}\",\n\n total_records\n\n );\n\n total_records\n\n };\n\n\n\n println!(\"\\nStarting Producer test\");\n\n println!(\"Producers: {}\", producers);\n\n println!(\"# Records: {}\", total_records);\n\n\n", "file_path": "crates/fluvio-test/src/tests/producer.rs", "rank": 23, "score": 201673.5549356286 }, { "content": "#[cfg(not(unix))]\n\nfn make_executable(_file: &mut File) -> std::result::Result<(), IoError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 24, "score": 201002.25472389965 }, { "content": "#[fluvio_test(topic = \"test\")]\n\npub fn smoke(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let smoke_test_case: SmokeTestCase = test_case.into();\n\n\n\n // If connector tests requested\n\n let maybe_connector = if let Some(ref connector_config) =\n\n smoke_test_case.option.connector_config\n\n {\n\n let connector_process = async_process!(async {\n\n test_driver\n\n .connect()\n\n .await\n\n .expect(\"Connecting to cluster failed\");\n\n\n\n // Add a connector CRD\n\n let admin = test_driver.client().admin().await;\n\n // Create a managed connector\n\n let config = ConnectorConfig::from_file(&connector_config).unwrap();\n\n let spec: ManagedConnectorSpec = config.clone().into();\n\n let name = spec.name.clone();\n\n\n", "file_path": "crates/fluvio-test/src/tests/smoke/mod.rs", "rank": 25, "score": 199509.0051257369 }, { "content": "#[fluvio_test(topic = \"longevity\")]\n\npub fn longevity(mut test_driver: FluvioTestDriver, mut test_case: TestCase) {\n\n let option: LongevityTestCase = test_case.into();\n\n\n\n println!(\"Starting Longevity Test\");\n\n println!(\"Expected runtime: {:?}\", option.option.runtime_seconds);\n\n println!(\"# Consumers: {}\", option.option.consumers);\n\n println!(\"# Producers: {}\", option.option.producers);\n\n\n\n if !option.option.verbose {\n\n println!(\"Run with `--verbose` flag for more test output\");\n\n }\n\n\n\n let mut consumer_wait = Vec::new();\n\n for i in 
0..option.option.consumers {\n\n println!(\"Starting Consumer #{}\", i);\n\n let consumer = async_process!(async {\n\n test_driver\n\n .connect()\n\n .await\n\n .expect(\"Connecting to cluster failed\");\n", "file_path": "crates/fluvio-test/src/tests/longevity/mod.rs", "rank": 26, "score": 199509.0051257369 }, { "content": "#[smartmodule(filter_map)]\n\npub fn filter_map(record: &Record) -> Result<Option<(Option<RecordData>, RecordData)>> {\n\n let key = record.key.clone();\n\n let string = String::from_utf8_lossy(record.value.as_ref()).to_string();\n\n let int: i32 = string.parse()?;\n\n\n\n if int % 2 == 0 {\n\n let output = int / 2;\n\n Ok(Some((key.clone(), RecordData::from(output.to_string()))))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter_map/src/lib.rs", "rank": 27, "score": 196402.5486305718 }, { "content": "fn fluvio_base_dir_create(path: PathBuf) -> Result<PathBuf> {\n\n if !path.exists() {\n\n // Create the base dir if it doesn't exist yet (#718)\n\n std::fs::create_dir_all(&path)?\n\n }\n\n Ok(path)\n\n}\n\n\n\npub(crate) fn fluvio_extensions_dir() -> Result<PathBuf> {\n\n let base_dir = fluvio_base_dir()?;\n\n let path = base_dir.join(\"extensions\");\n\n\n\n if !path.exists() {\n\n std::fs::create_dir(&path)?;\n\n }\n\n Ok(path)\n\n}\n\n\n\npub(crate) fn get_extensions() -> Result<Vec<PathBuf>> {\n\n use std::fs;\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 28, "score": 191788.68520352463 }, { "content": "#[fluvio_test(topic = \"test-multiple-partition\")]\n\npub fn multiple_partition(mut test_driver: TestDriver, mut test_case: TestCase) -> TestResult {\n\n println!(\"Testing multiple partition consumer\");\n\n\n\n let option: MultiplePartitionTestCase = test_case.into();\n\n\n\n run_block_on(async {\n\n spawn(producer::producer(test_driver.clone(), option.clone()));\n\n\n\n consumer::consumer_stream(&test_driver, option).await;\n\n });\n\n}\n", "file_path": "crates/fluvio-test/src/tests/multiple_partitions/mod.rs", "rank": 29, "score": 190109.47691836252 }, { "content": "fn get_flush_policy_from_config(option: &ConfigOption) -> FlushPolicy {\n\n if option.flush_idle_msec > 0 {\n\n FlushPolicy::IdleFlush {\n\n delay_millis: option.flush_idle_msec,\n\n }\n\n } else if option.flush_write_count == 0 {\n\n FlushPolicy::NoFlush\n\n } else if option.flush_write_count == 1 {\n\n FlushPolicy::EveryWrite\n\n } else {\n\n FlushPolicy::CountWrites {\n\n n_writes: option.flush_write_count,\n\n write_tracking: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl MutFileRecords {\n\n pub async fn create(\n\n base_offset: Offset,\n", "file_path": "crates/fluvio-storage/src/mut_records.rs", "rank": 30, "score": 190093.17095207833 }, { "content": "fn decode_vec<T, M>(len: i32, item: &mut Vec<M>, src: &mut T, version: Version) -> Result<(), Error>\n\nwhere\n\n T: Buf,\n\n M: Default + Decoder,\n\n{\n\n for _ in 0..len {\n\n let mut value = <M>::default();\n\n value.decode(src, version)?;\n\n item.push(value);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl<M> Decoder for Option<M>\n\nwhere\n\n M: Default + Decoder,\n\n{\n\n fn decode<T>(&mut self, src: &mut T, version: Version) -> Result<(), Error>\n\n where\n", "file_path": "crates/fluvio-protocol/src/core/decoder.rs", "rank": 31, "score": 188936.3069676214 }, { "content": "pub trait TestOption: Debug + DynClone {\n\n fn as_any(&self) -> &dyn Any;\n\n}\n\n\n\ndyn_clone::clone_trait_object!(TestOption);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TestCase {\n\n pub environment: EnvironmentSetup,\n\n 
pub option: Box<dyn TestOption>,\n\n}\n\n\n\nimpl TestCase {\n\n pub fn new(environment: EnvironmentSetup, option: Box<dyn TestOption>) -> Self {\n\n Self {\n\n environment,\n\n option,\n\n }\n\n }\n\n}\n", "file_path": "crates/fluvio-test-util/test_meta/mod.rs", "rank": 32, "score": 184973.92326486774 }, { "content": "pub fn format_json(value: &[u8], suppress: bool) -> Option<String> {\n\n let maybe_json = match serde_json::from_slice(value) {\n\n Ok(value) => Some(value),\n\n Err(e) if !suppress => Some(serde_json::json!({\n\n \"error\": format!(\"{}\", e),\n\n })),\n\n _ => None,\n\n };\n\n\n\n maybe_json.and_then(|json| serde_json::to_string(&json).ok())\n\n}\n\n\n\n// -----------------------------------\n\n// Text\n\n// -----------------------------------\n\n\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 33, "score": 184452.3175033506 }, { "content": "pub fn default_option(index_max_interval_bytes: Size) -> ConfigOption {\n\n ConfigOption {\n\n segment_max_bytes: 100,\n\n index_max_interval_bytes,\n\n base_dir: temp_dir(),\n\n index_max_bytes: 1000,\n\n ..Default::default()\n\n }\n\n}\n\n\n\n#[derive(Builder)]\n\npub struct BatchProducer {\n\n #[builder(setter(into), default = \"0\")]\n\n base_offset: i64,\n\n #[builder(setter(into), default = \"0\")]\n\n producer_id: i64,\n\n #[builder(setter(into), default = \"2\")]\n\n pub records: u16,\n\n /// how many bytes in a record\n\n #[builder(setter, default = \"2\")]\n", "file_path": "crates/fluvio-storage/src/fixture.rs", "rank": 34, "score": 183932.29581500974 }, { "content": "#[smartmodule(map)]\n\npub fn my_map(_record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_map.rs", "rank": 35, "score": 181974.33890212505 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n let mut value = Vec::from(record.value.as_ref());\n\n\n\n value.make_ascii_uppercase();\n\n Ok((key, value.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map/src/lib.rs", "rank": 36, "score": 181974.33890212505 }, { "content": "#[fluvio_test(min_spu = 2, topic = \"test\")]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_multi.rs", "rank": 37, "score": 180838.51797987995 }, { "content": "#[fluvio_test(name = 1)]\n\npub fn test1(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_name.rs", "rank": 38, "score": 180838.51797987995 }, { "content": "#[fluvio_test(timeout = \"1\")]\n\npub fn test2(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_timeout.rs", "rank": 39, "score": 180838.51797987995 }, { "content": "#[fluvio_test(timeout = \"a\")]\n\npub fn test1(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_timeout.rs", "rank": 40, "score": 180838.51797987995 }, { "content": "#[fluvio_test(name = \"test_name\")]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_name.rs", "rank": 41, "score": 180838.51797987995 }, { "content": "#[fluvio_test(topic = \"test\")]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": 
"crates/fluvio-test-derive/ui-tests/pass_topic.rs", "rank": 42, "score": 180838.51797987995 }, { "content": "#[fluvio_test(timeout = 60)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_timeout.rs", "rank": 43, "score": 180838.51797987995 }, { "content": "#[fluvio_test(min_spu = a, topic = 2)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_multi.rs", "rank": 44, "score": 180838.51797987995 }, { "content": "#[fluvio_test(timeout = a)]\n\npub fn test3(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_timeout.rs", "rank": 45, "score": 180838.51797987995 }, { "content": "#[fluvio_test(name = unquoted)]\n\npub fn test2(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_name.rs", "rank": 46, "score": 180838.51797987995 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n let int = string.parse::<i32>()?;\n\n let value = (int * 2).to_string();\n\n\n\n Ok((key, value.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_double/src/lib.rs", "rank": 47, "score": 179600.09648697736 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let key = record.key.clone();\n\n\n\n let string = std::str::from_utf8(record.value.as_ref())?;\n\n let output = SSN_RE.replace_all(string, \"***-**-****\").to_string();\n\n\n\n Ok((key, output.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_regex/src/lib.rs", "rank": 48, "score": 179600.09648697736 }, { "content": "#[smartmodule(map)]\n\npub fn map(record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let json = serde_json::from_slice::<serde_json::Value>(record.value.as_ref())?;\n\n let yaml_bytes = serde_yaml::to_vec(&json)?;\n\n\n\n Ok((record.key().cloned(), yaml_bytes.into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/map_json/src/lib.rs", "rank": 49, "score": 179600.09648697736 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn get_log_directory() -> &'static str {\n\n \"/tmp\"\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DefaultLogDirectory(String);\n\n\n\nimpl Default for DefaultLogDirectory {\n\n fn default() -> Self {\n\n Self(get_log_directory().to_string())\n\n }\n\n}\n\n\n\nimpl fmt::Display for DefaultLogDirectory {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl FromStr for DefaultLogDirectory {\n", "file_path": "crates/fluvio-cluster/src/cli/start/mod.rs", "rank": 50, "score": 178771.15468326496 }, { "content": "#[fluvio_test(min_spu = 2)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_min_spu.rs", "rank": 51, "score": 178654.31129924685 }, { "content": "#[fluvio_test(topic = 1)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_topic_type.rs", "rank": 52, "score": 178654.31129924685 }, { "content": "#[fluvio_test(fail)]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": 
"crates/fluvio-test-derive/ui-tests/fail_invalid_key.rs", "rank": 53, "score": 178654.31129924685 }, { "content": "#[fluvio_test]\n\npub fn run(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_no_macro_args.rs", "rank": 54, "score": 178654.3112992469 }, { "content": "#[fluvio_test(cluster_type=[\"k8\", \"local\"])]\n\npub fn test2(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_cluster_type.rs", "rank": 55, "score": 178654.31129924685 }, { "content": "#[fluvio_test(cluster_type=\"not-a-type\")]\n\npub fn test1(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n\n// Wrong attr type (MetaList), but values would have been valid\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_cluster_type.rs", "rank": 56, "score": 178654.3112992469 }, { "content": "pub fn variant_size(num: i64) -> usize {\n\n let mut v = (num << 1) ^ (num >> 31);\n\n let mut bytes = 1;\n\n\n\n while (v & 0xffffff80) != 0 {\n\n bytes += 1;\n\n v >>= 7;\n\n }\n\n\n\n bytes\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use std::io::Cursor;\n\n use bytes::{BytesMut, BufMut};\n\n use super::varint_decode;\n\n use super::variant_encode;\n\n use super::variant_size;\n", "file_path": "crates/fluvio-protocol/src/core/varint.rs", "rank": 57, "score": 178432.33282734558 }, { "content": "/// Structure json data into table row\n\n/// Print table header if `print_header` is true\n\n/// Rows may not stay aligned with table header\n\npub fn format_basic_table_record(record: &[u8], print_header: bool) -> Option<String> {\n\n use prettytable::{Row, cell, Cell, Slice};\n\n use prettytable::format::{self, FormatBuilder};\n\n\n\n let maybe_json: serde_json::Value = match serde_json::from_slice(record) {\n\n Ok(value) => value,\n\n Err(e) => {\n\n println!(\"error parsing record as json: {}\", e);\n\n return None;\n\n }\n\n };\n\n\n\n let obj = if let Some(obj) = maybe_json.as_object() {\n\n obj\n\n } else {\n\n println!(\"error: Unable to parse json as object map\");\n\n return None;\n\n };\n\n\n\n // This is the case where we don't provide any table info. 
We want to print a table w/ all top-level keys as headers\n", "file_path": "crates/fluvio-cli/src/consume/record_format.rs", "rank": 58, "score": 177323.20971536724 }, { "content": "#[fluvio_test(cluster_type=\"k8\")]\n\npub fn test_one(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_cluster_type.rs", "rank": 59, "score": 176556.1399860009 }, { "content": "#[fluvio_test(min_spu = a)]\n\npub fn test1(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_min_spu_type.rs", "rank": 60, "score": 176556.1399860009 }, { "content": "#[fluvio_test(min_spu = \"1\")]\n\npub fn test2(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/fail_min_spu_type.rs", "rank": 61, "score": 176556.1399860009 }, { "content": "#[fluvio_test(cluster_type=\"local\")]\n\npub fn test_two(mut test_driver: TestDriver, test_case: TestCase) {\n\n}\n\n\n", "file_path": "crates/fluvio-test-derive/ui-tests/pass_cluster_type.rs", "rank": 62, "score": 176556.1399860009 }, { "content": "// store varaint\n\npub fn variant_encode<T>(buf: &mut T, num: i64) -> Result<(), Error>\n\nwhere\n\n T: BufMut,\n\n{\n\n let mut v = (num << 1) ^ (num >> 31);\n\n\n\n while (v & 0xffffff80) != 0 {\n\n let b: u8 = ((v & 0x7f) | 0x80) as u8;\n\n if buf.remaining_mut() == 0 {\n\n return Err(Error::new(\n\n ErrorKind::UnexpectedEof,\n\n \"varint encoding no more bytes left\",\n\n ));\n\n }\n\n buf.put_u8(b);\n\n v >>= 7;\n\n }\n\n if buf.remaining_mut() == 0 {\n\n return Err(Error::new(\n\n ErrorKind::UnexpectedEof,\n\n \"varint encoding no more bytes left\",\n\n ));\n\n }\n\n buf.put_u8(v as u8);\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fluvio-protocol/src/core/varint.rs", "rank": 63, "score": 175433.41665108968 }, { "content": "pub fn install_bin<P: AsRef<Path>, B: AsRef<[u8]>>(bin_path: P, bytes: B) -> Result<()> {\n\n use std::io::Write as _;\n\n\n\n let bin_path = bin_path.as_ref();\n\n\n\n // Create directories to bin_path if they do not exist\n\n let parent = bin_path\n\n .parent()\n\n .ok_or_else(|| IoError::new(ErrorKind::NotFound, \"parent directory not found\"))?;\n\n std::fs::create_dir_all(&parent)?;\n\n\n\n // Write bin to temporary file\n\n let tmp_dir = tempdir::TempDir::new_in(parent, \"fluvio-tmp\")?;\n\n let tmp_path = tmp_dir.path().join(\"fluvio\");\n\n let mut tmp_file = File::create(&tmp_path)?;\n\n tmp_file.write_all(bytes.as_ref())?;\n\n\n\n // Mark the file as executable\n\n make_executable(&mut tmp_file)?;\n\n\n\n // Rename (atomic move on unix) temp file to destination\n\n std::fs::rename(&tmp_path, &bin_path)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 64, "score": 173537.26746791616 }, { "content": "/// Given an API key, it returns max_version. 
None if not found\n\npub fn lookup_version(api_key: AdminPublicApiKey, versions: &[ApiVersionKey]) -> Option<i16> {\n\n for version in versions {\n\n if version.api_key == api_key as i16 {\n\n return Some(version.max_version);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "crates/fluvio-sc-schema/src/versions.rs", "rank": 65, "score": 173044.15972499712 }, { "content": "#[smartmodule(join)]\n\npub fn my_join(_record: &Record, _record1: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_join.rs", "rank": 66, "score": 170952.95527170977 }, { "content": "#[smartmodule(join)]\n\npub fn join(left_record: &Record, right_record: &Record) -> Result<(Option<RecordData>, RecordData)> {\n\n let left_value: i32 = std::str::from_utf8(left_record.value.as_ref())?.parse()?;\n\n let right_value: i32 = std::str::from_utf8(right_record.value.as_ref())?.parse()?;\n\n let value = left_value + right_value;\n\n\n\n Ok((None, value.to_string().into()))\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/join/src/lib.rs", "rank": 67, "score": 166668.2397690558 }, { "content": "#[smartmodule(map, params)]\n\npub fn map(_record: &Record, _opt: &MapOpt) -> Result<(Option<RecordData>, RecordData)> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_map_with_params.rs", "rank": 68, "score": 166668.2397690558 }, { "content": "#[smartmodule(filter)]\n\npub fn filter_log_level(record: &Record) -> Result<bool> {\n\n let log = serde_json::from_slice::<StructuredLog>(record.value.as_ref())?;\n\n Ok(log.level > LogLevel::Debug)\n\n}\n", "file_path": "crates/fluvio-smartmodule/examples/filter_json/src/lib.rs", "rank": 69, "score": 164395.44882808632 }, { "content": "fn default_base_dir() -> PathBuf {\n\n PathBuf::from(SPU_LOG_BASE_DIR)\n\n}\n\n\n\nconst fn default_update_hw() -> bool {\n\n true\n\n}\n\n\n\nconst fn default_index_max_bytes() -> Size {\n\n SPU_LOG_INDEX_MAX_BYTES\n\n}\n\n\n\nconst fn default_index_max_interval_bytes() -> Size {\n\n SPU_LOG_INDEX_MAX_INTERVAL_BYTES\n\n}\n\n\n\nconst fn default_segment_max_bytes() -> Size {\n\n SPU_LOG_SEGMENT_MAX_BYTES\n\n}\n\n\n", "file_path": "crates/fluvio-storage/src/config.rs", "rank": 70, "score": 163426.16959191632 }, { "content": "///\n\n/// Validate computed topic spec parameters and update topic status\n\n/// * error is passed to the topic reason.\n\n///\n\npub fn validate_computed_topic_parameters(param: &TopicReplicaParam) -> TopicNextState {\n\n if let Err(err) = ReplicaSpec::valid_partition(&param.partitions) {\n\n TopicStatus::next_resolution_invalid_config(&err.to_string()).into()\n\n } else if let Err(err) = ReplicaSpec::valid_replication_factor(&param.replication_factor) {\n\n TopicStatus::next_resolution_invalid_config(&err.to_string()).into()\n\n } else {\n\n TopicStatus::next_resolution_pending().into()\n\n }\n\n}\n\n\n\n///\n\n/// Generate Replica Map if there are enough online spus\n\n/// * returns a replica map or a reason for the failure\n\n/// * fatal error configuration errors and are not recoverable\n\n///\n\n#[instrument(level = \"trace\", skip(spus, param))]\n\npub async fn generate_replica_map(\n\n spus: &SpuAdminStore,\n\n param: &TopicReplicaParam,\n\n) -> TopicNextState {\n", "file_path": "crates/fluvio-sc/src/controllers/topics/policy.rs", "rank": 71, "score": 160188.24608863544 }, { "content": "//\n\n/// Validate assigned topic spec parameters and update topic status\n\n/// * error is passed to the topic 
reason.\n\n///\n\npub fn validate_assigned_topic_parameters(partition_map: &PartitionMaps) -> TopicNextState {\n\n if let Err(err) = partition_map.valid_partition_map() {\n\n TopicStatus::next_resolution_invalid_config(&err.to_string()).into()\n\n } else {\n\n TopicStatus::next_resolution_pending().into()\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-sc/src/controllers/topics/policy.rs", "rank": 72, "score": 160188.24608863544 }, { "content": "fn create_batches(records: Vec<Record>) -> Vec<Batch> {\n\n if records.write_size(0) < *MAX_BATCH_SIZE_BYTES || records.len() == 1 {\n\n let batch = Batch::from(records);\n\n vec![batch]\n\n } else {\n\n debug!(\"Splitting batch into multiple batches\");\n\n let mut batches = Vec::new();\n\n let mut current_batch = Batch::new();\n\n for record in records {\n\n if current_batch.write_size(0) + record.write_size(0) > *MAX_BATCH_SIZE_BYTES {\n\n debug!(\n\n len = current_batch.write_size(0),\n\n \"Created batch with length\"\n\n );\n\n\n\n batches.push(current_batch);\n\n current_batch = Batch::new();\n\n }\n\n current_batch.add_record(record);\n\n }\n", "file_path": "crates/fluvio/src/producer/mod.rs", "rank": 73, "score": 159715.4877073864 }, { "content": "pub fn impl_smart_opt(input: DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &input.ident;\n\n\n\n // parse out all the field names in the struct as `Ident`s\n\n let fields = match input.data {\n\n Data::Struct(st) => st.fields,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n input.ident,\n\n \"SmartOpt derive macro only can be used on structs.\",\n\n ))\n\n }\n\n };\n\n\n\n let idents: Vec<&Ident> = fields\n\n .iter()\n\n .filter_map(|field| field.ident.as_ref())\n\n .collect::<Vec<&Ident>>();\n\n\n\n let keys: Vec<String> = idents\n", "file_path": "crates/fluvio-smartmodule-derive/src/generator/opt.rs", "rank": 74, "score": 156924.38329611864 }, { "content": "/// create server and spin up services, but don't run server\n\npub fn create_services(\n\n local_spu: SpuConfig,\n\n internal: bool,\n\n public: bool,\n\n) -> (\n\n DefaultSharedGlobalContext,\n\n Option<InternalApiServer>,\n\n Option<SpuPublicServer>,\n\n) {\n\n let ctx = FileReplicaContext::new_shared_context(local_spu);\n\n\n\n let public_ep_addr = ctx.config().public_socket_addr().to_owned();\n\n let private_ep_addr = ctx.config().private_socket_addr().to_owned();\n\n\n\n let public_server = if public {\n\n Some(create_public_server(public_ep_addr, ctx.clone()))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "crates/fluvio-spu/src/start.rs", "rank": 75, "score": 156215.47726946673 }, { "content": "fn fluvio_base_dir() -> Result<PathBuf> {\n\n if let Ok(dir) = std::env::var(\"FLUVIO_DIR\") {\n\n // Assume this is like `~/.fluvio\n\n let path = PathBuf::from(dir);\n\n return fluvio_base_dir_create(path);\n\n }\n\n let home =\n\n home::home_dir().ok_or_else(|| IoError::new(ErrorKind::NotFound, \"Homedir not found\"))?;\n\n let path = home.join(\".fluvio\");\n\n\n\n fluvio_base_dir_create(path)\n\n}\n\n\n", "file_path": "crates/fluvio-cli/src/install/mod.rs", "rank": 76, "score": 154977.54425200386 }, { "content": "/// compute total number of values in the default batch\n\nfn compute_batch_record_size(batch: &Batch) -> usize {\n\n batch\n\n .records()\n\n .iter()\n\n .fold(0, |acc, batch| acc + batch.value.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use tracing::debug;\n\n use std::env::temp_dir;\n\n use std::fs::metadata;\n\n use std::io::Cursor;\n\n use std::path::PathBuf;\n\n\n\n use 
flv_util::fixture::ensure_new_dir;\n\n use dataplane::batch::{Batch, MemoryRecords};\n\n use dataplane::Size;\n\n use dataplane::core::Decoder;\n", "file_path": "crates/fluvio-storage/src/segment.rs", "rank": 77, "score": 153167.95572909102 }, { "content": "fn create_replica_map(rows: Vec<Vec<i32>>) -> BTreeMap<i32, Vec<i32>> {\n\n let mut map = BTreeMap::new();\n\n for (idx, row) in rows.iter().enumerate() {\n\n map.insert(idx as i32, row.clone());\n\n }\n\n map\n\n}\n\n\n\nimpl TopicStatus {\n\n pub fn new<S>(resolution: TopicResolution, replica_map: Vec<Vec<i32>>, reason: S) -> Self\n\n where\n\n S: Into<String>,\n\n {\n\n TopicStatus {\n\n resolution,\n\n replica_map: create_replica_map(replica_map),\n\n reason: reason.into(),\n\n }\n\n }\n\n\n", "file_path": "crates/fluvio-controlplane-metadata/src/topic/status.rs", "rank": 78, "score": 152714.52265533723 }, { "content": "fn zip(raw_buffer: Vec<u8>) -> Vec<u8> {\n\n use std::io::Read;\n\n let mut encoder = GzEncoder::new(raw_buffer.as_slice(), Compression::default());\n\n let mut buffer = Vec::with_capacity(raw_buffer.len());\n\n encoder\n\n .read_to_end(&mut buffer)\n\n .unwrap_or_else(|_| panic!(\"Unable to gzip file\"));\n\n buffer\n\n}\n\n\n", "file_path": "crates/fluvio-spu/src/services/public/stream_fetch_test.rs", "rank": 79, "score": 152677.5167711623 }, { "content": "/// Fetch OS information\n\nfn os_info() -> Option<String> {\n\n use sysinfo::SystemExt;\n\n let sys = sysinfo::System::new_all();\n\n\n\n let info = format!(\n\n \"{} {} (kernel {})\",\n\n sys.name()?,\n\n sys.os_version()?,\n\n sys.kernel_version()?,\n\n );\n\n\n\n Some(info)\n\n}\n", "file_path": "crates/fluvio-cli/src/version.rs", "rank": 80, "score": 152613.4254839422 }, { "content": "/// Copy a byte array into an instance's linear memory\n\n/// and return the offset relative to the module's memory.\n\npub fn copy_memory_to_instance(\n\n store: &mut Store<()>,\n\n instance: &Instance,\n\n bytes: &[u8],\n\n) -> Result<isize, Error> {\n\n // Get the \"memory\" export of the module.\n\n // If the module does not export it, just panic,\n\n // since we are not going to be able to copy the data.\n\n let memory = instance\n\n .get_memory(&mut *store, MEMORY)\n\n .ok_or_else(|| anyhow!(\"Missing memory\"))?;\n\n\n\n // The module is not using any bindgen libraries,\n\n // so it should export its own alloc function.\n\n //\n\n // Get the guest's exported alloc function, and call it with the\n\n // length of the byte array we are trying to copy.\n\n // The result is an offset relative to the module's linear memory,\n\n // which is used to copy the bytes into the module's memory.\n\n // Then, return the offset.\n", "file_path": "crates/fluvio-smartengine/src/smartmodule/memory.rs", "rank": 81, "score": 151523.44469017355 }, { "content": "fn cluster_cleanup(option: EnvironmentSetup) {\n\n if option.cluster_delete() {\n\n let mut setup = TestCluster::new(option);\n\n\n\n let cluster_cleanup_wait = async_process!(async {\n\n setup.remove_cluster().await;\n\n });\n\n let _ = cluster_cleanup_wait\n\n .join()\n\n .expect(\"Cluster cleanup wait failed\");\n\n }\n\n}\n\n\n", "file_path": "crates/fluvio-test/src/main.rs", "rank": 82, "score": 150319.56433354216 }, { "content": "fn validate_consume_message_cli(test_case: &SmokeTestCase, offsets: Offsets) {\n\n let replication = test_case.environment.replication;\n\n\n\n for i in 0..replication {\n\n let topic_name = test_case.environment.topic_name();\n\n let offset = offsets.get(&topic_name).expect(\"topic offset\");\n\n let 
mut command = get_binary(\"fluvio\").expect(\"fluvio not found\");\n\n command\n\n .arg(\"consume\")\n\n .arg(&topic_name)\n\n .arg(\"--partition\")\n\n .arg(\"0\")\n\n .arg(\"-d\")\n\n .arg(\"-o\")\n\n .arg(offset.to_string());\n\n println!(\"Executing> {}\", command.display());\n\n let output = command.result().expect(\"fluvio command failed\");\n\n\n\n io::stderr().write_all(&output.stderr).unwrap();\n\n\n", "file_path": "crates/fluvio-test/src/tests/smoke/consume.rs", "rank": 83, "score": 149760.97645635865 }, { "content": "#[smartmodule(array_map, params)]\n\npub fn my_array_map(\n\n _record: &Record,\n\n _opt: &ArrayOpt,\n\n) -> Result<Vec<(Option<RecordData>, RecordData)>> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/fluvio-smartmodule/ui-tests/pass_array_map_with_params.rs", "rank": 84, "score": 147270.35889150185 }, { "content": "pub fn create_batch() -> Batch {\n\n create_batch_with_producer(12, 2)\n\n}\n\n\n", "file_path": "crates/fluvio-dataplane-protocol/src/fixture.rs", "rank": 85, "score": 147193.42182655275 }, { "content": "#[derive(Serialize)]\n\nstruct ListSpus(Vec<Metadata<SpuSpec>>);\n\n\n", "file_path": "crates/fluvio-cluster/src/cli/spu/display.rs", "rank": 86, "score": 146109.37396850088 }, { "content": "pub trait EnvDetail: Debug + Clone {\n\n fn set_topic_name(&mut self, topic: String);\n\n fn topic_name(&self) -> String;\n\n fn is_topic_set(&self) -> bool;\n\n fn replication(&self) -> u16;\n\n fn client_log(&self) -> Option<String>;\n\n fn spu(&self) -> u16;\n\n fn remove_cluster_before(&self) -> bool;\n\n fn cluster_start(&self) -> bool;\n\n fn cluster_delete(&self) -> bool;\n\n fn develop_mode(&self) -> bool;\n\n fn skip_checks(&self) -> bool;\n\n fn tls_user(&self) -> String;\n\n fn authorization_config_map(&self) -> Option<String>;\n\n fn server_log(&self) -> Option<String>;\n\n fn log_dir(&self) -> Option<String>;\n\n fn timeout(&self) -> Duration;\n\n fn set_timeout(&mut self, timeout: Duration);\n\n fn cluster_type(&self) -> EnvironmentType;\n\n}\n", "file_path": "crates/fluvio-test-util/test_meta/environment.rs", "rank": 87, "score": 145422.44461193137 }, { "content": "pub fn rand_record() -> Record {\n\n let len: u16 = rand::random();\n\n let record: Vec<u8> = (0..len).map(|_| rand::random::<u8>()).collect();\n\n record\n\n}\n\n\n", "file_path": "crates/fluvio-test/src/tests/concurrent/util.rs", "rank": 88, "score": 145017.50268457015 }, { "content": "// FIXME: Need to confirm SPU options count match cluster. 
Offer self-correcting behavior\n\nfn cluster_setup(option: &EnvironmentSetup) -> Result<(), ()> {\n\n let cluster_setup_wait = async_process!(async {\n\n if option.remove_cluster_before() {\n\n println!(\"Deleting existing cluster before starting test\");\n\n let mut setup = TestCluster::new(option.clone());\n\n setup.remove_cluster().await;\n\n }\n\n\n\n if option.cluster_start() || option.remove_cluster_before() {\n\n println!(\"Starting cluster and testing connection\");\n\n let mut test_cluster = TestCluster::new(option.clone());\n\n\n\n test_cluster\n\n .start()\n\n .await\n\n .expect(\"Unable to connect to fresh test cluster\");\n\n } else {\n\n println!(\"Testing connection to Fluvio cluster in profile\");\n\n Fluvio::connect()\n\n .await\n", "file_path": "crates/fluvio-test/src/main.rs", "rank": 89, "score": 144199.80319402707 }, { "content": "pub trait Request: Encoder + Decoder + Debug {\n\n const API_KEY: u16;\n\n\n\n const DEFAULT_API_VERSION: i16 = 0;\n\n const MIN_API_VERSION: i16 = 0;\n\n const MAX_API_VERSION: i16 = -1;\n\n\n\n type Response: Encoder + Decoder + Debug;\n\n}\n\n\n", "file_path": "crates/fluvio-protocol/src/api/mod.rs", "rank": 90, "score": 143839.64228138738 }, { "content": "pub fn default_convert_from_k8<S>(\n\n k8_obj: K8Obj<S::K8Spec>,\n\n) -> Result<MetadataStoreObject<S, K8MetaItem>, K8ConvertError<S::K8Spec>>\n\nwhere\n\n S: K8ExtendedSpec,\n\n S::IndexKey: TryFrom<String> + Display,\n\n <S::IndexKey as TryFrom<String>>::Error: Debug,\n\n <<S as K8ExtendedSpec>::K8Spec as K8Spec>::Status: Into<S::Status>,\n\n S::K8Spec: Into<S>,\n\n{\n\n let k8_name = k8_obj.metadata.name.clone();\n\n let result: Result<S::IndexKey, _> = k8_name.try_into();\n\n match result {\n\n Ok(key) => {\n\n // convert K8 Spec/Status into Metadata Spec/Status\n\n let local_spec = k8_obj.spec.into();\n\n let local_status = k8_obj.status.into();\n\n\n\n let ctx_item_result: Result<K8MetaItem, _> = k8_obj.metadata.try_into();\n\n match ctx_item_result {\n", "file_path": "crates/fluvio-stream-model/src/store/k8.rs", "rank": 91, "score": 142945.7577505212 }, { "content": "#[derive(Debug)]\n\nstruct ScopeBindings(HashMap<String, Vec<String>>);\n\n\n\nimpl ScopeBindings {\n\n pub fn load(scope_binding_file_path: &Path) -> Result<Self, IoError> {\n\n let file = std::fs::read_to_string(scope_binding_file_path)?;\n\n let scope_bindings = Self(serde_json::from_str(&file)?);\n\n debug!(\"scope bindings loaded {:?}\", scope_bindings);\n\n Ok(scope_bindings)\n\n }\n\n pub fn get_scopes(&self, principal: &str) -> Vec<String> {\n\n trace!(\"getting scopes for principal {:?}\", principal);\n\n if let Some(scopes) = self.0.get(principal) {\n\n trace!(\"scopes found for principal {:?}: {:?}\", principal, scopes);\n\n scopes.clone()\n\n } else {\n\n trace!(\"scopes not found for principal {:?}\", principal);\n\n Vec::new()\n\n }\n\n }\n\n}\n", "file_path": "crates/fluvio-auth/src/x509/authenticator.rs", "rank": 92, "score": 142299.50551211264 }, { "content": "pub fn main_loop(opt: SpuOpt) {\n\n use std::time::Duration;\n\n\n\n use sysinfo::{System, SystemExt};\n\n use tracing::info;\n\n\n\n use fluvio_future::task::run_block_on;\n\n use fluvio_future::timer::sleep;\n\n // parse configuration (program exits on error)\n\n let (spu_config, tls_acceptor_option) = opt.process_spu_cli_or_exit();\n\n\n\n println!(\"starting spu server (id:{})\", spu_config.id);\n\n\n\n let mut sys = System::new_all();\n\n sys.refresh_all();\n\n info!(version = &*crate::VERSION, \"Platform\");\n\n info!(commit = 
env!(\"GIT_HASH\"), \"Git\");\n\n info!(name = ?sys.name(),\"System\");\n\n info!(kernel = ?sys.kernel_version(),\"System\");\n\n info!(os_version = ?sys.long_os_version(),\"System\");\n", "file_path": "crates/fluvio-spu/src/start.rs", "rank": 93, "score": 141250.17448820145 }, { "content": "#[instrument(skip(request, auth_ctx, sink, end_event))]\n\npub fn handle_watch_request<AC>(\n\n request: RequestMessage<ObjectApiWatchRequest>,\n\n auth_ctx: &AuthServiceContext<AC>,\n\n sink: ExclusiveFlvSink,\n\n end_event: Arc<StickyEvent>,\n\n) -> Result<(), IoError> {\n\n debug!(\"handling watch request\");\n\n let (header, req) = request.get_header_request();\n\n\n\n match req {\n\n ObjectApiWatchRequest::Topic(_) => WatchController::<TopicSpec>::update(\n\n sink,\n\n end_event,\n\n auth_ctx.global_ctx.topics().clone(),\n\n header,\n\n ),\n\n ObjectApiWatchRequest::Spu(_) => WatchController::<SpuSpec>::update(\n\n sink,\n\n end_event,\n\n auth_ctx.global_ctx.spus().clone(),\n", "file_path": "crates/fluvio-sc/src/services/public_api/watch.rs", "rank": 94, "score": 140976.56015515394 }, { "content": "/// Process server based on output type\n\npub fn format_spu_response_output<O>(\n\n out: std::sync::Arc<O>,\n\n spus: Vec<Metadata<SpuSpec>>,\n\n output_type: OutputType,\n\n) -> Result<(), ClusterCliError>\n\nwhere\n\n O: Terminal,\n\n{\n\n if !spus.is_empty() {\n\n let spu_list = ListSpus(spus);\n\n out.render_list(&spu_list, output_type)?;\n\n } else {\n\n t_println!(out, \"no spu\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl TableOutputHandler for ListSpus {\n\n /// table header implementation\n", "file_path": "crates/fluvio-cluster/src/cli/spu/display.rs", "rank": 95, "score": 140970.8807619448 }, { "content": " ptr: *mut c_void,\n\n len: Size,\n\n}\n\n\n\n// const MEM_SIZE: u64 = 1024 * 1024 * 10; //10 MBs\n\n\n\nunsafe impl Send for LogIndex {}\n\n\n\nunsafe impl Sync for LogIndex {}\n\n\n\nimpl LogIndex {\n\n unsafe_unpinned!(mmap: MemoryMappedFile);\n\n\n\n pub async fn open_from_offset(\n\n base_offset: Offset,\n\n option: &ConfigOption,\n\n ) -> Result<Self, IoError> {\n\n let index_file_path = generate_file_name(&option.base_dir, base_offset, EXTENSION);\n\n\n\n debug!(?index_file_path, \"opening index\");\n", "file_path": "crates/fluvio-storage/src/index.rs", "rank": 97, "score": 58.440601329958426 }, { "content": " if last_offset >= offset {\n\n debug!(last_offset, \"found batch last offset\");\n\n return Ok(Some(batch_pos));\n\n } else {\n\n trace!(last_offset, \"skipping batch end offset\");\n\n }\n\n }\n\n Ok(None)\n\n }\n\n}\n\n\n\nimpl Segment<LogIndex, FileRecordsSlice> {\n\n /// open read only segments if base and end offset are known\n\n pub async fn open_for_read(\n\n base_offset: Offset,\n\n end_offset: Offset,\n\n option: &ConfigOption,\n\n ) -> Result<Self, StorageError> {\n\n debug!(base_offset, end_offset, ?option, \"open for read\");\n\n let msg_log = FileRecordsSlice::open(base_offset, option).await?;\n", "file_path": "crates/fluvio-storage/src/segment.rs", "rank": 99, "score": 57.15096156832526 } ]
Rust
neutopia/src/lib.rs
konkers/neutopia
770dbbc3dd6a61d418f38683f43ccfb729ea1039
use std::collections::HashMap; use std::io::{prelude::*, Cursor, SeekFrom}; use failure::{format_err, Error}; pub mod interval; pub mod rom; pub mod rommap; pub mod util; pub mod verify; pub use rom::NeutopiaRom; pub use verify::{verify, RomInfo}; #[derive(Clone, Debug)] pub struct Room { pub warps: Vec<u8>, pub enemies: Vec<u8>, pub objects: Vec<rom::object::TableEntry>, } #[derive(Clone, Debug)] pub struct Area { pub rooms: Vec<Room>, pub chest_table: Vec<rom::Chest>, } #[derive(Clone, Debug)] pub struct Chest { pub info: rom::Chest, pub area: u8, pub room: u8, pub index: u8, } #[derive(Clone, Debug)] pub struct Conditional { pub data: Vec<rom::object::TableEntry>, } pub struct Neutopia { pub areas: Vec<Area>, pub conditionals: HashMap<rom::Chest, Conditional>, pub rom_data: Vec<u8>, n: NeutopiaRom, } impl Neutopia { pub fn new(data: &[u8]) -> Result<Self, Error> { let mut rando = Self { n: NeutopiaRom::new(data)?, areas: Vec::new(), conditionals: HashMap::new(), rom_data: Vec::from(data), }; for area_idx in 0..=0xf { rando.import_area(area_idx)?; } Ok(rando) } fn import_area(&mut self, area_idx: usize) -> Result<(), Error> { let room_info_table = &self.n.room_info_tables[area_idx]; let chest_table = &self.n.chest_tables[&self.n.chest_table_pointers[area_idx]]; let mut rooms = Vec::new(); for room_idx in 0u8..0x40 { let room = &room_info_table[&room_idx]; let mut object_table = rom::object::parse_object_table(&room.object_table)?; if object_table.len() > 2 { let mut i = 0; while (i + 2) < object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &chest_table[id as usize]; let next = object_table[i + 1].clone(); let next_next = object_table[i + 2].clone(); if next.is_conditional() { object_table.remove(i + 1); object_table.remove(i + 1); self.conditionals.insert( chest.clone(), Conditional { data: vec![next, next_next], }, ); } } i += 1; } } rooms.push(Room { warps: room.warp_table.clone(), enemies: room.enemy_table.clone(), objects: object_table, }); } self.areas.push(Area { rooms, chest_table: chest_table.clone(), }); Ok(()) } pub fn filter_chests(&self, filter: impl Fn(&Chest) -> bool) -> Vec<Chest> { let mut chests = Vec::new(); for (area_idx, area) in self.areas.iter().enumerate() { for (room_idx, room) in area.rooms.iter().enumerate() { let mut chest_index = 0; for entry in &room.objects { if let Some(id) = entry.chest_id() { let chest = Chest { info: area.chest_table[id as usize].clone(), area: area_idx as u8, room: room_idx as u8, index: chest_index, }; chest_index += 1; if filter(&chest) { chests.push(chest); } } } } } chests } fn get_table_id_for_chest(&self, chest: &Chest) -> Result<usize, Error> { let area = &self.areas[chest.area as usize]; let room = &area.rooms[chest.room as usize]; let mut chest_index = 0; for obj_entry in &room.objects { if let Some(id) = obj_entry.chest_id() { if chest_index == chest.index { return Ok(id as usize); } chest_index += 1; } } Err(format_err!("can't find id for chest {:?}", chest)) } pub fn update_chests(&mut self, chests: &[Chest]) -> Result<(), Error> { for chest in chests { let id = self.get_table_id_for_chest(chest)?; let entry = self.areas[chest.area as usize] .chest_table .get_mut(id as usize) .ok_or_else(|| format_err!("incoherent chest id {:02x}", id))?; *entry = chest.info.clone(); } Ok(()) } fn write_area(&self, area_idx: usize, rom_writer: &mut Cursor<Vec<u8>>) -> Result<u32, Error> { let area = &self.areas[area_idx]; let cur_offset = rom_writer.position(); let mut room_ptrs = Cursor::new(Vec::new()); let 
room_ptrs_offset = cur_offset; let room_data_offset = cur_offset + 0x40 * 3; rom_writer.seek(SeekFrom::Start(room_data_offset as u64))?; for room_idx in 0..0x40 { let room = &area.rooms[room_idx]; let room_offset = rom_writer.position(); room_ptrs.write_all(&util::rom_offset_to_pointer(room_offset as u32))?; let mut object_table = room.objects.clone(); for i in 0..object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &area.chest_table[id as usize]; let loc = match object_table[i].loc() { Some(loc) => loc, _ => continue, }; if let Some(cond) = self.conditionals.get(&chest) { for j in 0..cond.data.len() { let mut entry = cond.data[j].clone(); if let rom::object::TableEntry::Object(ref mut o) = entry { o.x = loc.0; o.y = loc.1; } object_table.insert(i + j + 1, entry); } break; } } } rom_writer.seek(SeekFrom::Current(3 * 3))?; let warp_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.warps)?; let enemy_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.enemies)?; rom_writer.write_all(&[0xff])?; let object_table_ptr = rom_writer.position() as u32; for o in &object_table { o.write(rom_writer)?; } rom_writer.write_all(&[0xff])?; let room_end_pos = rom_writer.position(); rom_writer.seek(SeekFrom::Start(room_offset))?; rom_writer.write_all(&util::rom_offset_to_pointer(warp_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(enemy_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(object_table_ptr))?; rom_writer.seek(SeekFrom::Start(room_end_pos))?; } let next_offset = rom_writer.position() as u32; rom_writer.seek(SeekFrom::Start(room_ptrs_offset as u64))?; rom_writer.write_all(room_ptrs.get_ref())?; rom_writer.seek(SeekFrom::Start( rommap::AREA_TABLE as u64 + area_idx as u64 * 3, ))?; rom_writer.write_all(&util::rom_offset_to_pointer(room_ptrs_offset as u32))?; Ok(next_offset) } pub fn write(&self) -> Result<Vec<u8>, Error> { let mut rom_writer = Cursor::new(self.rom_data.clone()); let area_range = 4..=0xf; for area_idx in area_range.clone() { let area = &self.areas[area_idx]; let offset = 0x4fe00 + (0x20 * area_idx as u64); rom_writer.seek(SeekFrom::Start(offset))?; for chest in &area.chest_table { chest.write(&mut rom_writer)?; } rom_writer.seek(SeekFrom::Start( rommap::CHEST_TABLE as u64 + 3 * area_idx as u64, ))?; let ptr = util::rom_offset_to_pointer(offset as u32); rom_writer.write_all(&ptr)?; } let mut cur_offset = self.n.area_pointers[4]; let mut offset_c = None; for area_idx in area_range { if area_idx == 0xc { offset_c = Some(cur_offset); } rom_writer.seek(SeekFrom::Start(cur_offset as u64))?; cur_offset = self.write_area(area_idx, &mut rom_writer)? } if let Some(offset) = offset_c { rom_writer.seek(SeekFrom::Start(rommap::AREA_TABLE as u64 + 0x10 * 3))?; rom_writer.write_all(&util::rom_offset_to_pointer(offset as u32))?; } Ok(rom_writer.into_inner()) } } pub fn area_name(area: u8) -> &'static str { match area { 0x0 => "Land Sphere", 0x1 => "Subterranean Sphere", 0x2 => "Sea Sphere", 0x3 => "Sky Sphere", 0x4 => "Crypt 1", 0x5 => "Crypt 2", 0x6 => "Crypt 3", 0x7 => "Crypt 4", 0x8 => "Crypt 5", 0x9 => "Crypt 6", 0xa => "Crypt 7", 0xb => "Crypt 8", 0xc => "Land Sphere Rooms", 0xd => "Subterranean Sphere Rooms", 0xe => "Sea Sphere Rooms", 0xf => "Sky Sphere Rooms", _ => "Unknown", } } #[cfg(test)] mod tests {}
use std::collections::HashMap; use std::io::{prelude::*, Cursor, SeekFrom}; use failure::{format_err, Error}; pub mod interval; pub mod rom; pub mod rommap; pub mod util; pub mod verify; pub use rom::NeutopiaRom; pub use verify::{verify, RomInfo}; #[derive(Clone, Debug)] pub struct Room { pub warps: Vec<u8>, pub enemies: Vec<u8>, pub objects: Vec<rom::object::TableEntry>, } #[derive(Clone, Debug)] pub struct Area { pub rooms: Vec<Room>, pub chest_table: Vec<rom::Chest>, } #[derive(Clone, Debug)] pub struct Chest { pub info: rom::Chest, pub area: u8, pub room: u8, pub index: u8, } #[derive(Clone, Debug)] pub struct Conditional { pub data: Vec<rom::object::TableEntry>, } pub struct Neutopia { pub areas: Vec<Area>, pub conditionals: HashMap<rom::Chest, Conditional>, pub rom_data: Vec<u8>, n: NeutopiaRom, } impl Neutopia { pub fn new(data: &[u8]) -> Result<Self, Error> { let mut rando = Self { n: NeutopiaRom::new(data)?, areas: Vec::new(), conditionals: HashMap::new(), rom_data: Vec::from(data), }; for area_idx in 0..=0xf { rando.import_area(area_idx)?; } Ok(rando) } fn import_area(&mut self, area_idx: usize) -> Result<(), Error> { let room_info_table = &self.n.room_info_tables[area_idx]; let chest_table = &self.n.chest_tables[&self.n.chest_table_pointers[area_idx]]; let mut rooms = Vec::new(); for room_idx in 0u8..0x40 { let room = &room_info_table[&room_idx]; let mut object_table = rom::object::parse_object_table(&room.object_table)?; if object_table.len() > 2 { let mut i = 0; while (i + 2) < object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &chest_table[id as usize]; let next = object_table[i + 1].clone(); let next_next = object_table[i + 2].clone(); if next.is_conditional() { object_table.remove(i + 1); object_table.remove(i + 1); self.conditionals.insert( chest.clone(), Conditional { data: vec![next, next_next], }, ); } } i += 1; } } rooms.push(Room { warps: room.warp_table.clone(), enemies: room.enemy_table.clone(), objects: object_table, }); } self.areas.push(Area { rooms, chest_table: chest_table.clone(), }); Ok(()) } pub fn filter_chests(&self, filter: impl Fn(&Chest) -> bool) -> Vec<Chest> { let mut chests = Vec::new(); for (area_idx, area) in self.areas.iter().enumerate() { for (room_idx, room) in area.rooms.iter().enumerate() { let mut chest_index = 0; for entry in &room.objects { if let Some(id) = entry.chest_id() { let chest = Chest { info: area.chest_table[id as usize].clone(), area: area_idx as u8, room: room_idx as u8, index: chest_index, }; chest_index += 1; if filter(&chest) { chests.push(chest); } } } } } chests } fn get_table_id_for_chest(&self, chest: &Chest) -> Result<usize, Error> { let area = &self.areas[chest.area as usize]; let room = &area.rooms[chest.room as usize]; let mut chest_index = 0; for obj_entry in &room.objects { if let Some(id) = obj_entry.chest_id() { if chest_index == chest.index { return Ok(id as usize); } chest_index += 1; } } Err(format_err!("can't find id for chest {:?}", chest)) }
fn write_area(&self, area_idx: usize, rom_writer: &mut Cursor<Vec<u8>>) -> Result<u32, Error> { let area = &self.areas[area_idx]; let cur_offset = rom_writer.position(); let mut room_ptrs = Cursor::new(Vec::new()); let room_ptrs_offset = cur_offset; let room_data_offset = cur_offset + 0x40 * 3; rom_writer.seek(SeekFrom::Start(room_data_offset as u64))?; for room_idx in 0..0x40 { let room = &area.rooms[room_idx]; let room_offset = rom_writer.position(); room_ptrs.write_all(&util::rom_offset_to_pointer(room_offset as u32))?; let mut object_table = room.objects.clone(); for i in 0..object_table.len() { if let Some(id) = object_table[i].chest_id() { let chest = &area.chest_table[id as usize]; let loc = match object_table[i].loc() { Some(loc) => loc, _ => continue, }; if let Some(cond) = self.conditionals.get(&chest) { for j in 0..cond.data.len() { let mut entry = cond.data[j].clone(); if let rom::object::TableEntry::Object(ref mut o) = entry { o.x = loc.0; o.y = loc.1; } object_table.insert(i + j + 1, entry); } break; } } } rom_writer.seek(SeekFrom::Current(3 * 3))?; let warp_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.warps)?; let enemy_table_ptr = rom_writer.position() as u32; rom_writer.write_all(&room.enemies)?; rom_writer.write_all(&[0xff])?; let object_table_ptr = rom_writer.position() as u32; for o in &object_table { o.write(rom_writer)?; } rom_writer.write_all(&[0xff])?; let room_end_pos = rom_writer.position(); rom_writer.seek(SeekFrom::Start(room_offset))?; rom_writer.write_all(&util::rom_offset_to_pointer(warp_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(enemy_table_ptr))?; rom_writer.write_all(&util::rom_offset_to_pointer(object_table_ptr))?; rom_writer.seek(SeekFrom::Start(room_end_pos))?; } let next_offset = rom_writer.position() as u32; rom_writer.seek(SeekFrom::Start(room_ptrs_offset as u64))?; rom_writer.write_all(room_ptrs.get_ref())?; rom_writer.seek(SeekFrom::Start( rommap::AREA_TABLE as u64 + area_idx as u64 * 3, ))?; rom_writer.write_all(&util::rom_offset_to_pointer(room_ptrs_offset as u32))?; Ok(next_offset) } pub fn write(&self) -> Result<Vec<u8>, Error> { let mut rom_writer = Cursor::new(self.rom_data.clone()); let area_range = 4..=0xf; for area_idx in area_range.clone() { let area = &self.areas[area_idx]; let offset = 0x4fe00 + (0x20 * area_idx as u64); rom_writer.seek(SeekFrom::Start(offset))?; for chest in &area.chest_table { chest.write(&mut rom_writer)?; } rom_writer.seek(SeekFrom::Start( rommap::CHEST_TABLE as u64 + 3 * area_idx as u64, ))?; let ptr = util::rom_offset_to_pointer(offset as u32); rom_writer.write_all(&ptr)?; } let mut cur_offset = self.n.area_pointers[4]; let mut offset_c = None; for area_idx in area_range { if area_idx == 0xc { offset_c = Some(cur_offset); } rom_writer.seek(SeekFrom::Start(cur_offset as u64))?; cur_offset = self.write_area(area_idx, &mut rom_writer)? 
} if let Some(offset) = offset_c { rom_writer.seek(SeekFrom::Start(rommap::AREA_TABLE as u64 + 0x10 * 3))?; rom_writer.write_all(&util::rom_offset_to_pointer(offset as u32))?; } Ok(rom_writer.into_inner()) } } pub fn area_name(area: u8) -> &'static str { match area { 0x0 => "Land Sphere", 0x1 => "Subterranean Sphere", 0x2 => "Sea Sphere", 0x3 => "Sky Sphere", 0x4 => "Crypt 1", 0x5 => "Crypt 2", 0x6 => "Crypt 3", 0x7 => "Crypt 4", 0x8 => "Crypt 5", 0x9 => "Crypt 6", 0xa => "Crypt 7", 0xb => "Crypt 8", 0xc => "Land Sphere Rooms", 0xd => "Subterranean Sphere Rooms", 0xe => "Sea Sphere Rooms", 0xf => "Sky Sphere Rooms", _ => "Unknown", } } #[cfg(test)] mod tests {}
pub fn update_chests(&mut self, chests: &[Chest]) -> Result<(), Error> { for chest in chests { let id = self.get_table_id_for_chest(chest)?; let entry = self.areas[chest.area as usize] .chest_table .get_mut(id as usize) .ok_or_else(|| format_err!("incoherent chest id {:02x}", id))?; *entry = chest.info.clone(); } Ok(()) }
function_block-full_function
[ { "content": "pub fn verify(data: &[u8]) -> Result<RomInfo, Error> {\n\n let expected_size = 384 * 1024;\n\n let header_size = 0x200;\n\n\n\n let (headered, buffer) = if data.len() == expected_size {\n\n (false, &data as &[u8])\n\n } else if data.len() == expected_size + header_size {\n\n (true, &data[header_size..])\n\n } else {\n\n return Err(format_err!(\n\n \"Rom size ({}) is neither the expected size of the headered({}) nor the un-headered({}) rom\",\n\n data.len(), expected_size + header_size, expected_size));\n\n };\n\n\n\n let digest = md5::compute(buffer);\n\n let md5_hash = format!(\"{:x}\", digest);\n\n\n\n let db_entry = KNOWN_ROMS.get(&md5_hash).map_or(Default::default(), |o| *o);\n\n let known = KNOWN_ROMS.contains_key(&md5_hash);\n\n\n\n Ok(RomInfo {\n\n headered,\n\n md5_hash,\n\n known,\n\n desc: db_entry.desc.into(),\n\n region: db_entry.region,\n\n })\n\n}\n", "file_path": "neutopia/src/verify.rs", "rank": 0, "score": 305486.6041521085 }, { "content": "pub fn object_table_len(data: &[u8]) -> Result<usize, Error> {\n\n let (i, _) =\n\n many0(parse_object_table_entry)(data).map_err(|e| format_err!(\"parse error: {}\", e))?;\n\n\n\n if !i.is_empty() && i[0] != 0xff {\n\n return Err(format_err!(\"unparsed input: {:x?}\", i));\n\n }\n\n\n\n Ok(data.len() - i.len())\n\n}\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 1, "score": 274050.31584357086 }, { "content": "pub fn parse_object_table(data: &[u8]) -> Result<Vec<TableEntry>, Error> {\n\n let (i, table) =\n\n many0(parse_object_table_entry)(data).map_err(|e| format_err!(\"parse error: {}\", e))?;\n\n\n\n if !i.is_empty() {\n\n return Err(format_err!(\"unparsed input: {:x?}\", i));\n\n }\n\n\n\n Ok(table)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n fn run_parse_test(data: &[u8], entry: TableEntry) {\n\n let mut c = Cursor::new(Vec::new());\n\n entry.write(&mut c).unwrap();\n\n let enc_data = c.into_inner();\n", "file_path": "neutopia/src/rom/object.rs", "rank": 2, "score": 257910.98120532406 }, { "content": "// Shuffle all items within each crypt. Does not touch overworld items.\n\nfn crypt_rando(rng: &mut impl Rng, rom_data: &[u8]) -> Result<Vec<u8>, Error> {\n\n let mut n = Neutopia::new(rom_data)?;\n\n\n\n for area_idx in 0x4..=0xb {\n\n // Find all the chest we want to randomize.\n\n let mut chests = n.filter_chests(|chest| {\n\n // Chest is in current area\n\n (chest.area == area_idx)\n\n // Chest does not contain medallion\n\n && (chest.info.item_id < 0x12 || chest.info.item_id >= (0x12 + 8))\n\n });\n\n\n\n // Shuffle the chests.\n\n let mut randomized_chests: Vec<rom::Chest> =\n\n chests.iter().map(|chest| chest.info.clone()).collect();\n\n randomized_chests.shuffle(rng);\n\n\n\n // Update the chests' info\n\n for (i, chest) in chests.iter_mut().enumerate() {\n\n chest.info = randomized_chests[i].clone();\n\n }\n\n\n\n n.update_chests(&chests)?;\n\n }\n\n\n\n n.write()\n\n}\n\n\n", "file_path": "rando/src/lib.rs", "rank": 3, "score": 255135.900075224 }, { "content": "// Shuffle all items across crypts and overworld. 
Does not contain logic\n\n// to make sure seed is completable.\n\nfn global_rando(rng: &mut impl Rng, rom_data: &[u8]) -> Result<Vec<u8>, Error> {\n\n let n = Neutopia::new(rom_data)?;\n\n\n\n let mut state = State::new(n)?;\n\n let book = state.get_item_by_id(0xd)?;\n\n let moss = state.get_item_by_id(0x5)?;\n\n\n\n state.place_item(book, 0xc, 0x9, 0x0)?;\n\n state.place_item(moss, 0xc, 0x11, 0x1)?;\n\n\n\n // Place area locked items first.\n\n for area in 0x4..=0xf {\n\n let items = state.filter_items(|item| match item.area_lock {\n\n Some(a) => a == area,\n\n None => false,\n\n });\n\n\n\n for item in items {\n\n // Query checks each iteration so that we pick up changes we make.\n\n // Also, ignore key item gating as we know the area locked items\n", "file_path": "rando/src/lib.rs", "rank": 4, "score": 255135.90007522397 }, { "content": "pub fn pointer_to_rom_offset(data: &[u8]) -> Result<u32, Error> {\n\n assert!(data.len() >= 3);\n\n\n\n let value = ((data[0] as u32) << 13) | ((data[2] as u32 & 0x1f) << 8) | (data[1] as u32);\n\n if value < 0x40000 {\n\n Err(format_err!(\n\n \"can't convert: {:02x} {:02x} {:02x}\",\n\n data[0],\n\n data[1],\n\n data[2]\n\n ))\n\n } else {\n\n Ok(value - 0x40000)\n\n }\n\n}\n\n\n", "file_path": "neutopia/src/util.rs", "rank": 5, "score": 253406.58042026902 }, { "content": "pub fn decode_pointer_table(data: &[u8], entries: usize) -> Result<Vec<u32>, Error> {\n\n if data.len() < entries * 3 {\n\n return Err(format_err!(\n\n \"data only {} bytes in length. Need at least {}\",\n\n data.len(),\n\n entries * 3\n\n ));\n\n }\n\n\n\n let mut table = Vec::new();\n\n\n\n for i in 0..entries {\n\n let pointer = pointer_to_rom_offset(&data[(i * 3)..])?;\n\n table.push(pointer);\n\n }\n\n\n\n Ok(table)\n\n}\n\n\n", "file_path": "neutopia/src/util.rs", "rank": 6, "score": 244325.12237963206 }, { "content": "pub fn parse_chest_table(i: &[u8]) -> Result<Vec<Chest>, Error> {\n\n let (_, table) =\n\n many_m_n(8, 8, parse_chest)(i).map_err(|e| format_err!(\"parse error: {}\", e))?;\n\n\n\n Ok(table)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_parse_chest() {\n\n assert_eq!(\n\n parse_chest(&[0x11, 0x01, 0x85, 0x41]),\n\n Ok((\n\n &[][..],\n\n Chest {\n\n item_id: 0x11,\n\n arg: 0x01,\n\n text: 0x85,\n\n unknown: 0x41,\n\n }\n\n ))\n\n );\n\n }\n\n}\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 7, "score": 238858.78865325474 }, { "content": "fn apply_patches(data: &mut [u8]) -> Result<(), Error> {\n\n let mut c = Cursor::new(data);\n\n for patch in patches::PATCHES.iter() {\n\n apply_patch(&mut c, patch)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rando/src/lib.rs", "rank": 8, "score": 232561.07621493813 }, { "content": "fn verify_rom(data: Vec<u8>) -> Result<Vec<u8>, Error> {\n\n // Verify\n\n let info = neutopia::verify(&data)?;\n\n if !info.known {\n\n return Err(format_err!(\n\n \"Rom with MD5 hash {} is unrecognized.\",\n\n &info.md5_hash\n\n ));\n\n }\n\n if info.region != Region::NA {\n\n return Err(format_err!(\n\n \"Region {:?} rom not supported. 
Please use NA rom.\",\n\n &info.region\n\n ));\n\n }\n\n\n\n if info.headered {\n\n Ok(data[0x200..].to_vec())\n\n } else {\n\n Ok(data)\n\n }\n\n}\n\n\n", "file_path": "rando/src/lib.rs", "rank": 9, "score": 229286.44956774157 }, { "content": "fn decode_section(data: &mut [u8]) -> Result<(), Error> {\n\n // First de-salt the data.\n\n let mut salt = data[0];\n\n #[allow(clippy::needless_range_loop)]\n\n for i in 1..8 {\n\n data[i] ^= salt_byte(salt);\n\n salt = (salt + 1) & 0x3f;\n\n }\n\n\n\n // Now do a \"forward xor\" on the data.\n\n for i in (0..6).rev() {\n\n data[i + 1] ^= data[i];\n\n }\n\n\n\n // Now calc checksum\n\n let mut sum = 0;\n\n #[allow(clippy::needless_range_loop)]\n\n for i in 0..7 {\n\n sum += data[i] & 0x3f;\n\n }\n", "file_path": "neutil/src/password.rs", "rank": 10, "score": 213098.58881282152 }, { "content": "pub fn read_object_table(data: &[u8]) -> Vec<u8> {\n\n let mut table = Vec::new();\n\n let mut i = 0;\n\n loop {\n\n let val = data[i];\n\n if val == 0xff {\n\n break;\n\n }\n\n table.push(val);\n\n\n\n i += 1;\n\n }\n\n table\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_pointer_to_rom_offset() {\n\n assert_eq!(pointer_to_rom_offset(&[0x48, 0x4e, 0x45]).unwrap(), 0x5054e);\n\n assert_eq!(pointer_to_rom_offset(&[0x49, 0x44, 0x51]).unwrap(), 0x53144);\n\n }\n\n}\n", "file_path": "neutopia/src/util.rs", "rank": 11, "score": 207913.47068899916 }, { "content": "fn write_area_markdown(opt: &DocOpt, n: &NeutopiaRom, area_index: usize) -> Result<(), Error> {\n\n let mut path: PathBuf = opt.outdir.clone();\n\n path.push(format!(\"area_{:02x}.md\", area_index));\n\n let mut f = File::create(path)?;\n\n\n\n writeln!(f, \"# Area {:01X}\\n\", area_index)?;\n\n\n\n writeln!(f, \"## Overview\\n\")?;\n\n writeln!(f, \"| | |\")?;\n\n writeln!(f, \"|-|-|\")?;\n\n writeln!(\n\n f,\n\n \"| Area data table pointer | {:05x} |\",\n\n n.area_pointers[area_index]\n\n )?;\n\n writeln!(\n\n f,\n\n \"| Room order table pointer | {:05x} |\",\n\n n.room_order_pointers[area_index]\n\n )?;\n", "file_path": "neutil/src/doc.rs", "rank": 12, "score": 206034.57548144777 }, { "content": "pub fn randomize(config: &Config, data: &[u8]) -> Result<RandomizedGame, Error> {\n\n // Let the user specify a seed in base36. 
Otherwise randomly generate one.\n\n let seed = match &config.seed {\n\n Some(s) => u64::from_str_radix(s, 36)\n\n .map_err(|e| format_err!(\"Seed name must be a valid base36 64 bit number: {}\", e))?,\n\n None => rand::thread_rng().gen(),\n\n };\n\n\n\n let mut rng = Pcg32::seed_from_u64(seed);\n\n\n\n let mut buffer = verify_rom(data.to_vec())?;\n\n\n\n apply_patches(&mut buffer)?;\n\n\n\n let new_data = match config.ty {\n\n RandoType::Local => crypt_rando(&mut rng, &buffer)?,\n\n RandoType::Global => global_rando(&mut rng, &buffer)?,\n\n _ => buffer,\n\n };\n\n\n\n Ok(RandomizedGame {\n\n seed: format!(\"{:#}\", radix_36(seed)),\n\n data: new_data,\n\n })\n\n}\n", "file_path": "rando/src/lib.rs", "rank": 13, "score": 204197.4812952578 }, { "content": "fn write_byte_array(f: &mut File, data: &[u8]) -> Result<(), Error> {\n\n write!(f, \"```\\n[\")?;\n\n for (i, val) in data.iter().enumerate() {\n\n if i != 0 {\n\n write!(f, \", \")?;\n\n }\n\n write!(f, \"{:02x}\", val)?;\n\n }\n\n writeln!(f, \"]\\n```\\n\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "neutil/src/doc.rs", "rank": 14, "score": 195152.50919027312 }, { "content": "fn parse_object_info(i: &[u8]) -> IResult<&[u8], ObjectInfo> {\n\n let (i, loc) = take(1usize)(i)?;\n\n let (i, id) = take(1usize)(i)?;\n\n let x = loc[0] & 0xf;\n\n let y = loc[0] >> 4;\n\n\n\n Ok((i, ObjectInfo { x, y, id: id[0] }))\n\n}\n\n\n\nimpl ObjectInfo {\n\n fn write(&self, w: &mut impl Write) -> Result<(), Error> {\n\n let loc = (self.x & 0xf) | ((self.y & 0xf) << 4);\n\n w.write_u8(loc)?;\n\n w.write_u8(self.id)?;\n\n\n\n Ok(())\n\n }\n\n}\n\nmacro_rules! gen_object_type {\n\n ($parse_func_name: ident, $write_func_name: ident, $tag: literal, $ty: ident) => {\n", "file_path": "neutopia/src/rom/object.rs", "rank": 15, "score": 188543.820998942 }, { "content": "fn parse_chest(i: &[u8]) -> IResult<&[u8], Chest> {\n\n let (i, item_id) = le_u8(i)?;\n\n let (i, arg) = le_u8(i)?;\n\n let (i, text) = le_u8(i)?;\n\n let (i, unknown) = le_u8(i)?;\n\n\n\n Ok((\n\n i,\n\n Chest {\n\n item_id,\n\n arg,\n\n text,\n\n unknown,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 16, "score": 188030.87397269436 }, { "content": "fn parse_hidden_room(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0x0d])(i)?;\n\n let (i, data) = take(3usize)(i)?;\n\n Ok((i, TableEntry::HiddenRoom([data[0], data[1], data[2]])))\n\n}\n\ngen_data_write!(write_hidden_room, 0x0d, HiddenRoom);\n\n\n\ngen_simple_type!(\n\n parse_falcon_boots_needed,\n\n write_falcon_boots_needed,\n\n 0x81,\n\n FalconBootsNeeded\n\n);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 17, "score": 186687.1084345522 }, { "content": "fn parse_object_table_entry(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n // There seems to be a limit on the size of tuples in for alt so we\n\n // split it.\n\n alt((\n\n alt((\n\n parse_object,\n\n parse_open_door,\n\n parse_push_block_gated_door,\n\n parse_enemy_gated_door,\n\n parse_bombable_door,\n\n parse_push_block_gated_object,\n\n parse_enemy_gated_object,\n\n parse_bell_gated_object,\n\n parse_dark_room,\n\n parse_unknown_0b,\n\n parse_burnable,\n\n parse_hidden_room,\n\n parse_falcon_boots_needed,\n\n parse_npc,\n\n parse_boss_door,\n", "file_path": "neutopia/src/rom/object.rs", "rank": 18, "score": 179295.7387847558 }, { "content": "fn apply_patch<W: Write + Seek>(w: &mut W, patch_data: &[u8]) -> Result<(), Error> {\n\n let patch = Patch::parse(patch_data)?;\n\n\n\n for hunk in patch.hunks() {\n\n 
w.seek(SeekFrom::Start(hunk.offset() as u64))?;\n\n w.write_all(hunk.payload())?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rando/src/lib.rs", "rank": 19, "score": 176493.58736966562 }, { "content": "fn parse_npc(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0x9a])(i)?;\n\n let (i, data) = take(5usize)(i)?;\n\n Ok((\n\n i,\n\n TableEntry::Npc([data[0], data[1], data[2], data[3], data[4]]),\n\n ))\n\n}\n\ngen_data_write!(write_npc, 0x9a, Npc);\n\n\n\ngen_object_type!(parse_ouch_rope, write_ouch_rope, 0xbd, OuchRope);\n\ngen_object_type!(\n\n parse_arrow_launcher,\n\n write_arrow_launcher,\n\n 0xbf,\n\n ArrowLauncher\n\n);\n\ngen_object_type!(parse_swords, write_swords, 0xc0, Swords);\n\ngen_object_type!(parse_ghost_spawner, write_ghost_spawner, 0xc1, GhostSpawner);\n\ngen_object_type!(\n\n parse_fireball_spawner,\n\n write_fireball_spawner,\n\n 0xc6,\n\n FireballSpawner\n\n);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 20, "score": 171766.8761073599 }, { "content": "fn parse_unknown_0b(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0x0b])(i)?;\n\n let (i, data) = take(3usize)(i)?;\n\n Ok((i, TableEntry::Unknown0b([data[0], data[1], data[2]])))\n\n}\n\ngen_data_write!(write_unknown_0b, 0x0b, Unknown0b);\n\n\n\ngen_object_type!(parse_burnable, write_burnable, 0x0c, Burnable);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 21, "score": 171766.8761073599 }, { "content": "fn parse_shop_item(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0xda])(i)?;\n\n let (i, data) = take(7usize)(i)?;\n\n Ok((\n\n i,\n\n TableEntry::ShopItem([\n\n data[0], data[1], data[2], data[3], data[4], data[5], data[6],\n\n ]),\n\n ))\n\n}\n\ngen_data_write!(write_shop_item, 0xda, ShopItem);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 23, "score": 167041.1156420394 }, { "content": "fn parse_unknown_e1(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0xe1])(i)?;\n\n let (i, data) = take(9usize)(i)?;\n\n Ok((\n\n i,\n\n TableEntry::UnknownE1([\n\n data[0], data[1], data[2], data[3], data[4], data[5], data[6], data[7], data[8],\n\n ]),\n\n ))\n\n}\n\ngen_data_write!(write_unknown_e1, 0xe1, UnknownE1);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 24, "score": 167041.1156420394 }, { "content": "fn parse_unknown_f4(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([0xf4])(i)?;\n\n let (i, data) = take(5usize)(i)?;\n\n Ok((\n\n i,\n\n TableEntry::UnknownF4([data[0], data[1], data[2], data[3], data[4]]),\n\n ))\n\n}\n\ngen_data_write!(write_unknown_f4, 0xf4, UnknownF4);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 25, "score": 167041.1156420394 }, { "content": "pub fn rom_offset_to_pointer(offset: u32) -> [u8; 3] {\n\n let addr = offset + 0x40000;\n\n\n\n [\n\n (addr >> 13) as u8,\n\n (addr & 0xff) as u8,\n\n (((addr >> 8) & 0x1f) as u8) | 0x40,\n\n ]\n\n}\n\n\n", "file_path": "neutopia/src/util.rs", "rank": 26, "score": 160793.77665564773 }, { "content": "fn main() -> Result<(), Error> {\n\n let asm_src_dir = PathBuf::from(\"src\").join(\"asm\");\n\n println!(\"cargo:rerun-if-changed={}\", asm_src_dir.to_string_lossy());\n\n for entry in fs::read_dir(&asm_src_dir)? 
{\n\n let path = entry?.path();\n\n if path.is_file() {\n\n if let Some(ext) = path.extension() {\n\n if ext == \"asm\" {\n\n println!(\"cargo:rerun-if-changed={}\", &path.to_string_lossy());\n\n handle_asm(&path)?;\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "rando/build.rs", "rank": 27, "score": 148397.61195522855 }, { "content": "fn main() -> Result<(), Error> {\n\n let opt = Opt::from_args();\n\n\n\n let mut f = File::open(&opt.rom)?;\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer)?;\n\n\n\n let config = rando::Config {\n\n seed: opt.seed,\n\n ty: opt.ty,\n\n };\n\n\n\n let r = rando::randomize(&config, &buffer)?;\n\n\n\n let filename = &opt\n\n .out\n\n .unwrap_or_else(|| PathBuf::from(format!(\"neutopia-randomizer-{}.pce\", r.seed)));\n\n\n\n let mut f = File::create(filename)?;\n\n f.write_all(&r.data)?;\n\n\n\n println!(\"wrote {}\", filename.display());\n\n\n\n Ok(())\n\n}\n", "file_path": "rando-cli/src/main.rs", "rank": 28, "score": 139975.73777825967 }, { "content": "fn decode_char(c: char) -> Result<u8, Error> {\n\n if !c.is_ascii() {\n\n return Err(format_err!(\"invalid character {}\", c as char));\n\n }\n\n let c = c as u8;\n\n\n\n // 0 - 25\n\n if b'A' <= c && c <= b'Z' {\n\n return Ok(c - b'A');\n\n }\n\n // 26 - 34\n\n if b'1' <= c && c <= b'9' {\n\n return Ok(c - b'1' + 26);\n\n }\n\n // 35 - 60\n\n if b'a' <= c && c <= b'z' {\n\n return Ok(c - b'a' + 35);\n\n }\n\n if c == b'#' {\n\n return Ok(61);\n", "file_path": "neutil/src/password.rs", "rank": 29, "score": 139819.4193326441 }, { "content": "fn get_checks() -> Result<BTreeMap<LocationId, Check>, Error> {\n\n let checks_vec: Vec<Check> = serde_json::from_slice(&CHECKS_DATA)\n\n .map_err(|e| format_err!(\"failed to parse checks JSON: {}\", e))?;\n\n\n\n let mut checks = BTreeMap::new();\n\n for check in checks_vec {\n\n let loc = LocationId {\n\n area: check.area,\n\n room: check.room,\n\n index: check.index,\n\n };\n\n if checks.contains_key(&loc) {\n\n return Err(format_err!(\n\n \"duplicate location {:?} for check {}\",\n\n &loc,\n\n &check.name\n\n ));\n\n }\n\n checks.insert(loc, check);\n\n }\n\n\n\n Ok(checks)\n\n}\n", "file_path": "rando/src/state.rs", "rank": 30, "score": 131990.35740320248 }, { "content": "fn handle_asm(path: &PathBuf) -> Result<(), Error> {\n\n let bass = bass_path();\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let tmp_dir = out_path.join(\"asm_build\");\n\n\n\n let ips = path.with_extension(\"ips\");\n\n let ips = ips.file_name().unwrap();\n\n let ips = out_path.join(\"asm\").join(PathBuf::from(&ips));\n\n\n\n fs::create_dir_all(ips.parent().unwrap()).map_err(|e| {\n\n format_err!(\n\n \"unable to create dir {}: {}\",\n\n ips.parent().unwrap().to_string_lossy(),\n\n e\n\n )\n\n })?;\n\n fs::create_dir_all(&tmp_dir)\n\n .map_err(|e| format_err!(\"unable to create dir {}: {}\", tmp_dir.to_string_lossy(), e))?;\n\n asm_build::build(&bass, 0x60000, &tmp_dir, &[path.clone()], &ips)\n\n .map_err(|e| format_err!(\"asm_build failed: {}\", e))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rando/build.rs", "rank": 31, "score": 126819.61490208472 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct DbEntry {\n\n desc: &'static str,\n\n region: Region,\n\n}\n\n\n\nimpl Default for DbEntry {\n\n fn default() -> Self {\n\n Self {\n\n desc: \"Unrecognized ROM\",\n\n region: Region::Unknown,\n\n }\n\n }\n\n}\n\n\n\nlazy_static! 
{\n\n static ref KNOWN_ROMS: HashMap<String, DbEntry> = {\n\n let mut roms = HashMap::new();\n\n roms.insert(\n\n \"eb0789088fc70be42b2f994c1b66be21\".to_string(),\n\n DbEntry {\n", "file_path": "neutopia/src/verify.rs", "rank": 32, "score": 125150.92995517763 }, { "content": "fn main() -> Result<(), Error> {\n\n let opt = Opt::from_args();\n\n match &opt {\n\n Opt::Checks(checks_opt) => checks::command(checks_opt),\n\n Opt::Doc(doc_opt) => doc::command(doc_opt),\n\n Opt::Info(info_opt) => info::command(info_opt),\n\n Opt::Password(password_opt) => password::command(password_opt),\n\n }\n\n}\n", "file_path": "neutil/src/main.rs", "rank": 33, "score": 121064.93095615275 }, { "content": "#[wasm_bindgen(start)]\n\npub fn run_app() {\n\n wasm_logger::init(wasm_logger::Config::default());\n\n App::<Model>::new().mount_to_body();\n\n}\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n fn saveAs(blob: Blob, filename: String);\n\n fn saveRom(data: &[u8], filename: String);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_website_mode() {\n\n assert_eq!(parse_semver(\"v0.1.0-beta1\"), WebsiteMode::Beta);\n\n assert_eq!(parse_semver(\"v0.1.0\"), WebsiteMode::Release);\n\n assert_eq!(parse_semver(\"UNKNOWN\"), WebsiteMode::Dev);\n\n }\n\n}\n", "file_path": "rando-web/src/lib.rs", "rank": 34, "score": 102348.33805525023 }, { "content": "fn salt_byte(i: u8) -> u8 {\n\n let table = [\n\n 0x1f, 0x3a, 0x06, 0x3f, 0x21, 0x3f, 0x30, 0x37, 0x1a, 0x01, 0x20, 0x3f, 0x35, 0x03, 0x29,\n\n 0x2b, 0x3e, 0x3f, 0x01, 0x00, 0x03, 0x2c, 0x37, 0x07, 0x3d, 0x11, 0x1e, 0x34, 0x3f, 0x19,\n\n 0x30, 0x28, 0x37, 0x37, 0x3c, 0x0d, 0x1e, 0x31, 0x0c, 0x05, 0x35, 0x11, 0x3f, 0x24, 0x3f,\n\n 0x3b, 0x3f, 0x26, 0x3b, 0x33, 0x3c, 0x39, 0x2e, 0x3e, 0x31, 0x08, 0x38, 0x1f, 0x00, 0x37,\n\n 0x19, 0x24, 0x12, 0x00,\n\n ];\n\n table[(i & 0x3f) as usize]\n\n}\n\n\n", "file_path": "neutil/src/password.rs", "rank": 35, "score": 85920.84107197687 }, { "content": "use std::io::prelude::*;\n\n\n\nuse byteorder::WriteBytesExt;\n\nuse failure::{format_err, Error};\n\nuse nom::{multi::many_m_n, number::complete::le_u8, IResult};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)]\n\npub struct Chest {\n\n pub item_id: u8,\n\n pub arg: u8,\n\n pub text: u8,\n\n pub unknown: u8,\n\n}\n\n\n\nimpl Chest {\n\n pub fn write(&self, w: &mut impl Write) -> Result<(), Error> {\n\n w.write_u8(self.item_id)?;\n\n w.write_u8(self.arg)?;\n\n w.write_u8(self.text)?;\n\n w.write_u8(self.unknown)?;\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 36, "score": 75808.56055924091 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n #[allow(clippy::useless_format)]\n\n pub fn get_item_name(&self) -> String {\n\n match self.item_id {\n\n 0x00 => format!(\"Bombs x{}\", self.arg),\n\n 0x01 => format!(\"Medicine\"),\n\n 0x02 => format!(\"Fire Wand\"),\n\n 0x03 => format!(\"Sky Bell\"),\n\n 0x04 => format!(\"Wings\"),\n\n 0x05 => format!(\"Moonbeam Moss\"),\n\n 0x06 => format!(\"Magic Ring\"),\n\n 0x07 => format!(\"Placeholder\"),\n\n 0x08 => match self.arg {\n\n 1 => format!(\"Starter Sword\"),\n\n 2 => format!(\"Bronze Sword\"),\n\n 3 => format!(\"Steel Sword\"),\n\n 4 => format!(\"Strongest Sword\"),\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 37, "score": 75791.06961151221 }, { "content": " _ => format!(\"Unknown Sword\"),\n\n },\n\n 0x09 => match self.arg {\n\n 1 => format!(\"Starter Armor\"),\n\n 2 => format!(\"Bronze Armor\"),\n\n 3 => format!(\"Steel Armor\"),\n\n 4 => format!(\"Strongest Armor\"),\n\n _ => 
format!(\"Unknown Armor\"),\n\n },\n\n 0x0a => match self.arg {\n\n 1 => format!(\"Starter Shield\"),\n\n 2 => format!(\"Bronze Shield\"),\n\n 3 => format!(\"Steel Shield\"),\n\n 4 => format!(\"Strongest Shield\"),\n\n _ => format!(\"Unknown Shield\"),\n\n },\n\n 0x0b => format!(\"Falcon Shoes\"),\n\n 0x0c => format!(\"Rainbow Drop\"),\n\n 0x0d => format!(\"Book of Revival\"),\n\n 0x0e => format!(\"Placeholder\"),\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 38, "score": 75786.7958790983 }, { "content": " 0x0f => format!(\"Placeholder\"),\n\n 0x10 => format!(\"Crystal Ball\"),\n\n 0x11 => format!(\"Crypt Key\"),\n\n 0x12 => format!(\"Crypt 1 Medallion\"),\n\n 0x13 => format!(\"Crypt 2 Medallion\"),\n\n 0x14 => format!(\"Crypt 3 Medallion\"),\n\n 0x15 => format!(\"Crypt 4 Medallion\"),\n\n 0x16 => format!(\"Crypt 5 Medallion\"),\n\n 0x17 => format!(\"Crypt 6 Medallion\"),\n\n 0x18 => format!(\"Crypt 7 Medallion\"),\n\n 0x19 => format!(\"Crypt 8 Medallion\"),\n\n 0x20 => format!(\"Placeholder\"),\n\n _ => format!(\"Unknown\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "neutopia/src/rom/chest.rs", "rank": 39, "score": 75786.7958790983 }, { "content": " pub enemy_table: Vec<u8>,\n\n pub object_table: Vec<u8>,\n\n}\n\n\n\npub struct NeutopiaRom {\n\n pub area_pointers: Vec<u32>,\n\n pub room_order_pointers: Vec<u32>,\n\n pub chest_table_pointers: Vec<u32>,\n\n\n\n pub room_order_tables: HashMap<u32, Vec<u8>>,\n\n pub room_info_tables: Vec<HashMap<u8, Room>>,\n\n pub chest_tables: HashMap<u32, Vec<Chest>>,\n\n\n\n pub room_info_intervals: HashMap<u8, IntervalStore<usize>>,\n\n}\n\n\n\nimpl NeutopiaRom {\n\n pub fn new(data: &[u8]) -> Result<NeutopiaRom, Error> {\n\n let area_pointers =\n\n util::decode_pointer_table(&data[rommap::AREA_TABLE..], rommap::AREA_TABLE_COUNT)?;\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 40, "score": 75781.32547678387 }, { "content": "use std::collections::HashMap;\n\n\n\nuse failure::{format_err, Error};\n\n\n\nuse super::{interval::IntervalStore, rommap, util};\n\n\n\nmod chest;\n\npub mod object;\n\npub use chest::Chest;\n\npub use object::ObjectInfo;\n\n\n\n#[derive(Debug)]\n\npub struct Room {\n\n pub base_addr: u32,\n\n\n\n pub warp_table_pointer: u32,\n\n pub enemy_table_pointer: u32,\n\n pub object_table_pointer: u32,\n\n\n\n pub warp_table: Vec<u8>,\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 41, "score": 75779.080250152 }, { "content": " let room_order_pointers = util::decode_pointer_table(\n\n &data[rommap::ROOM_ORDER_TABLE..],\n\n rommap::ROOM_ORDER_TABLE_COUNT,\n\n )?;\n\n let chest_table_pointers =\n\n util::decode_pointer_table(&data[rommap::CHEST_TABLE..], rommap::CHEST_TABLE_COUNT)?;\n\n\n\n let mut room_info_tables = Vec::new();\n\n let mut room_order_tables = HashMap::new();\n\n let mut chest_tables = HashMap::new();\n\n let mut room_info_intervals = HashMap::new();\n\n\n\n for (area_idx, area_ptr) in area_pointers.iter().enumerate() {\n\n let mut room_data_intervals: IntervalStore<usize> = IntervalStore::new();\n\n room_data_intervals.add(*area_ptr as usize, *area_ptr as usize + 0x40 * 3);\n\n let mut area_info = HashMap::new();\n\n for idx in 0..0x40 {\n\n let offset = (*area_ptr as usize) + (idx as usize) * 3;\n\n let offset = util::pointer_to_rom_offset(&data[offset..]).map_err(|e| {\n\n format_err!(\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 42, "score": 75775.40662199665 }, { "content": " object_table_pointer: object_table_pointer as u32,\n\n warp_table,\n\n enemy_table,\n\n object_table,\n\n },\n\n );\n\n }\n\n 
room_info_tables.push(area_info);\n\n room_info_intervals.insert(area_idx as u8, room_data_intervals);\n\n }\n\n\n\n for room_order_ptr in &room_order_pointers {\n\n let offset = *room_order_ptr as usize;\n\n let table = Vec::from(&data[offset..offset + 0x40]);\n\n\n\n room_order_tables.insert(*room_order_ptr, table);\n\n }\n\n\n\n for chest_table_ptr in &chest_table_pointers {\n\n let table = chest::parse_chest_table(&data[*chest_table_ptr as usize..])?;\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 43, "score": 75771.10057676598 }, { "content": " chest_tables.insert(*chest_table_ptr, table);\n\n }\n\n\n\n Ok(NeutopiaRom {\n\n area_pointers,\n\n room_order_pointers,\n\n chest_table_pointers,\n\n room_order_tables,\n\n room_info_tables,\n\n chest_tables,\n\n room_info_intervals,\n\n })\n\n }\n\n}\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 44, "score": 75769.27094474 }, { "content": "\n\n let warp_table = Vec::from(&data[warp_table_pointer..enemy_table_pointer]);\n\n let enemy_table = util::read_object_table(&data[enemy_table_pointer..]);\n\n // Todo, clean this up once everything parses.\n\n let len = object::object_table_len(&data[object_table_pointer..])?;\n\n let object_table = data[object_table_pointer..object_table_pointer + len].to_vec();\n\n\n\n room_data_intervals.add(warp_table_pointer, warp_table_pointer + warp_table.len());\n\n room_data_intervals.add(\n\n enemy_table_pointer,\n\n enemy_table_pointer + enemy_table.len() + 1,\n\n );\n\n room_data_intervals.add(object_table_pointer, object_table_pointer + len + 1);\n\n\n\n area_info.insert(\n\n idx as u8,\n\n Room {\n\n base_addr: offset as u32,\n\n warp_table_pointer: warp_table_pointer as u32,\n\n enemy_table_pointer: enemy_table_pointer as u32,\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 45, "score": 75768.72943507392 }, { "content": " \"can't decode room pointer {:02x}:{:02x}: {}\",\n\n area_idx,\n\n idx,\n\n e\n\n )\n\n })? 
as usize;\n\n\n\n let ptrs = util::decode_pointer_table(&data[offset..], 3).map_err(|e| {\n\n format_err!(\n\n \"can't decode room table pointers {:02x}:{:02x}: {}\",\n\n area_idx,\n\n idx,\n\n e\n\n )\n\n })?;\n\n let warp_table_pointer = ptrs[0] as usize;\n\n let enemy_table_pointer = ptrs[1] as usize;\n\n let object_table_pointer = ptrs[2] as usize;\n\n\n\n room_data_intervals.add(offset, offset + 3 * 3);\n", "file_path": "neutopia/src/rom/mod.rs", "rank": 46, "score": 75766.95006541639 }, { "content": "#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"basic\")]\n\nstruct Opt {\n\n #[structopt(long, parse(from_os_str), default_value = \"Neutopia (USA).pce\")]\n\n rom: PathBuf,\n\n\n\n #[structopt(long, parse(from_os_str))]\n\n out: Option<PathBuf>,\n\n\n\n #[structopt(long)]\n\n seed: Option<String>,\n\n\n\n #[structopt(long = \"type\", default_value = \"local\")]\n\n ty: RandoType,\n\n}\n\n\n", "file_path": "rando-cli/src/main.rs", "rank": 47, "score": 75136.5877279537 }, { "content": "struct Model {\n\n link: ComponentLink<Self>,\n\n\n\n reader: ReaderService,\n\n tasks: Vec<ReaderTask>,\n\n\n\n verified_str: String,\n\n}\n\n\n", "file_path": "rando-web/src/lib.rs", "rank": 48, "score": 75130.54945881489 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"0x{:02x} @ ({},{})\", self.id, self.x, self.y)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum TableEntry {\n\n Object(ObjectInfo),\n\n OpenDoor(u8),\n\n PushBlockGatedDoor(u8),\n\n EnemyGatedDoor(u8),\n\n BombableDoor(u8),\n\n PushBlockGatedObject(ObjectInfo),\n\n EnemyGatedObject(ObjectInfo),\n\n BellGatedObject(ObjectInfo),\n\n DarkRoom,\n\n BossDoor(u8),\n\n Unknown0b([u8; 3]),\n\n Burnable(ObjectInfo),\n\n HiddenRoom([u8; 3]),\n", "file_path": "neutopia/src/rom/object.rs", "rank": 49, "score": 74757.35264022498 }, { "content": " fn $parse_func_name(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([$tag])(i)?;\n\n let (i, info) = parse_object_info(i)?;\n\n\n\n Ok((i, TableEntry::$ty(info)))\n\n }\n\n\n\n fn $write_func_name(w: &mut impl Write, o: &TableEntry) -> Result<(), Error> {\n\n w.write_u8($tag)?;\n\n if let TableEntry::$ty(info) = o {\n\n info.write(w)?;\n\n } else {\n\n panic!(\"internal error\");\n\n }\n\n\n\n Ok(())\n\n }\n\n };\n\n}\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 50, "score": 74756.99162312929 }, { "content": "use std::fmt;\n\nuse std::io::prelude::*;\n\n\n\nuse byteorder::WriteBytesExt;\n\nuse failure::{format_err, Error};\n\nuse nom::{\n\n branch::alt,\n\n bytes::complete::{tag, take},\n\n multi::many0,\n\n IResult,\n\n};\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ObjectInfo {\n\n pub x: u8,\n\n pub y: u8,\n\n pub id: u8,\n\n}\n\n\n\nimpl fmt::Display for ObjectInfo {\n", "file_path": "neutopia/src/rom/object.rs", "rank": 51, "score": 74756.81671136992 }, { "content": " FalconBootsNeeded,\n\n Npc([u8; 5]),\n\n OuchRope(ObjectInfo),\n\n ArrowLauncher(ObjectInfo),\n\n Swords(ObjectInfo),\n\n GhostSpawner(ObjectInfo),\n\n FireballSpawner(ObjectInfo),\n\n ShopItem([u8; 7]),\n\n UnknownE1([u8; 9]),\n\n UnknownF4([u8; 5]),\n\n}\n\n\n\nimpl TableEntry {\n\n pub fn write(&self, w: &mut impl Write) -> Result<(), Error> {\n\n match self {\n\n Self::Object(_) => write_object(w, self)?,\n\n Self::OpenDoor(_) => write_open_door(w, self)?,\n\n Self::PushBlockGatedDoor(_) => write_push_block_gated_door(w, self)?,\n\n Self::EnemyGatedDoor(_) => write_enemy_gated_door(w, self)?,\n\n Self::BombableDoor(_) => 
write_bombable_door(w, self)?,\n", "file_path": "neutopia/src/rom/object.rs", "rank": 52, "score": 74756.49450201482 }, { "content": " }\n\n\n\n pub fn chest_id(&self) -> Option<u8> {\n\n if let Self::Object(o) = self {\n\n if 0x4c <= o.id && o.id <= (0x4c + 8) {\n\n return Some(o.id - 0x4c);\n\n }\n\n }\n\n None\n\n }\n\n\n\n pub fn is_conditional(&self) -> bool {\n\n match self {\n\n Self::Unknown0b(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn loc(&self) -> Option<(u8, u8)> {\n\n match self {\n", "file_path": "neutopia/src/rom/object.rs", "rank": 53, "score": 74756.14948633833 }, { "content": " Self::Object(o) => Some((o.x, o.y)),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TableEntry {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::Object(info) => write!(f, \"object {}\", info),\n\n Self::OpenDoor(data) => write!(f, \"open door 0x{:02x}\", data),\n\n Self::PushBlockGatedDoor(data) => write!(f, \"push block gated door 0x{:02x}\", data),\n\n Self::EnemyGatedDoor(data) => write!(f, \"enemy gated door 0x{:02x}\", data),\n\n Self::BombableDoor(data) => write!(f, \"bombable door 0x{:02x}\", data),\n\n Self::PushBlockGatedObject(info) => write!(f, \"push block gated object {}\", info),\n\n Self::EnemyGatedObject(info) => write!(f, \"enemy gated object {}\", info),\n\n Self::BellGatedObject(info) => write!(f, \"bell gated object {}\", info),\n\n Self::DarkRoom => write!(f, \"dark room\"),\n\n Self::BossDoor(data) => write!(f, \"boss door 0x{:02x}\", data),\n\n Self::Unknown0b(data) => write!(f, \"unknown object 0x0b {:x?}\", data),\n", "file_path": "neutopia/src/rom/object.rs", "rank": 54, "score": 74755.74567390967 }, { "content": "macro_rules! gen_u8_type {\n\n ($parse_func_name: ident, $write_func_name: ident, $tag: literal, $ty: ident) => {\n\n fn $parse_func_name(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([$tag])(i)?;\n\n let (i, data) = take(1usize)(i)?;\n\n\n\n Ok((i, TableEntry::$ty(data[0])))\n\n }\n\n\n\n fn $write_func_name(w: &mut impl Write, o: &TableEntry) -> Result<(), Error> {\n\n w.write_u8($tag)?;\n\n if let TableEntry::$ty(data) = o {\n\n w.write_u8(*data)?;\n\n } else {\n\n panic!(\"internal error\");\n\n }\n\n\n\n Ok(())\n\n }\n\n };\n", "file_path": "neutopia/src/rom/object.rs", "rank": 55, "score": 74754.77649291459 }, { "content": "}\n\n\n\nmacro_rules! gen_simple_type {\n\n ($parse_func_name: ident, $write_func_name: ident, $tag: literal, $ty: ident) => {\n\n fn $parse_func_name(i: &[u8]) -> IResult<&[u8], TableEntry> {\n\n let (i, _) = tag([$tag])(i)?;\n\n\n\n Ok((i, TableEntry::$ty))\n\n }\n\n\n\n fn $write_func_name(w: &mut impl Write, _o: &TableEntry) -> Result<(), Error> {\n\n Ok(w.write_u8($tag)?)\n\n }\n\n };\n\n}\n\nmacro_rules! 
gen_data_write {\n\n ($func_name: ident, $tag: literal, $ty: ident) => {\n\n fn $func_name(w: &mut impl Write, o: &TableEntry) -> Result<(), Error> {\n\n w.write_u8($tag)?;\n\n if let TableEntry::$ty(data) = o {\n", "file_path": "neutopia/src/rom/object.rs", "rank": 56, "score": 74753.06994937967 }, { "content": " assert_eq!(&data[..], &enc_data[..]);\n\n\n\n assert_eq!(parse_object_table_entry(data), Ok((&[][..], entry)));\n\n }\n\n\n\n #[test]\n\n fn test_parse_entries() {\n\n run_parse_test(\n\n &[0x00, 0x52, 0xa5],\n\n TableEntry::Object(ObjectInfo {\n\n x: 2,\n\n y: 5,\n\n id: 0xa5,\n\n }),\n\n );\n\n\n\n run_parse_test(&[0x01, 0x02], TableEntry::OpenDoor(0x02));\n\n\n\n run_parse_test(&[0x02, 0x01], TableEntry::PushBlockGatedDoor(0x01));\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 57, "score": 74748.41210073886 }, { "content": " run_parse_test(&[0x03, 0x08], TableEntry::EnemyGatedDoor(0x08));\n\n\n\n run_parse_test(&[0x05, 0x0a], TableEntry::BombableDoor(0x0a));\n\n\n\n run_parse_test(\n\n &[0x06, 0x25, 0x5a],\n\n TableEntry::PushBlockGatedObject(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0x07, 0x25, 0x5a],\n\n TableEntry::EnemyGatedObject(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n", "file_path": "neutopia/src/rom/object.rs", "rank": 58, "score": 74747.99962341308 }, { "content": " w.write_all(data)?;\n\n } else {\n\n panic!(\"internal error\");\n\n }\n\n Ok(())\n\n }\n\n };\n\n}\n\n\n\ngen_object_type!(parse_object, write_object, 0x00, Object);\n\ngen_u8_type!(parse_open_door, write_open_door, 0x01, OpenDoor);\n\ngen_u8_type!(\n\n parse_push_block_gated_door,\n\n write_push_block_gated_door,\n\n 0x02,\n\n PushBlockGatedDoor\n\n);\n\ngen_u8_type!(\n\n parse_enemy_gated_door,\n\n write_enemy_gated_door,\n", "file_path": "neutopia/src/rom/object.rs", "rank": 59, "score": 74747.36133602435 }, { "content": " Self::Burnable(info) => write!(f, \"burnable {}\", info),\n\n Self::HiddenRoom(data) => write!(f, \"hidden room {:x?}\", data),\n\n Self::FalconBootsNeeded => write!(f, \"falcon boots needed\"),\n\n Self::Npc(data) => write!(f, \"npc {:x?}\", data),\n\n Self::OuchRope(info) => write!(f, \"ouch rope segment {}\", info),\n\n Self::ArrowLauncher(info) => write!(f, \"arrow launcher {}\", info),\n\n Self::Swords(info) => write!(f, \"swords {}\", info),\n\n Self::GhostSpawner(info) => write!(f, \"ghost spawner {}\", info),\n\n Self::FireballSpawner(info) => write!(f, \"fireball spawner {}\", info),\n\n Self::ShopItem(data) => write!(f, \"shop item {:x?}\", data),\n\n Self::UnknownE1(data) => write!(f, \"unknown object 0xe1 {:x?}\", data),\n\n Self::UnknownF4(data) => write!(f, \"unknown object 0xf4 {:x?}\", data),\n\n }\n\n }\n\n}\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 60, "score": 74747.29135635986 }, { "content": " &[0xc0, 0x25, 0x5a],\n\n TableEntry::Swords(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0xc1, 0x25, 0x5a],\n\n TableEntry::GhostSpawner(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0xc6, 0x25, 0x5a],\n\n TableEntry::FireballSpawner(ObjectInfo {\n", "file_path": "neutopia/src/rom/object.rs", "rank": 61, "score": 74746.75815118392 }, { "content": " Self::PushBlockGatedObject(_) => write_push_block_gated_object(w, self)?,\n\n Self::EnemyGatedObject(_) => write_enemy_gated_object(w, self)?,\n\n Self::BellGatedObject(_) => write_bell_gated_object(w, self)?,\n\n Self::DarkRoom => 
write_dark_room(w, self)?,\n\n Self::BossDoor(_) => write_boss_door(w, self)?,\n\n Self::Unknown0b(_) => write_unknown_0b(w, self)?,\n\n Self::Burnable(_) => write_burnable(w, self)?,\n\n Self::HiddenRoom(_) => write_hidden_room(w, self)?,\n\n Self::FalconBootsNeeded => write_falcon_boots_needed(w, self)?,\n\n Self::Npc(_) => write_npc(w, self)?,\n\n Self::OuchRope(_) => write_ouch_rope(w, self)?,\n\n Self::ArrowLauncher(_) => write_arrow_launcher(w, self)?,\n\n Self::Swords(_) => write_swords(w, self)?,\n\n Self::GhostSpawner(_) => write_ghost_spawner(w, self)?,\n\n Self::FireballSpawner(_) => write_fireball_spawner(w, self)?,\n\n Self::ShopItem(_) => write_shop_item(w, self)?,\n\n Self::UnknownE1(_) => write_unknown_e1(w, self)?,\n\n Self::UnknownF4(_) => write_unknown_f4(w, self)?,\n\n }\n\n Ok(())\n", "file_path": "neutopia/src/rom/object.rs", "rank": 62, "score": 74746.46567714383 }, { "content": "\n\n run_parse_test(\n\n &[0xbd, 0x25, 0x5a],\n\n TableEntry::OuchRope(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0xbf, 0x25, 0x5a],\n\n TableEntry::ArrowLauncher(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n", "file_path": "neutopia/src/rom/object.rs", "rank": 63, "score": 74745.99570003727 }, { "content": " );\n\n\n\n run_parse_test(\n\n &[0x08, 0x25, 0x5a],\n\n TableEntry::BellGatedObject(ObjectInfo {\n\n x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(&[0x09], TableEntry::DarkRoom);\n\n\n\n run_parse_test(&[0x0a, 0x50], TableEntry::BossDoor(0x50));\n\n\n\n run_parse_test(\n\n &[0x0b, 0x46, 0x2a, 0x04],\n\n TableEntry::Unknown0b([0x46, 0x2a, 0x04]),\n\n );\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 64, "score": 74745.45704863807 }, { "content": " 0x03,\n\n EnemyGatedDoor\n\n);\n\ngen_u8_type!(parse_bombable_door, write_bombable_door, 0x05, BombableDoor);\n\ngen_object_type!(\n\n parse_push_block_gated_object,\n\n write_push_block_gated_object,\n\n 0x06,\n\n PushBlockGatedObject\n\n);\n\ngen_object_type!(\n\n parse_enemy_gated_object,\n\n write_enemy_gated_object,\n\n 0x07,\n\n EnemyGatedObject\n\n);\n\ngen_object_type!(\n\n parse_bell_gated_object,\n\n write_bell_gated_object,\n\n 0x08,\n\n BellGatedObject\n\n);\n\ngen_simple_type!(parse_dark_room, write_dark_room, 0x09, DarkRoom);\n\ngen_u8_type!(parse_boss_door, write_boss_door, 0x0a, BossDoor);\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 65, "score": 74745.24304382988 }, { "content": " run_parse_test(\n\n &[0x0c, 0x52, 0xa5],\n\n TableEntry::Burnable(ObjectInfo {\n\n x: 2,\n\n y: 5,\n\n id: 0xa5,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0x0d, 0x14, 0x14, 0x33],\n\n TableEntry::HiddenRoom([0x14, 0x14, 0x33]),\n\n );\n\n\n\n run_parse_test(&[0x81], TableEntry::FalconBootsNeeded);\n\n\n\n run_parse_test(\n\n &[0x9a, 0x48, 0x02, 0x03, 0x00, 0x40],\n\n TableEntry::Npc([0x48, 0x02, 0x03, 0x00, 0x40]),\n\n );\n", "file_path": "neutopia/src/rom/object.rs", "rank": 66, "score": 74744.09166111259 }, { "content": "\n\n assert_eq!(\n\n parse_object_table(&[0x01, 0x02, 0x02, 0x01]).unwrap(),\n\n vec![\n\n TableEntry::OpenDoor(0x02),\n\n TableEntry::PushBlockGatedDoor(0x01)\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "neutopia/src/rom/object.rs", "rank": 67, "score": 74740.33441827938 }, { "content": " x: 5,\n\n y: 2,\n\n id: 0x5a,\n\n }),\n\n );\n\n\n\n run_parse_test(\n\n &[0xda, 0x46, 0x00, 0x00, 0x02, 0x00, 0x01, 0x01],\n\n TableEntry::ShopItem([0x46, 0x00, 0x00, 0x02, 0x00, 0x01, 0x01]),\n\n );\n\n\n\n 
run_parse_test(\n\n &[0xe1, 0x48, 0x02, 0x00, 0x7d, 0x41, 0x56, 0x2e, 0x81, 0x01],\n\n TableEntry::UnknownE1([0x48, 0x02, 0x00, 0x7d, 0x41, 0x56, 0x2e, 0x81, 0x01]),\n\n );\n\n\n\n run_parse_test(\n\n &[0xf4, 0xa7, 0x02, 0x03, 0x40, 0x43],\n\n TableEntry::UnknownF4([0xa7, 0x02, 0x03, 0x40, 0x43]),\n\n );\n", "file_path": "neutopia/src/rom/object.rs", "rank": 68, "score": 74738.884407551 }, { "content": " )),\n\n alt((\n\n parse_ouch_rope,\n\n parse_arrow_launcher,\n\n parse_swords,\n\n parse_ghost_spawner,\n\n parse_fireball_spawner,\n\n parse_shop_item,\n\n parse_unknown_e1,\n\n parse_unknown_f4,\n\n )),\n\n ))(i)\n\n}\n\n\n", "file_path": "neutopia/src/rom/object.rs", "rank": 69, "score": 74735.10884012001 }, { "content": "fn main() {\n\n // Generate version, build date, and sha.\n\n let mut flags = ConstantsFlags::all();\n\n flags.toggle(ConstantsFlags::SEMVER_FROM_CARGO_PKG);\n\n generate_cargo_keys(flags).expect(\"Unable to generate the cargo keys!\");\n\n}\n", "file_path": "rando-web/build.rs", "rank": 70, "score": 64145.93450146357 }, { "content": "fn bass_path() -> PathBuf {\n\n PathBuf::from(\"../build/bin\")\n\n .join(os_type())\n\n .join(format!(\"bass{}\", exe_suffix()))\n\n}\n\n\n", "file_path": "rando/build.rs", "rank": 71, "score": 58622.3336503935 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn exe_suffix() -> &'static str {\n\n \"\"\n\n}\n\n\n", "file_path": "rando/build.rs", "rank": 72, "score": 57760.56429231556 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn os_type() -> &'static str {\n\n \"linux\"\n\n}\n\n\n", "file_path": "rando/build.rs", "rank": 73, "score": 57760.56429231556 }, { "content": "pub const AREA_TABLE: usize = 0x50000;\n\npub const AREA_TABLE_COUNT: usize = 17;\n\n\n\npub const ROOM_ORDER_TABLE: usize = 0x50033;\n\npub const ROOM_ORDER_TABLE_COUNT: usize = 17;\n\n\n\npub const CHEST_TABLE: usize = 0x5041e;\n\npub const CHEST_TABLE_COUNT: usize = 16;\n", "file_path": "neutopia/src/rommap.rs", "rank": 74, "score": 53427.049287788126 }, { "content": "use std::collections::HashMap;\n\n\n\nuse failure::{format_err, Error};\n\nuse lazy_static::lazy_static;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Region {\n\n NA,\n\n JP,\n\n Unknown,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RomInfo {\n\n pub headered: bool,\n\n pub md5_hash: String,\n\n pub known: bool,\n\n pub desc: String,\n\n pub region: Region,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "neutopia/src/verify.rs", "rank": 75, "score": 53267.126869897234 }, { "content": " desc: \"Neutopia (U)\",\n\n region: Region::NA,\n\n },\n\n );\n\n roms.insert(\n\n \"08ae173878d8a3783fa35e80c99a5dc4\".to_string(),\n\n DbEntry {\n\n desc: \"Neutopia (J)\",\n\n region: Region::JP,\n\n },\n\n );\n\n\n\n roms\n\n };\n\n}\n\n\n", "file_path": "neutopia/src/verify.rs", "rank": 76, "score": 53259.978923960414 }, { "content": "use failure::{format_err, Error};\n\n\n", "file_path": "neutopia/src/util.rs", "rank": 77, "score": 53257.521229712875 }, { "content": "pub struct IntervalStore<T: Ord + Copy + Debug> {\n\n intervals: Vec<Interval<T>>,\n\n}\n\n\n\nimpl<T: Ord + Copy + Debug> Default for IntervalStore<T> {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl<T: Ord + Copy + Debug> IntervalStore<T> {\n\n /// Generate a new empty IntervalStore.\n\n pub fn new() -> Self {\n\n Self {\n\n intervals: Vec::new(),\n\n }\n\n }\n\n\n\n /// Add an interval to the store.\n\n pub fn add(&mut self, start: T, end: T) {\n", "file_path": "neutopia/src/interval.rs", 
"rank": 78, "score": 53189.163580989676 }, { "content": " /// Returns true if `self` and `other` can be combined.\n\n ///\n\n /// This is different that testing for overlapping in that two intervals\n\n /// that are adjacent are allowed to merge.\n\n pub fn can_merge(&self, other: &Self) -> bool {\n\n (self.start <= other.start && other.start <= self.end)\n\n || (other.start <= self.start && self.start <= other.end)\n\n }\n\n\n\n /// Merge `other` into this interval\n\n ///\n\n /// Panics if the intervals can't merge.\n\n pub fn merge(&mut self, other: &Self) {\n\n assert!(self.can_merge(other));\n\n self.start = min(self.start, other.start);\n\n self.end = max(self.end, other.end);\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "neutopia/src/interval.rs", "rank": 79, "score": 53188.02394958441 }, { "content": " }\n\n }\n\n\n\n /// Return a owned, sorted Vec of intervals in the store.\n\n pub fn get_intervals(&self) -> Vec<Interval<T>> {\n\n let mut intervals = self.intervals.clone();\n\n intervals.sort();\n\n intervals\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn no_overlap() {\n\n let mut store = IntervalStore::new();\n\n store.add(0u32, 2);\n\n store.add(3, 5);\n", "file_path": "neutopia/src/interval.rs", "rank": 80, "score": 53187.72006572164 }, { "content": "//! A data structure for accounting intervals.\n\n//!\n\n//! This is implemented with a brute force approach that traverses every\n\n//! interval on each add. A better approach would be to use an interval\n\n//! tree.\n\n\n\nuse std::cmp::{max, min};\n\nuse std::fmt::Debug;\n\n\n\n/// An interval from [`start`, `end`)\n\n#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Interval<T: Ord + Copy + Debug> {\n\n /// The start of the interval (inclusive).\n\n pub start: T,\n\n\n\n /// The end of the interval (exclusive).\n\n pub end: T,\n\n}\n\n\n\nimpl<T: Ord + Copy + Debug> Interval<T> {\n", "file_path": "neutopia/src/interval.rs", "rank": 81, "score": 53187.68933935658 }, { "content": " store.add(6, 8);\n\n let mut intervals = store.intervals;\n\n intervals.sort();\n\n assert_eq!(\n\n intervals,\n\n vec![\n\n Interval { start: 0, end: 2 },\n\n Interval { start: 3, end: 5 },\n\n Interval { start: 6, end: 8 },\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn adjacent_overlap() {\n\n let mut store = IntervalStore::new();\n\n store.add(0u32, 2);\n\n store.add(4, 6);\n\n store.add(2, 4);\n\n let mut intervals = store.intervals;\n", "file_path": "neutopia/src/interval.rs", "rank": 82, "score": 53181.92212537995 }, { "content": " intervals.sort();\n\n assert_eq!(intervals, vec![Interval { start: 0, end: 6 }]);\n\n }\n\n\n\n #[test]\n\n pub fn full_overlap() {\n\n let mut store = IntervalStore::new();\n\n store.add(0u32, 2);\n\n store.add(4, 6);\n\n store.add(1, 5);\n\n let mut intervals = store.intervals;\n\n intervals.sort();\n\n assert_eq!(intervals, vec![Interval { start: 0, end: 6 }]);\n\n }\n\n}\n", "file_path": "neutopia/src/interval.rs", "rank": 83, "score": 53181.566609377835 }, { "content": " let mut new_interval = Interval { start, end };\n\n let mut first_match = None;\n\n let mut i = 0;\n\n while i != self.intervals.len() {\n\n let interval = self.intervals[i];\n\n if first_match.is_none() && interval.can_merge(&new_interval) {\n\n self.intervals[i].merge(&new_interval);\n\n new_interval = self.intervals[i];\n\n first_match = Some(i);\n\n i += 1;\n\n } else if first_match.is_some() && interval.can_merge(&new_interval) {\n\n let match_idx = 
first_match.unwrap();\n\n self.intervals[match_idx].merge(&interval);\n\n self.intervals.remove(i);\n\n } else {\n\n i += 1;\n\n }\n\n }\n\n if first_match.is_none() {\n\n self.intervals.push(new_interval)\n", "file_path": "neutopia/src/interval.rs", "rank": 84, "score": 53179.97416480167 }, { "content": "fn get_website_mode() -> WebsiteMode {\n\n parse_semver(env!(\"VERGEN_SEMVER_LIGHTWEIGHT\"))\n\n}\n\n\n", "file_path": "rando-web/src/lib.rs", "rank": 85, "score": 53171.9140121229 }, { "content": "use yew::{html, Component, ComponentLink, Html, ShouldRender};\n\n\n\npub struct Info {\n\n _link: ComponentLink<Self>,\n\n}\n\n\n\nimpl Component for Info {\n\n type Message = ();\n\n type Properties = ();\n\n fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n Info { _link: link }\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n\n false\n\n }\n\n\n\n fn change(&mut self, _msg: Self::Message) -> ShouldRender {\n\n false\n\n }\n", "file_path": "rando-web/src/info.rs", "rank": 86, "score": 52344.3567455655 }, { "content": "\n\n fn view(&self) -> Html {\n\n html! {\n\n <nav class=\"panel is-primary\">\n\n <p class=\"panel-heading\">\n\n {\"Information\"}\n\n </p>\n\n <div class=\"panel-block\">\n\n <p>\n\n {\"Neutopia randomizer is in very early development. Currently the only thing that is randomized is chest contents. Care has been taken to avoid un-completable seeds. If you find a bug, please feel free to file and issue on our \"}\n\n <a href=\"https://github.com/konkers/neutopia/issues\">{\"tracker\"}</a>{\".\"}\n\n </p>\n\n </div>\n\n </nav>\n\n\n\n }\n\n }\n\n}\n", "file_path": "rando-web/src/info.rs", "rank": 87, "score": 52334.868549510116 }, { "content": "fn parse_semver(semver: &str) -> WebsiteMode {\n\n let re = Regex::new(r\"^(v\\d+\\.\\d+\\.\\d+)(-.*)?$\").unwrap();\n\n let captures = re.captures(semver);\n\n match captures {\n\n Some(c) => {\n\n if let Some(_) = c.get(2) {\n\n WebsiteMode::Beta\n\n } else {\n\n WebsiteMode::Release\n\n }\n\n }\n\n None => WebsiteMode::Dev,\n\n }\n\n}\n\n\n", "file_path": "rando-web/src/lib.rs", "rank": 88, "score": 50018.329858194775 }, { "content": "use std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::PathBuf;\n\n\n\nuse failure::Error;\n\nuse structopt::StructOpt;\n\n\n\nuse neutopia::verify;\n\n\n\n#[derive(StructOpt, Debug)]\n\npub(crate) struct InfoOpt {\n\n #[structopt(long, parse(from_os_str), default_value = \"neutopia-jp.pce\")]\n\n rom: PathBuf,\n\n}\n\n\n\npub(crate) fn command(opt: &InfoOpt) -> Result<(), Error> {\n\n let mut f = File::open(&opt.rom)?;\n\n let mut buffer = Vec::new();\n\n // read the whole file\n\n f.read_to_end(&mut buffer)?;\n", "file_path": "neutil/src/info.rs", "rank": 89, "score": 28570.493466096013 }, { "content": "\n\n let info = verify(&buffer)?;\n\n\n\n println!(\"Info for {}:\", &opt.rom.display());\n\n println!(\" Headered: {}\", info.headered);\n\n println!(\" MD5 hash: {}\", info.md5_hash);\n\n println!(\" Description: {}\", info.desc);\n\n println!(\" Region: {:?}\", info.region);\n\n\n\n Ok(())\n\n}\n", "file_path": "neutil/src/info.rs", "rank": 90, "score": 28558.837179292277 }, { "content": "use std::env;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\nuse failure::{format_err, Error};\n\n\n\n#[cfg(target_os = \"macos\")]\n", "file_path": "rando/build.rs", "rank": 91, "score": 27918.66713131363 }, { "content": "}\n\n\n\nimpl State {\n\n pub fn new(n: Neutopia) -> Result<Self, Error> {\n\n let mut unplaced_items = BTreeSet::new();\n\n\n\n 
// Filter out end game area and medallions\n\n let chests = n.filter_chests(|chest| (chest.area < 0x10) && (chest.info.item_id < 0x12));\n\n\n\n for chest in chests {\n\n // Lock crystal balls and crypt keys to their area\n\n let area_lock = match chest.info.item_id {\n\n 0x10 | 0x11 => Some(chest.area),\n\n _ => None,\n\n };\n\n\n\n unplaced_items.insert(Item {\n\n info: chest.info,\n\n area_lock,\n\n });\n", "file_path": "rando/src/state.rs", "rank": 92, "score": 26498.91283083017 }, { "content": " pub room: u8,\n\n pub index: u8,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub(crate) struct Item {\n\n pub info: rom::Chest,\n\n pub area_lock: Option<u8>,\n\n}\n\n\n\npub(crate) struct State {\n\n // We're using BTree data structures to give us deterministic traversal\n\n // ordering.\n\n unassigned_checks: BTreeMap<LocationId, Check>,\n\n unplaced_items: BTreeSet<Item>,\n\n cleared_gates: BTreeSet<Gate>,\n\n\n\n assigned_chests: Vec<neutopia::Chest>,\n\n\n\n n: Neutopia,\n", "file_path": "rando/src/state.rs", "rank": 93, "score": 26497.66307084251 }, { "content": " pub area: u8,\n\n pub room: u8,\n\n #[serde(default)]\n\n pub index: u8,\n\n pub gates: Vec<Gate>,\n\n}\n\n\n\nimpl Check {\n\n pub(crate) fn loc(&self) -> LocationId {\n\n LocationId {\n\n area: self.area,\n\n room: self.room,\n\n index: self.index,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub(crate) struct LocationId {\n\n pub area: u8,\n", "file_path": "rando/src/state.rs", "rank": 94, "score": 26497.37291447843 }, { "content": " self.assigned_chests.push(chest);\n\n\n\n assert_eq!(self.unassigned_checks.len(), self.unplaced_items.len());\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn filter_items(&self, filter: impl Fn(&Item) -> bool) -> Vec<Item> {\n\n let mut items = Vec::new();\n\n for item in &self.unplaced_items {\n\n if filter(item) {\n\n items.push(item.clone());\n\n }\n\n }\n\n\n\n items\n\n }\n\n\n\n pub fn get_item_by_id(&self, id: u8) -> Result<Item, Error> {\n\n let items = self.filter_items(|item| item.info.item_id == id);\n", "file_path": "rando/src/state.rs", "rank": 95, "score": 26496.42708232392 }, { "content": " 0x0b => Some(Gate::FalconShoes),\n\n 0x0c => Some(Gate::RainbowDrop),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn place_item(&mut self, item: Item, area: u8, room: u8, index: u8) -> Result<(), Error> {\n\n self.place_item_by_loc(item, &LocationId { area, room, index })\n\n }\n\n\n\n pub fn place_item_by_loc(&mut self, item: Item, loc: &LocationId) -> Result<(), Error> {\n\n if let Some(area) = &item.area_lock {\n\n if *area != loc.area {\n\n return Err(format_err!(\n\n \"attempting to place area locked item {:?} in area {}\",\n\n &item,\n\n loc.area\n\n ));\n\n }\n\n }\n", "file_path": "rando/src/state.rs", "rank": 96, "score": 26496.334086019477 } ]
lang: Rust
file_path: vm/actor/src/util/balance_table.rs
repo_name: zatoichi-labs/forest
commit: 4422cddcf42fab20912d1ad1f92e2b997f1b5bda
file_code:
use crate::{BytesKey, HAMT_BIT_WIDTH};
use address::Address;
use cid::Cid;
use ipld_blockstore::BlockStore;
use ipld_hamt::{Error, Hamt};
use num_bigint::biguint_ser::BigUintDe;
use num_traits::CheckedSub;
use vm::TokenAmount;

pub struct BalanceTable<'a, BS>(Hamt<'a, BytesKey, BS>);

impl<'a, BS> BalanceTable<'a, BS>
where
    BS: BlockStore,
{
    pub fn new(bs: &'a BS) -> Self {
        Self(Hamt::new_with_bit_width(bs, HAMT_BIT_WIDTH))
    }

    pub fn from_root(bs: &'a BS, cid: &Cid) -> Result<Self, Error> {
        Ok(Self(Hamt::load_with_bit_width(cid, bs, HAMT_BIT_WIDTH)?))
    }

    #[inline]
    pub fn root(&mut self) -> Result<Cid, Error> {
        self.0.flush()
    }

    #[inline]
    pub fn get(&self, key: &Address) -> Result<TokenAmount, String> {
        Ok(self
            .0
            .get::<_, BigUintDe>(&key.to_bytes())?
            .ok_or("no key {} in map root")?
            .0)
    }

    #[inline]
    pub fn has(&self, key: &Address) -> Result<bool, Error> {
        match self.0.get::<_, BigUintDe>(&key.to_bytes())? {
            Some(_) => Ok(true),
            None => Ok(false),
        }
    }

    #[inline]
    pub fn set(&mut self, key: &Address, value: TokenAmount) -> Result<(), Error> {
        self.0.set(key.to_bytes().into(), BigUintDe(value))
    }

    pub fn add(&mut self, key: &Address, value: &TokenAmount) -> Result<(), String> {
        let prev = self.get(key)?;
        Ok(self.0.set(key.to_bytes().into(), BigUintDe(prev + value))?)
    }

    pub fn add_create(&mut self, key: &Address, value: TokenAmount) -> Result<(), String> {
        let new_val = match self.0.get::<_, BigUintDe>(&key.to_bytes())? {
            Some(v) => v.0 + value,
            None => value,
        };
        Ok(self.0.set(key.to_bytes().into(), BigUintDe(new_val))?)
    }

    pub fn subtract_with_minimum(
        &mut self,
        key: &Address,
        req: &TokenAmount,
        floor: &TokenAmount,
    ) -> Result<TokenAmount, String> {
        let prev = self.get(key)?;
        let res = prev
            .checked_sub(req)
            .unwrap_or_else(|| TokenAmount::from(0u8));
        let new_val: &TokenAmount = std::cmp::max(&res, floor);
        if &prev > new_val {
            self.0
                .set(key.to_bytes().into(), BigUintDe(new_val.clone()))?;
            Ok(prev - new_val)
        } else {
            Ok(TokenAmount::default())
        }
    }

    pub fn must_subtract(&mut self, key: &Address, req: &TokenAmount) -> Result<(), String> {
        let sub_amt = self.subtract_with_minimum(key, req, &TokenAmount::from(0u8))?;
        if &sub_amt != req {
            return Err(format!(
                "Couldn't subtract value from address {} (req: {}, available: {})",
                key, req, sub_amt
            ));
        }
        Ok(())
    }

    pub fn remove(&mut self, key: &Address) -> Result<TokenAmount, String> {
        let prev = self.get(key)?;
        self.0.delete(&key.to_bytes())?;
        Ok(prev)
    }

    pub fn total(&self) -> Result<TokenAmount, String> {
        let mut total = TokenAmount::default();
        self.0.for_each(|_, v: BigUintDe| {
            total += v.0;
            Ok(())
        })?;
        Ok(total)
    }
}
use crate::{BytesKey, HAMT_BIT_WIDTH}; use address::Address; use cid::Cid; use ipld_blockstore::BlockStore; use ipld_hamt::{Error, Hamt}; use num_bigint::biguint_ser::BigUintDe; use num_traits::CheckedSub; use vm::TokenAmount; pub struct BalanceTable<'a, BS>(Hamt<'a, BytesKey, BS>); impl<'a, BS> BalanceTable<'a, BS> where BS: BlockStore, { pub fn new(bs: &'a BS) -> Self { Self(Hamt::new_with_bit_width(bs, HAMT_BIT_WIDTH)) } pub fn from_root(bs: &'a BS, cid: &Cid) -> Result<Self, Error> { Ok(Self(Hamt::load_with_bit_width(cid, bs, HAMT_BIT_WIDTH)?)) } #[inline] pub fn root(&mut self) -> Result<Cid, Error> { self.0.flush() } #[inline] pub fn get(&self, key: &Address) -> Result<TokenAmount, String> { Ok(self .0 .get::<_, BigUintDe>(&key.to_bytes())? .ok_or("no key {} in map root")? .0) } #[inline] pub fn has(&self, key: &Address) -> Result<bool, Error> { match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(_) => Ok(true), None => Ok(false), } } #[inline] pub fn set(&mut self, key: &Address, value: TokenAmount) -> Result<(), Error> { self.0.set(key.to_bytes().into(), BigUintDe(value)) } pub fn add(&mut self, key: &Address, value: &TokenAmount) -> Result<(), String> { let prev = self.get(key)?; Ok(self.0.set(key.to_bytes().into(), BigUintDe(prev + value))?) } pub fn add_create(&mut self, key: &Address, value: TokenAmount) -> Result<(), String> { let new_val = match self.0.get::<_, BigUintDe>(&key.to_bytes())? { Some(v) => v.0 + value, None => value, }; Ok(self.0.set(key.to_bytes().into(), BigUintDe(new_val))?) } pub fn subtract_with_minimum( &mut self, key: &Address, req: &TokenAmount, floor: &TokenAmount, ) -> Result<TokenAmount, String> { let prev = self.get(key)?; let res = prev .checked_sub(req) .unwrap_or_else(|| TokenAmount::from(0u8)); let new_val: &TokenAmount = std::cmp::max(&res, floor); if &prev > new_val { self.0 .set(key.to_bytes().into(), BigUintDe(new_val.clone()))?; Ok(prev - new_val) } else { Ok(TokenAmount::default()) } } pub fn must_subtract(&mut self, key: &Address, req: &TokenAmount) -> Result<(), String> { let sub_amt = self.subtract_with_minimum(key, req, &TokenAmount::from(0u8))?; if &su
pub fn remove(&mut self, key: &Address) -> Result<TokenAmount, String> { let prev = self.get(key)?; self.0.delete(&key.to_bytes())?; Ok(prev) } pub fn total(&self) -> Result<TokenAmount, String> { let mut total = TokenAmount::default(); self.0.for_each(|_, v: BigUintDe| { total += v.0; Ok(()) })?; Ok(total) } }
b_amt != req { return Err(format!( "Couldn't subtract value from address {} (req: {}, available: {})", key, req, sub_amt )); } Ok(()) }
strategy: function_block-function_prefixed
[ { "content": "fn mutate_balance_table<BS, F>(store: &BS, c: &mut Cid, f: F) -> Result<(), String>\n\nwhere\n\n F: FnOnce(&mut BalanceTable<BS>) -> Result<(), String>,\n\n BS: BlockStore,\n\n{\n\n let mut t = BalanceTable::from_root(store, &c)?;\n\n\n\n f(&mut t)?;\n\n\n\n *c = t.root()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/actor/src/builtin/market/state.rs", "rank": 0, "score": 338308.11628412886 }, { "content": "/// Return a new Address that is of a given SignatureType and uses the supplied public_key\n\npub fn new_address(sig_type: SignatureType, public_key: &[u8]) -> Result<Address, Error> {\n\n match sig_type {\n\n SignatureType::BLS => {\n\n let addr = Address::new_bls(public_key).map_err(|err| Error::Other(err.to_string()))?;\n\n Ok(addr)\n\n }\n\n SignatureType::Secp256k1 => {\n\n let addr =\n\n Address::new_secp256k1(public_key).map_err(|err| Error::Other(err.to_string()))?;\n\n Ok(addr)\n\n }\n\n }\n\n}\n\n\n", "file_path": "key_management/src/wallet_helpers.rs", "rank": 1, "score": 334085.5389777471 }, { "content": "// Resolves a provider or client address to the canonical form against which a balance should be held, and\n\n// the designated recipient address of withdrawals (which is the same, for simple account parties).\n\nfn escrow_address<BS, RT>(rt: &mut RT, addr: &Address) -> Result<(Address, Address), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n // Resolve the provided address to the canonical form against which the balance is held.\n\n let nominal = rt.resolve_address(addr).map_err(|e| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"Failed to resolve address provided: {}\", e),\n\n )\n\n })?;\n\n\n\n let code_id = rt.get_actor_code_cid(&nominal).map_err(|e| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"Failed to retrieve actor code cid: {}\", e),\n\n )\n\n })?;\n\n\n", "file_path": "vm/actor/src/builtin/market/mod.rs", "rank": 2, "score": 327548.7217731218 }, { "content": "/// Resolves an address to an ID address and verifies that it is address of an account actor with an associated BLS key.\n\n/// The worker must be BLS since the worker key will be used alongside a BLS-VRF.\n\nfn resolve_worker_address<BS, RT>(rt: &mut RT, raw: Address) -> Result<Address, ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n let resolved = rt.resolve_address(&raw).map_err(|e| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"unable to resolve address: {},{}\", raw, e),\n\n )\n\n })?;\n\n assert!(resolved.protocol() == Protocol::ID);\n\n\n\n let owner_code = rt.get_actor_code_cid(&resolved).map_err(|e| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"no code for address: {}, {}\", resolved, e),\n\n )\n\n })?;\n\n if owner_code != *ACCOUNT_ACTOR_CODE_ID {\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 3, "score": 321375.2847929627 }, { "content": "/// Returns a vector of cids from provided root cid\n\nfn read_amt_cids<DB>(db: &DB, root: &Cid) -> Result<Vec<Cid>, Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n let amt = Amt::load(root, db)?;\n\n\n\n let mut cids = Vec::new();\n\n for i in 0..amt.count() {\n\n if let Some(c) = amt.get(i)? 
{\n\n cids.push(c);\n\n }\n\n }\n\n\n\n Ok(cids)\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 4, "score": 300866.5702981677 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<Cid, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let CidMap { cid } = Deserialize::deserialize(deserializer)?;\n\n cid.parse().map_err(de::Error::custom)\n\n}\n\n\n\n/// Struct just used as a helper to serialize a cid into a map with key \"/\"\n", "file_path": "ipld/cid/src/json.rs", "rank": 5, "score": 297787.37809535314 }, { "content": "#[inline]\n\nfn make_map<BS: BlockStore>(store: &'_ BS) -> Hamt<'_, BytesKey, BS> {\n\n Hamt::new_with_bit_width(store, HAMT_BIT_WIDTH)\n\n}\n\n\n", "file_path": "vm/actor/src/lib.rs", "rank": 6, "score": 296218.1477521294 }, { "content": "fn commit_worker_key_change<BS, RT>(rt: &mut RT) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n rt.transaction(|st: &mut State, rt| {\n\n if st.info.pending_worker_key.is_none() {\n\n return Err(ActorError::new(\n\n ExitCode::ErrIllegalState,\n\n \"No pending key change.\".to_string(),\n\n ));\n\n }\n\n if let Some(worker_key) = &st.info.pending_worker_key {\n\n if worker_key.effective_at > rt.curr_epoch() {\n\n return Err(ActorError::new(\n\n ExitCode::ErrIllegalState,\n\n format!(\n\n \"Too early for key change. Current: {}, Change: {}\",\n\n rt.curr_epoch(),\n\n worker_key.effective_at\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 7, "score": 290020.69619661535 }, { "content": "/// Return Address for a message given it's hash and signature\n\npub fn ecrecover(hash: &[u8; 32], signature: &[u8; 65]) -> Result<Address, Error> {\n\n // generate types to recover key from\n\n let rec_id = RecoveryId::parse(signature[64])?;\n\n let message = Message::parse(&hash);\n\n\n\n // Signature value without recovery byte\n\n let mut s = [0u8; 64];\n\n s.clone_from_slice(signature[..64].as_ref());\n\n // generate Signature\n\n let sig = EcsdaSignature::parse(&s);\n\n\n\n let key = recover(&message, &sig, &rec_id)?;\n\n let ret = key.serialize();\n\n let addr = Address::new_secp256k1(&ret)?;\n\n Ok(addr)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "crypto/src/signature.rs", "rank": 8, "score": 289467.89863604424 }, { "content": "/// Returns a tuple of cids for both Unsigned and Signed messages\n\npub fn read_msg_cids<DB>(db: &DB, msg_cid: &Cid) -> Result<(Vec<Cid>, Vec<Cid>), Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n if let Some(roots) = db\n\n .get::<TxMeta>(msg_cid)\n\n .map_err(|e| Error::Other(e.to_string()))?\n\n {\n\n let bls_cids = read_amt_cids(db, &roots.bls_message_root)?;\n\n let secpk_cids = read_amt_cids(db, &roots.secp_message_root)?;\n\n Ok((bls_cids, secpk_cids))\n\n } else {\n\n Err(Error::UndefinedKey(\"no msgs with that key\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 9, "score": 287237.13001959486 }, { "content": "/// Convert a `Ipld` structure into a type `T`\n\n/// Currently converts using a byte buffer with serde_cbor\n\npub fn from_ipld<T>(value: &Ipld) -> Result<T, String>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n // TODO update to not go through byte buffer to convert\n\n // There is a good amount of overhead for this (having to implement serde::Deserializer)\n\n // for Ipld, but possible. 
The benefit isn't worth changing yet since if the value is not\n\n // passed by reference as needed by HAMT, then the values will have to be cloned.\n\n let buf = to_vec(value).map_err(|e| e.to_string())?;\n\n from_slice(buf.as_slice()).map_err(|e| e.to_string())\n\n}\n", "file_path": "ipld/src/lib.rs", "rank": 10, "score": 285703.4573632217 }, { "content": "pub fn serialize<S>(c: &Cid, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n CidMap { cid: c.to_string() }.serialize(serializer)\n\n}\n\n\n", "file_path": "ipld/cid/src/json.rs", "rank": 11, "score": 284735.1004013292 }, { "content": "fn delete_many<BS: BlockStore>(amt: &mut Amt<BigUintDe, BS>, keys: &[u64]) -> Result<(), AmtError> {\n\n for &i in keys {\n\n amt.delete(i)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/actor/src/builtin/miner/state.rs", "rank": 12, "score": 277962.8195191193 }, { "content": "/// Resolves an address to an ID address and verifies that it is address of an account or multisig actor.\n\nfn resolve_owner_address<BS, RT>(rt: &RT, raw: Address) -> Result<Address, ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n let resolved = rt.resolve_address(&raw).map_err(|_| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"unable to resolve address {}\", raw),\n\n )\n\n })?;\n\n assert!(resolved.protocol() == Protocol::ID);\n\n\n\n let owner_code = rt.get_actor_code_cid(&resolved).map_err(|_| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"no code for address: {}\", resolved),\n\n )\n\n })?;\n\n if !is_principal(&owner_code) {\n\n return Err(ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n format!(\"owner actor type must be a principal, was {}\", owner_code),\n\n ));\n\n }\n\n\n\n Ok(resolved)\n\n}\n\n\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 13, "score": 275092.38597169996 }, { "content": "/// Generate a new Key that satisfies the given SignatureType\n\nfn generate_key(typ: SignatureType) -> Result<Key, Error> {\n\n let private_key = wallet_helpers::generate(typ)?;\n\n let key_info = KeyInfo::new(typ, private_key);\n\n Key::try_from(key_info)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{generate, MemKeyStore};\n\n use encoding::blake2b_256;\n\n use secp256k1::{Message as SecpMessage, SecretKey as SecpPrivate};\n\n\n\n fn construct_priv_keys() -> Vec<Key> {\n\n let mut secp_keys = Vec::new();\n\n let mut bls_keys = Vec::new();\n\n for _ in 1..5 {\n\n let secp_priv_key = generate(SignatureType::Secp256k1).unwrap();\n\n let secp_key_info = KeyInfo::new(SignatureType::Secp256k1, secp_priv_key);\n\n let secp_key = Key::try_from(secp_key_info).unwrap();\n", "file_path": "key_management/src/wallet.rs", "rank": 14, "score": 273091.242258148 }, { "content": "/// Generate a new private key\n\npub fn generate(sig_type: SignatureType) -> Result<Vec<u8>, Error> {\n\n let rng = &mut OsRng::default();\n\n match sig_type {\n\n SignatureType::BLS => {\n\n let key = BlsPrivate::generate(rng);\n\n Ok(key.as_bytes())\n\n }\n\n SignatureType::Secp256k1 => {\n\n let key = SecpPrivate::random(rng);\n\n Ok(key.serialize().to_vec())\n\n }\n\n }\n\n}\n", "file_path": "key_management/src/wallet_helpers.rs", "rank": 15, "score": 266913.73748968437 }, { "content": "fn burn_funds<BS, RT>(rt: &mut RT, amount: &TokenAmount) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n if amount > &BigUint::zero() {\n\n rt.send(\n\n &*BURNT_FUNDS_ACTOR_ADDR,\n\n METHOD_SEND,\n\n 
&Serialized::default(),\n\n amount,\n\n )?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 16, "score": 266857.5711362939 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<(), D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let EmptyMap {} = Deserialize::deserialize(deserializer)?;\n\n Ok(())\n\n}\n", "file_path": "ipld/src/selector/empty_map.rs", "rank": 17, "score": 266809.0216669411 }, { "content": "/// Read file as a `String`.\n\npub fn read_file_to_string(path: &str) -> Result<String> {\n\n let mut file = File::open(path)?;\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)?;\n\n Ok(string)\n\n}\n\n\n", "file_path": "node/utils/src/lib.rs", "rank": 18, "score": 265506.28867204435 }, { "content": "/// Invoked at the end of each proving period, at the end of the epoch before the next one starts.\n\nfn handle_proving_period<BS, RT>(rt: &mut RT) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n // Vest locked funds.\n\n // This happens first so that any subsequent penalties are taken from locked pledge, rather than free funds.\n\n let vested_amount =\n\n rt.transaction::<State, Result<TokenAmount, ActorError>, _>(|st, rt| {\n\n let newly_vested_fund = st\n\n .unlock_vested_funds(rt.store(), rt.curr_epoch())\n\n .map_err(|e| {\n\n ActorError::new(\n\n ExitCode::ErrIllegalState,\n\n format!(\"failed to vest funds {:}\", e),\n\n )\n\n })?;\n\n Ok(newly_vested_fund)\n\n })??;\n\n\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 19, "score": 260275.6462087844 }, { "content": "/// Returns Tipset from key-value store from provided cids\n\npub fn tipset_from_keys<DB>(db: &DB, tsk: &TipsetKeys) -> Result<Tipset, Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n let mut block_headers = Vec::new();\n\n for c in tsk.cids() {\n\n let raw_header = db.read(c.key())?;\n\n if let Some(x) = raw_header {\n\n // decode raw header into BlockHeader\n\n let bh = BlockHeader::unmarshal_cbor(&x)?;\n\n block_headers.push(bh);\n\n } else {\n\n return Err(Error::NotFound(\"Key for header\"));\n\n }\n\n }\n\n // construct new Tipset to return\n\n let ts = Tipset::new(block_headers)?;\n\n Ok(ts)\n\n}\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 20, "score": 260074.42558810758 }, { "content": "/// Return the public key for a given private_key and SignatureType\n\npub fn to_public(sig_type: SignatureType, private_key: &[u8]) -> Result<Vec<u8>, Error> {\n\n match sig_type {\n\n SignatureType::BLS => Ok(BlsPrivate::from_bytes(&private_key)\n\n .map_err(|err| Error::Other(err.to_string()))?\n\n .public_key()\n\n .as_bytes()),\n\n SignatureType::Secp256k1 => {\n\n let private_key = SecpPrivate::parse_slice(private_key)\n\n .map_err(|err| Error::Other(err.to_string()))?;\n\n let public_key = SecpPublic::from_secret_key(&private_key);\n\n Ok(public_key.serialize().to_vec())\n\n }\n\n }\n\n}\n\n\n", "file_path": "key_management/src/wallet_helpers.rs", "rank": 21, "score": 260064.66402517064 }, { "content": "pub fn serialize<S>(serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n EmptyMap {}.serialize(serializer)\n\n}\n\n\n", "file_path": "ipld/src/selector/empty_map.rs", "rank": 22, "score": 258570.24529204323 }, { "content": "/// Returns messages from key-value store\n\nfn messages_from_cids<DB, T>(db: &DB, keys: &[Cid]) -> Result<Vec<T>, Error>\n\nwhere\n\n DB: BlockStore,\n\n T: DeserializeOwned,\n\n{\n\n keys.iter()\n\n .map(|k| {\n\n let value = 
db.read(&k.key())?;\n\n let bytes = value.ok_or_else(|| Error::UndefinedKey(k.to_string()))?;\n\n\n\n // Decode bytes into type T\n\n let t = from_slice(&bytes)?;\n\n Ok(t)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 23, "score": 257450.22863882754 }, { "content": "/// Sign takes in SignatureType, private key and message. Returns a Signature for that message\n\npub fn sign(sig_type: SignatureType, private_key: &[u8], msg: &[u8]) -> Result<Signature, Error> {\n\n match sig_type {\n\n SignatureType::BLS => {\n\n let priv_key =\n\n BlsPrivate::from_bytes(private_key).map_err(|err| Error::Other(err.to_string()))?;\n\n // this returns a signature from bls-signatures, so we need to convert this to a crypto signature\n\n let sig = priv_key.sign(msg);\n\n let crypto_sig = Signature::new_bls(sig.as_bytes());\n\n Ok(crypto_sig)\n\n }\n\n SignatureType::Secp256k1 => {\n\n let priv_key = SecpPrivate::parse_slice(private_key)\n\n .map_err(|err| Error::Other(err.to_string()))?;\n\n let msg_complete = blake2b_256(msg);\n\n let message = SecpMessage::parse(&msg_complete);\n\n let (sig, recovery_id) = secp256k1::sign(&message, &priv_key);\n\n let mut new_bytes = [0; 65];\n\n new_bytes[..64].copy_from_slice(&sig.serialize());\n\n new_bytes[64] = recovery_id.serialize();\n\n let crypto_sig = Signature::new_secp256k1(new_bytes.to_vec());\n\n Ok(crypto_sig)\n\n }\n\n }\n\n}\n\n\n", "file_path": "key_management/src/wallet_helpers.rs", "rank": 24, "score": 252901.34796417522 }, { "content": "pub fn parse_uint_key(s: &[u8]) -> Result<u64, UVarintError> {\n\n let (v, _) = unsigned_varint::decode::u64(s)?;\n\n Ok(v)\n\n}\n", "file_path": "vm/actor/src/lib.rs", "rank": 25, "score": 250311.98805343898 }, { "content": "/// cid_to_piece_commitment_v1 converts a CID to a comm_p\n\n/// -- it is just a helper function that is equivalent to\n\n/// cid_to_data_commitment_v1.\n\npub fn cid_to_piece_commitment_v1(c: &Cid) -> Result<Commitment, &'static str> {\n\n cid_to_data_commitment_v1(c)\n\n}\n", "file_path": "utils/commcid/src/lib.rs", "rank": 26, "score": 247605.64663946908 }, { "content": "/// cid_to_data_commitment_v1 extracts the raw data commitment from a CID\n\n/// assuming that it has the correct hashing function and\n\n/// serialization types\n\npub fn cid_to_data_commitment_v1(c: &Cid) -> Result<Commitment, &'static str> {\n\n let (comm_d, code) = cid_to_commitment(c)?;\n\n\n\n if code != FilecoinMultihashCode::UnsealedV1 {\n\n return Err(\"incorrect hashing function for data commitment\");\n\n }\n\n\n\n Ok(comm_d)\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 27, "score": 247605.20055673638 }, { "content": "/// cid_to_replica_commitment_v1 extracts the raw replica commitment from a CID\n\n/// assuming that it has the correct hashing function and\n\n/// serialization types\n\npub fn cid_to_replica_commitment_v1(c: &Cid) -> Result<Commitment, &'static str> {\n\n let (comm_r, hash) = cid_to_commitment(c)?;\n\n\n\n if hash != FilecoinMultihashCode::SealedV1 {\n\n return Err(\"incorrect hashing function for data commitment\");\n\n }\n\n\n\n Ok(comm_r)\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 28, "score": 247605.2005567364 }, { "content": "/// Convert any object into an IPLD object\n\npub fn to_ipld<T>(ipld: T) -> Result<Ipld, Error>\n\nwhere\n\n T: Serialize,\n\n{\n\n ipld.serialize(Serializer)\n\n}\n\n\n", "file_path": "ipld/src/lib.rs", "rank": 29, "score": 245700.74964597682 }, { "content": "/// Returns public address of 
the specified actor address\n\npub fn resolve_to_key_addr<'st, 'bs, BS, S>(\n\n st: &'st StateTree<'bs, S>,\n\n store: &'bs BS,\n\n addr: &Address,\n\n) -> Result<Address, ActorError>\n\nwhere\n\n BS: BlockStore,\n\n S: BlockStore,\n\n{\n\n if addr.protocol() == Protocol::BLS || addr.protocol() == Protocol::Secp256k1 {\n\n return Ok(*addr);\n\n }\n\n\n\n let act = st\n\n .get_actor(&addr)\n\n .map_err(|e| ActorError::new(ExitCode::SysErrInternal, e))?\n\n .ok_or_else(|| {\n\n ActorError::new(\n\n ExitCode::SysErrInternal,\n\n format!(\"Failed to retrieve actor: {}\", addr),\n", "file_path": "vm/interpreter/src/default_runtime.rs", "rank": 30, "score": 244597.0647385125 }, { "content": "/// cid_to_commitment extracts the raw data commitment from a CID\n\n/// assuming that it has the correct hashing function and\n\n/// serialization types\n\npub fn cid_to_commitment(c: &Cid) -> Result<(Commitment, FilecoinMultihashCode), &'static str> {\n\n if c.codec != Codec::Raw {\n\n return Err(\"codec for all commitments is raw\");\n\n }\n\n\n\n let code = match c.hash.algorithm() {\n\n multihash::Code::Custom(code) => {\n\n FromPrimitive::from_u64(code).ok_or(\"Invalid custom code\")?\n\n }\n\n _ => return Err(\"Invalid Cid hash algorithm\"),\n\n };\n\n\n\n let mut comm = Commitment::default();\n\n comm.copy_from_slice(c.hash.digest());\n\n\n\n Ok((comm, code))\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 31, "score": 239860.73819251385 }, { "content": "/// Returns a vec of 4 distinct CIDs\n\npub fn construct_keys() -> Vec<Cid> {\n\n return vec![\n\n template_key(b\"test content\"),\n\n template_key(b\"awesome test content \"),\n\n template_key(b\"even better test content\"),\n\n template_key(b\"the best test content out there\"),\n\n ];\n\n}\n\n\n", "file_path": "utils/test_utils/src/chain_structures.rs", "rank": 32, "score": 238971.19180476601 }, { "content": "fn notify_pledge_change<BS, RT>(rt: &mut RT, pledge_delta: &BigInt) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n if !pledge_delta.is_zero() {\n\n rt.send(\n\n &*STORAGE_POWER_ACTOR_ADDR,\n\n PowerMethod::UpdatePledgeTotal as u64,\n\n &Serialized::serialize(BigIntSer(pledge_delta))?,\n\n &TokenAmount::zero(),\n\n )?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 33, "score": 237862.73716698238 }, { "content": "fn decode_str(cid_str: &str) -> Result<Vec<u8>, Error> {\n\n static IPFS_DELIMETER: &str = \"/ipfs/\";\n\n\n\n let hash = match cid_str.find(IPFS_DELIMETER) {\n\n Some(index) => &cid_str[index + IPFS_DELIMETER.len()..],\n\n _ => cid_str,\n\n };\n\n\n\n if hash.len() < 2 {\n\n return Err(Error::InputTooShort);\n\n }\n\n\n\n let (_, decoded) = if Version::is_v0_str(hash) {\n\n // TODO: could avoid the roundtrip here and just use underlying\n\n // base-x base58btc decoder here.\n\n let hash = multibase::Base::Base58Btc.code().to_string() + hash;\n\n\n\n multibase::decode(hash)\n\n } else {\n\n multibase::decode(hash)\n", "file_path": "ipld/cid/src/to_cid.rs", "rank": 34, "score": 237817.7856523296 }, { "content": "fn request_terminate_deals<BS, RT>(rt: &mut RT, deal_ids: Vec<DealID>) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n if deal_ids.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n rt.send(\n\n &*STORAGE_MARKET_ACTOR_ADDR,\n\n MarketMethod::OnMinerSectorsTerminate as u64,\n\n &Serialized::serialize(OnMinerSectorsTerminateParams { deal_ids })?,\n\n &TokenAmount::zero(),\n\n )?;\n\n Ok(())\n\n}\n", 
"file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 35, "score": 231053.17958076557 }, { "content": "/// Convert Link node into vector of Cids\n\nfn cids_from_links<V>(links: &[Option<Link<V>>; WIDTH]) -> Result<Vec<Cid>, Error> {\n\n links\n\n .iter()\n\n .filter_map(|c| match c {\n\n Some(Link::Cid(cid)) => Some(Ok(cid.clone())),\n\n Some(Link::Cached(_)) => Some(Err(Error::Cached)),\n\n None => None,\n\n })\n\n .collect()\n\n}\n\n\n\nimpl<V> Serialize for Node<V>\n\nwhere\n\n V: Clone + Serialize,\n\n{\n\n fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: ser::Serializer,\n\n {\n\n match &self {\n", "file_path": "ipld/amt/src/node.rs", "rank": 36, "score": 229977.55798445438 }, { "content": "/// Defines a TipsetKey used in testing\n\npub fn template_key(data: &[u8]) -> Cid {\n\n Cid::new_from_cbor(data, Blake2b256)\n\n}\n\n\n", "file_path": "utils/test_utils/src/chain_structures.rs", "rank": 37, "score": 229968.79406785534 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<Ipld, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_any(JSONVisitor)\n\n}\n\n\n", "file_path": "ipld/src/json.rs", "rank": 38, "score": 229636.69222739546 }, { "content": "/// Puts values from vector into shard array\n\nfn vec_to_values<V, T>(bmap: BitMap, values: Vec<V>) -> Result<[Option<T>; WIDTH], Error>\n\nwhere\n\n V: Clone,\n\n T: From<V>,\n\n{\n\n let mut r_arr: [Option<T>; WIDTH] = Default::default();\n\n\n\n let mut v_iter = values.iter();\n\n\n\n for (i, e) in r_arr.iter_mut().enumerate().take(WIDTH) {\n\n if bmap.get_bit(i as u64) {\n\n let value = v_iter.next().ok_or_else(|| Error::InvalidVecLength)?;\n\n *e = Some(<T>::from(value.clone()));\n\n }\n\n }\n\n\n\n Ok(r_arr)\n\n}\n\n\n", "file_path": "ipld/amt/src/node.rs", "rank": 39, "score": 228332.55911673533 }, { "content": "pub fn random_cid() -> Cid {\n\n Cid::new_from_cbor(&random_bytes(16), Blake2b256)\n\n}\n", "file_path": "ipld/graphsync/src/test_utils.rs", "rank": 40, "score": 224055.31636286253 }, { "content": "/// encode converts the address into a string\n\nfn encode(addr: &Address) -> String {\n\n match addr.protocol() {\n\n Protocol::Secp256k1 | Protocol::Actor | Protocol::BLS => {\n\n let ingest = addr.to_bytes();\n\n let mut bz = addr.payload_bytes();\n\n\n\n // payload bytes followed by calculated checksum\n\n bz.extend(checksum(&ingest));\n\n format!(\n\n \"{}{}{}\",\n\n addr.network.to_prefix(),\n\n addr.protocol().to_string(),\n\n ADDRESS_ENCODER.encode(bz.as_mut()),\n\n )\n\n }\n\n Protocol::ID => format!(\n\n \"{}{}{}\",\n\n addr.network.to_prefix(),\n\n addr.protocol().to_string(),\n\n from_leb_bytes(&addr.payload_bytes()).expect(\"should read encoded bytes\"),\n", "file_path": "vm/address/src/lib.rs", "rank": 41, "score": 223676.73898054616 }, { "content": "/// Returns the genesis block\n\npub fn genesis<DB>(db: &DB) -> Result<Option<BlockHeader>, Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n Ok(match db.read(GENESIS_KEY)? {\n\n Some(bz) => Some(BlockHeader::unmarshal_cbor(&bz)?),\n\n None => None,\n\n })\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 42, "score": 223482.22579200467 }, { "content": "/// Returns the heaviest tipset\n\npub fn get_heaviest_tipset<DB>(db: &DB) -> Result<Option<Tipset>, Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n match db.read(HEAD_KEY)? 
{\n\n Some(bz) => {\n\n let keys: Vec<Cid> = from_slice(&bz)?;\n\n Ok(Some(tipset_from_keys(db, &TipsetKeys::new(keys))?))\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 43, "score": 220397.25121308753 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<BigUint, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let mut bz: Vec<u8> = serde_bytes::Deserialize::deserialize(deserializer)?;\n\n if bz.is_empty() {\n\n return Ok(BigUint::default());\n\n }\n\n\n\n if bz.remove(0) != 0 {\n\n return Err(serde::de::Error::custom(\n\n \"First byte must be 0 to decode as BigUint\",\n\n ));\n\n }\n\n\n\n Ok(BigUint::from_bytes_be(&bz))\n\n}\n", "file_path": "utils/bigint/src/biguint_ser.rs", "rank": 44, "score": 219479.14789027275 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<BlockHeader, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n #[serde(rename_all = \"PascalCase\")]\n\n struct BlockHeaderDe {\n\n miner: String,\n\n #[serde(with = \"ticket::json\")]\n\n ticket: Ticket,\n\n #[serde(default, with = \"election_proof::json::opt\")]\n\n election_proof: Option<ElectionProof>,\n\n #[serde(default, with = \"beacon_entries::json::vec\")]\n\n beacon_entries: Vec<BeaconEntry>,\n\n #[serde(default, rename = \"WinPoStProof\", with = \"post::json::vec\")]\n\n win_post_proof: Vec<PoStProof>,\n\n #[serde(rename = \"Parents\", with = \"tipset_keys_json\")]\n\n parents: TipsetKeys,\n\n #[serde(rename = \"ParentWeight\")]\n\n weight: String,\n", "file_path": "blockchain/blocks/src/header/json.rs", "rank": 45, "score": 219479.14789027275 }, { "content": "/// Deserializes bytes into big int.\n\npub fn deserialize<'de, D>(deserializer: D) -> Result<BigInt, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let mut bz: Vec<u8> = serde_bytes::Deserialize::deserialize(deserializer)?;\n\n if bz.is_empty() {\n\n return Ok(BigInt::default());\n\n }\n\n let sign_byte = bz.remove(0);\n\n let sign: Sign = match sign_byte {\n\n 1 => Sign::Minus,\n\n 0 => Sign::Plus,\n\n _ => {\n\n return Err(serde::de::Error::custom(\n\n \"First byte must be valid sign (0, 1)\",\n\n ));\n\n }\n\n };\n\n Ok(BigInt::from_bytes_be(sign, &bz))\n\n}\n", "file_path": "utils/bigint/src/bigint_ser.rs", "rank": 46, "score": 219479.14789027275 }, { "content": "fn decode_v1_bytes(bz: &[u8]) -> Result<(Vec<u8>, Version, Codec), Error> {\n\n let mut cur = Cursor::new(bz);\n\n let raw_version = cur.read_varint()?;\n\n let raw_codec = cur.read_varint()?;\n\n\n\n let version = Version::from(raw_version)?;\n\n let codec = Codec::from(raw_codec)?;\n\n\n\n let hash = &bz[cur.position() as usize..];\n\n Ok((hash.to_vec(), version, codec))\n\n}\n\n\n", "file_path": "ipld/cid/src/to_cid.rs", "rank": 47, "score": 217623.23429864764 }, { "content": "/// Converts a toml file represented as a string to `S`\n\n///\n\n/// # Example\n\n/// ```\n\n/// use serde::Deserialize;\n\n/// use utils::read_toml;\n\n///\n\n/// #[derive(Deserialize)]\n\n/// struct Config {\n\n/// name: String\n\n/// };\n\n///\n\n/// let toml_string = \"name = \\\"forest\\\"\\n\";\n\n/// let config: Config = read_toml(toml_string).unwrap();\n\n/// assert_eq!(config.name, \"forest\");\n\n/// ```\n\npub fn read_toml<S>(toml_string: &str) -> Result<S>\n\nwhere\n\n for<'de> S: serde::de::Deserialize<'de>,\n\n{\n\n let new_struct: S = toml::from_str(toml_string)?;\n\n Ok(new_struct)\n\n}\n", "file_path": "node/utils/src/lib.rs", "rank": 48, 
"score": 217428.3759861131 }, { "content": "pub fn serialize<S>(ipld: &Ipld, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match &ipld {\n\n Ipld::Null => serializer.serialize_none(),\n\n Ipld::Bool(bool) => serializer.serialize_bool(*bool),\n\n Ipld::Integer(i128) => serializer.serialize_i128(*i128),\n\n Ipld::Float(f64) => serializer.serialize_f64(*f64),\n\n Ipld::String(string) => serializer.serialize_str(&string),\n\n Ipld::Bytes(bytes) => serialize(\n\n &ipld!({ \"/\": { BYTES_JSON_KEY: multibase::encode(Base::Base64, bytes) } }),\n\n serializer,\n\n ),\n\n Ipld::List(list) => {\n\n let wrapped = list.iter().map(|ipld| IpldJsonRef(ipld));\n\n serializer.collect_seq(wrapped)\n\n }\n\n Ipld::Map(map) => {\n\n let wrapped = map.iter().map(|(key, ipld)| (key, IpldJsonRef(ipld)));\n\n serializer.collect_map(wrapped)\n\n }\n\n Ipld::Link(cid) => serialize(&ipld!({ \"/\": cid.to_string() }), serializer),\n\n }\n\n}\n\n\n", "file_path": "ipld/src/json.rs", "rank": 49, "score": 215739.46773305294 }, { "content": "/// Uses an optional file path or the default genesis to parse the genesis and determine if\n\n/// chain store has existing data for the given genesis.\n\npub fn initialize_genesis<BS>(\n\n genesis_fp: &Option<String>,\n\n chain_store: &mut ChainStore<BS>,\n\n) -> Result<(Tipset, String), Box<dyn StdError>>\n\nwhere\n\n BS: BlockStore,\n\n{\n\n let genesis = match genesis_fp {\n\n Some(path) => {\n\n let file = File::open(path).expect(\"Could not open genesis file\");\n\n let reader = BufReader::new(file);\n\n process_car(reader, chain_store)?\n\n }\n\n None => {\n\n debug!(\"No specified genesis in config. Using default genesis.\");\n\n let bz = include_bytes!(\"devnet.car\");\n\n let reader = BufReader::new(bz.as_ref());\n\n process_car(reader, chain_store)?\n\n }\n\n };\n", "file_path": "forest/src/cli/genesis.rs", "rank": 50, "score": 214877.29713094665 }, { "content": "fn cids_from_messages<T: Cbor>(messages: &[T]) -> Result<Vec<Cid>, EncodingError> {\n\n messages.iter().map(Cbor::cid).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use async_std::sync::Sender;\n\n use beacon::MockBeacon;\n\n use blocks::BlockHeader;\n\n use db::MemoryDB;\n\n use forest_libp2p::NetworkEvent;\n\n use std::sync::Arc;\n\n use test_utils::{construct_blocksync_response, construct_messages, construct_tipset};\n\n\n\n fn chain_syncer_setup(\n\n db: Arc<MemoryDB>,\n\n ) -> (ChainSyncer<MemoryDB, MockBeacon>, Sender<NetworkEvent>) {\n\n let chain_store = ChainStore::new(db);\n\n\n", "file_path": "blockchain/chain_sync/src/sync.rs", "rank": 51, "score": 214529.32831823302 }, { "content": "/// Gets the home directory of the current system.\n\n/// Will return correct path for windows, linux, and osx.\n\n///\n\n/// # Panics\n\n/// We will panic if we cannot determine a home directory.\n\npub fn get_home_dir() -> String {\n\n home_dir().unwrap().to_str().unwrap().to_owned()\n\n}\n\n\n", "file_path": "node/utils/src/lib.rs", "rank": 52, "score": 212088.70911923423 }, { "content": "pub fn u64_key(k: u64) -> BytesKey {\n\n let mut bz = unsigned_varint::encode::u64_buffer();\n\n unsigned_varint::encode::u64(k, &mut bz);\n\n bz.to_vec().into()\n\n}\n\n\n", "file_path": "vm/actor/src/lib.rs", "rank": 53, "score": 210418.36901213066 }, { "content": "pub fn put_messages<DB, T: Cbor>(db: &DB, msgs: &[T]) -> Result<(), Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n for m in msgs {\n\n db.put(m, Blake2b256)\n\n .map_err(|e| Error::Other(e.to_string()))?;\n\n }\n\n 
Ok(())\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 54, "score": 209801.86053951568 }, { "content": "pub fn serialize<S>(m: &BlockHeader, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n #[derive(Serialize)]\n\n #[serde(rename_all = \"PascalCase\")]\n\n struct BlockHeaderSer<'a> {\n\n miner: String,\n\n #[serde(with = \"ticket::json\")]\n\n ticket: &'a Ticket,\n\n #[serde(with = \"election_proof::json::opt\")]\n\n election_proof: &'a Option<ElectionProof>,\n\n #[serde(with = \"beacon_entries::json::vec\")]\n\n beacon_entries: &'a [BeaconEntry],\n\n #[serde(rename = \"WinPoStProof\", with = \"post::json::vec\")]\n\n win_post_proof: &'a [PoStProof],\n\n #[serde(rename = \"Parents\", with = \"tipset_keys_json\")]\n\n parents: &'a TipsetKeys,\n\n #[serde(rename = \"ParentWeight\")]\n\n weight: String,\n", "file_path": "blockchain/blocks/src/header/json.rs", "rank": 55, "score": 209426.07665964376 }, { "content": "#[test]\n\nfn v0_error() {\n\n let bad = \"QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zIII\";\n\n assert_eq!(Cid::from_raw_cid(bad), Err(Error::ParsingError));\n\n}\n\n\n", "file_path": "ipld/cid/tests/base_cid_tests.rs", "rank": 56, "score": 209277.28335682984 }, { "content": "pub fn serialize<S>(int: &BigUint, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::Serializer,\n\n{\n\n let mut bz = int.to_bytes_be();\n\n\n\n // Insert positive sign byte at start of encoded bytes if non-zero\n\n if bz == [0] {\n\n bz = Vec::new()\n\n } else {\n\n bz.insert(0, 0);\n\n }\n\n\n\n // Serialize as bytes\n\n serde_bytes::Serialize::serialize(&bz, serializer)\n\n}\n\n\n", "file_path": "utils/bigint/src/biguint_ser.rs", "rank": 57, "score": 206475.19184804644 }, { "content": "/// Serializes big int as bytes following Filecoin spec.\n\npub fn serialize<S>(int: &BigInt, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::Serializer,\n\n{\n\n let (sign, mut bz) = int.to_bytes_be();\n\n\n\n // Insert sign byte at start of encoded bytes\n\n match sign {\n\n Sign::Minus => bz.insert(0, 1),\n\n Sign::Plus => bz.insert(0, 0),\n\n Sign::NoSign => bz = Vec::new(),\n\n }\n\n\n\n // Serialize as bytes\n\n serde_bytes::Serialize::serialize(&bz, serializer)\n\n}\n\n\n", "file_path": "utils/bigint/src/bigint_ser.rs", "rank": 58, "score": 206475.19184804644 }, { "content": "/// piece_commitment_v1_to_cid converts a comm_p to a CID\n\n/// -- it is just a helper function that is equivalent to\n\n/// data_commitment_v1_to_cid.\n\npub fn piece_commitment_v1_to_cid(comm_p: &Commitment) -> Cid {\n\n data_commitment_v1_to_cid(comm_p)\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 59, "score": 204298.41897927076 }, { "content": "/// replica_commitment_v1_to_cid converts a raw data commitment to a CID\n\n/// by adding:\n\n/// - serialization type of raw\n\n/// - hashing type of Filecoin sealed hashing function v1 (0xfc2)\n\npub fn replica_commitment_v1_to_cid(comm_r: &Commitment) -> Cid {\n\n commitment_to_cid(comm_r, FilecoinMultihashCode::SealedV1)\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 60, "score": 204297.7617790703 }, { "content": "/// data_commitment_v1_to_cid converts a raw data commitment to a CID\n\n/// by adding:\n\n/// - serialization type of raw\n\n/// - hashing type of Filecoin unsealed hashing function v1 (0xfc2)\n\npub fn data_commitment_v1_to_cid(comm_d: &Commitment) -> Cid {\n\n commitment_to_cid(comm_d, FilecoinMultihashCode::UnsealedV1)\n\n}\n\n\n", "file_path": 
"utils/commcid/src/lib.rs", "rank": 61, "score": 204297.7617790703 }, { "content": "/// Shared logic between the DefaultRuntime and the Interpreter.\n\n/// It invokes methods on different Actors based on the Message.\n\npub fn internal_send<BS, SYS>(\n\n runtime: &mut DefaultRuntime<'_, '_, '_, '_, '_, BS, SYS>,\n\n msg: &UnsignedMessage,\n\n _gas_cost: i64,\n\n) -> Result<Serialized, ActorError>\n\nwhere\n\n BS: BlockStore,\n\n SYS: Syscalls,\n\n{\n\n runtime.charge_gas(\n\n runtime\n\n .price_list()\n\n .on_method_invocation(msg.value(), msg.method_num()),\n\n )?;\n\n\n\n // TODO: we need to try to recover here and try to create account actor\n\n let to_actor = runtime.get_actor(msg.to())?;\n\n\n\n if msg.value() != &0u8.into() {\n\n transfer(runtime.state, &msg.from(), &msg.to(), &msg.value())\n", "file_path": "vm/interpreter/src/default_runtime.rs", "rank": 62, "score": 202387.60978042812 }, { "content": "/// Builds the transport stack that LibP2P will communicate over\n\npub fn build_transport(local_key: Keypair) -> Boxed<(PeerId, StreamMuxerBox), Error> {\n\n let transport = libp2p::tcp::TcpConfig::new().nodelay(true);\n\n let transport = libp2p::dns::DnsConfig::new(transport).unwrap();\n\n transport\n\n .upgrade(core::upgrade::Version::V1)\n\n .authenticate(secio::SecioConfig::new(local_key))\n\n .multiplex(core::upgrade::SelectUpgrade::new(\n\n yamux::Config::default(),\n\n mplex::MplexConfig::new(),\n\n ))\n\n .map(|(peer, muxer), _| (peer, core::muxing::StreamMuxerBox::new(muxer)))\n\n .timeout(Duration::from_secs(20))\n\n .map_err(|err| Error::new(ErrorKind::Other, err))\n\n .boxed()\n\n}\n\n\n", "file_path": "node/forest_libp2p/src/service.rs", "rank": 63, "score": 197825.20095978957 }, { "content": "// Tests whether a code CID represents an actor that can be an external principal: i.e. 
an account or multisig.\n\n// We could do something more sophisticated here: https://github.com/filecoin-project/specs-actors/issues/178\n\npub fn is_principal(code: &Cid) -> bool {\n\n CALLER_TYPES_SIGNABLE.iter().any(|c| c == code)\n\n}\n", "file_path": "vm/actor/src/builtin/codes.rs", "rank": 64, "score": 197520.43622433284 }, { "content": "/// Does some basic checks on the Message to see if the fields are valid.\n\nfn check_message(msg: &UnsignedMessage) -> Result<(), String> {\n\n if msg.gas_limit() == 0 {\n\n return Err(\"Message has no gas limit set\".to_owned());\n\n }\n\n if msg.value() == &BigUint::zero() {\n\n return Err(\"Message has no value set\".to_owned());\n\n }\n\n if msg.gas_price() == &BigUint::zero() {\n\n return Err(\"Message has no gas price set\".to_owned());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "vm/interpreter/src/vm.rs", "rank": 65, "score": 195767.22972795868 }, { "content": "fn construct_and_verify<BS: BlockStore>(rt: &mut MockRuntime<'_, BS>) {\n\n rt.expect_validate_caller_addr(&[SYSTEM_ACTOR_ADDR.clone()]);\n\n let ret = rt\n\n .call(\n\n &*REWARD_ACTOR_CODE_ID,\n\n METHOD_CONSTRUCTOR,\n\n &Serialized::default(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(Serialized::default(), ret);\n\n rt.verify();\n\n}\n", "file_path": "vm/actor/tests/reward_actor_test.rs", "rank": 66, "score": 193578.53304410382 }, { "content": "fn construct_and_verify<BS: BlockStore>(rt: &mut MockRuntime<'_, BS>) {\n\n rt.expect_validate_caller_addr(&[SYSTEM_ACTOR_ADDR.clone()]);\n\n let params = ConstructorParams {\n\n network_name: \"mock\".to_string(),\n\n };\n\n let ret = rt\n\n .call(\n\n &*INIT_ACTOR_CODE_ID,\n\n METHOD_CONSTRUCTOR,\n\n &Serialized::serialize(&params).unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(Serialized::default(), ret);\n\n rt.verify();\n\n\n\n let state_data: State = rt.get_state().unwrap();\n\n\n\n // Gets the Result(CID)\n\n let empty_map = Multimap::from_root(rt.store, &state_data.address_map)\n\n .unwrap()\n\n .root();\n\n\n\n assert_eq!(empty_map.unwrap(), state_data.address_map);\n\n assert_eq!(FIRST_NON_SINGLETON_ADDR, state_data.next_id);\n\n assert_eq!(\"mock\".to_string(), state_data.network_name);\n\n}\n\n\n", "file_path": "vm/actor/tests/init_actor_test.rs", "rank": 67, "score": 193578.53304410382 }, { "content": "fn verify_blocks<'a, I>(headers: I) -> Result<(), Error>\n\nwhere\n\n I: IntoIterator<Item = &'a BlockHeader>,\n\n{\n\n let mut headers = headers.into_iter();\n\n let first_header = headers.next().ok_or(Error::NoBlocks)?;\n\n\n\n let verify = |predicate: bool, message: &'static str| {\n\n if predicate {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidTipset(message.to_string()))\n\n }\n\n };\n\n\n\n for header in headers {\n\n verify(\n\n header.parents() == first_header.parents(),\n\n \"parent cids are not equal\",\n\n )?;\n", "file_path": "blockchain/blocks/src/tipset.rs", "rank": 68, "score": 193020.99208336676 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]\n\nstruct KeyValuePair<K>(K, Ipld);\n\n\n\nimpl<K> KeyValuePair<K> {\n\n pub fn key(&self) -> &K {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<K> KeyValuePair<K> {\n\n pub fn new(key: K, value: Ipld) -> Self {\n\n KeyValuePair(key, value)\n\n }\n\n}\n\n\n\n/// Key type to be used to isolate usage of unsafe code and allow non utf-8 bytes to be\n\n/// serialized as a string.\n\n#[derive(Eq, PartialOrd, Clone, Debug, Serialize, Deserialize)]\n\n#[serde(transparent)]\n\npub struct BytesKey(#[serde(with = \"serde_bytes\")] pub Vec<u8>);\n\n\n", "file_path": 
"ipld/hamt/src/lib.rs", "rank": 69, "score": 192930.5905869636 }, { "content": "fn validate_deal<BS, RT>(rt: &RT, deal: &ClientDealProposal) -> Result<(), ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n deal_proposal_is_internally_valid(rt, deal)?;\n\n\n\n if rt.curr_epoch() > deal.proposal.start_epoch {\n\n return Err(ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n \"Deal start epoch has already elapsed.\".to_owned(),\n\n ));\n\n };\n\n\n\n let (min_dur, max_dur) = deal_duration_bounds(deal.proposal.piece_size);\n\n if deal.proposal.duration() < min_dur || deal.proposal.duration() > max_dur {\n\n return Err(ActorError::new(\n\n ExitCode::ErrIllegalArgument,\n\n \"Deal duration out of bounds.\".to_owned(),\n\n ));\n", "file_path": "vm/actor/src/builtin/market/mod.rs", "rank": 70, "score": 192413.99598940602 }, { "content": "fn assert_count<V, BS>(a: &mut Amt<V, BS>, c: u64)\n\nwhere\n\n V: Clone + Serialize + DeserializeOwned + PartialEq,\n\n BS: BlockStore,\n\n{\n\n assert_eq!(a.count(), c);\n\n}\n\n\n", "file_path": "ipld/amt/tests/amt_tests.rs", "rank": 71, "score": 192093.28126464295 }, { "content": "/// Recursively traverses cache through Cid links.\n\nfn write_recursive<BS>(\n\n base: &BS,\n\n cache: &HashMap<Cid, Vec<u8>>,\n\n cid: &Cid,\n\n) -> Result<(), Box<dyn StdError>>\n\nwhere\n\n BS: BlockStore,\n\n{\n\n // Skip identity and Filecoin commitment Cids\n\n let ch = cid.hash.algorithm();\n\n if ch == Code::Identity\n\n || ch == Code::Custom(FilecoinMultihashCode::SealedV1 as u64)\n\n || ch == Code::Custom(FilecoinMultihashCode::UnsealedV1 as u64)\n\n {\n\n return Ok(());\n\n }\n\n\n\n let raw_cid_bz = cid.to_bytes();\n\n let raw_bz = cache\n\n .get(cid)\n", "file_path": "ipld/blockstore/src/buffered.rs", "rank": 72, "score": 191418.521716526 }, { "content": "fn epoch_tick_and_verify<BS: BlockStore>(rt: &mut MockRuntime<'_, BS>) {\n\n rt.expect_validate_caller_addr(&[*SYSTEM_ACTOR_ADDR]);\n\n let ret = rt\n\n .call(&*CRON_ACTOR_CODE_ID, 2, &Serialized::default())\n\n .unwrap();\n\n assert_eq!(Serialized::default(), ret);\n\n rt.verify();\n\n}\n", "file_path": "vm/actor/tests/cron_actor_test.rs", "rank": 73, "score": 191300.08383745945 }, { "content": "/// Converts a raw commitment hash to a CID\n\n/// by adding:\n\n/// - serialization type of raw\n\n/// - the given filecoin hash type\n\npub fn commitment_to_cid(commitment: &Commitment, code: FilecoinMultihashCode) -> Cid {\n\n let mh = multihash::wrap(multihash::Code::Custom(code as u64), commitment);\n\n\n\n Cid::new_v1(Codec::Raw, mh)\n\n}\n\n\n", "file_path": "utils/commcid/src/lib.rs", "rank": 74, "score": 190523.69206188663 }, { "content": "pub fn open<DB>(db: &mut DB)\n\nwhere\n\n DB: DatabaseService,\n\n{\n\n db.open().unwrap();\n\n}\n\n\n", "file_path": "node/db/tests/subtests/mod.rs", "rank": 75, "score": 190176.36685248837 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct CidMap {\n\n #[serde(rename = \"/\")]\n\n cid: String,\n\n}\n\n\n\npub mod vec {\n\n use super::*;\n\n use forest_json_utils::GoVecVisitor;\n\n use serde::ser::SerializeSeq;\n\n\n\n pub fn serialize<S>(m: &[Cid], serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut seq = serializer.serialize_seq(Some(m.len()))?;\n\n for e in m {\n\n seq.serialize_element(&CidJsonRef(e))?;\n\n }\n\n seq.end()\n\n }\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Cid>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_any(GoVecVisitor::<Cid, 
CidJson>::new())\n\n }\n\n}\n", "file_path": "ipld/cid/src/json.rs", "rank": 76, "score": 186101.8487501815 }, { "content": "fn assert_get<V, BS>(a: &mut Amt<V, BS>, i: u64, v: &V)\n\nwhere\n\n V: Clone + Serialize + DeserializeOwned + PartialEq + Debug,\n\n BS: BlockStore,\n\n{\n\n assert_eq!(&a.get(i).unwrap().unwrap(), v);\n\n}\n\n\n", "file_path": "ipld/amt/tests/amt_tests.rs", "rank": 77, "score": 184276.18666913774 }, { "content": "fn construct_and_verify<BS: BlockStore>(rt: &mut MockRuntime<'_, BS>, params: &ConstructorParams) {\n\n rt.expect_validate_caller_addr(&[*SYSTEM_ACTOR_ADDR]);\n\n let ret = rt\n\n .call(\n\n &*CRON_ACTOR_CODE_ID,\n\n 1,\n\n &Serialized::serialize(&params).unwrap(),\n\n )\n\n .unwrap();\n\n assert_eq!(Serialized::default(), ret);\n\n rt.verify();\n\n}\n\n\n", "file_path": "vm/actor/tests/cron_actor_test.rs", "rank": 78, "score": 180672.32289267756 }, { "content": "/// Read file as a `Vec<u8>`\n\npub fn read_file_to_vec(path: &str) -> Result<Vec<u8>> {\n\n let mut file = File::open(path)?;\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer)?;\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "node/utils/src/lib.rs", "rank": 79, "score": 179805.14812987932 }, { "content": "/// Recursively explores Ipld for links and calls a function with a reference to the Cid.\n\nfn for_each_link<F>(ipld: &Ipld, cb: &F) -> Result<(), Box<dyn StdError>>\n\nwhere\n\n F: Fn(&Cid) -> Result<(), Box<dyn StdError>>,\n\n{\n\n match ipld {\n\n Ipld::Link(c) => cb(&c)?,\n\n Ipld::List(arr) => {\n\n for item in arr {\n\n for_each_link(item, cb)?\n\n }\n\n }\n\n Ipld::Map(map) => {\n\n for v in map.values() {\n\n for_each_link(v, cb)?\n\n }\n\n }\n\n _ => (),\n\n }\n\n Ok(())\n\n}\n", "file_path": "ipld/blockstore/src/buffered.rs", "rank": 80, "score": 179692.93443034083 }, { "content": "fn roundtrip_test(gsm: GraphSyncMessage) -> Result<(), Box<dyn Error>> {\n\n // Encode to protobuf bytes\n\n let pbm = proto::Message::try_from(gsm.clone())?;\n\n let proto_bytes: Vec<u8> = pbm.write_to_bytes()?;\n\n\n\n // Decode to proto type\n\n let d_pbm = parse_from_bytes::<proto::Message>(&proto_bytes)?;\n\n assert_eq!(&d_pbm, &pbm);\n\n\n\n // Decode back to original type\n\n let d_gsm = GraphSyncMessage::try_from(d_pbm)?;\n\n assert_eq!(d_gsm, gsm);\n\n Ok(())\n\n}\n\n\n", "file_path": "ipld/graphsync/tests/proto_roundtrip.rs", "rank": 81, "score": 175760.60174541728 }, { "content": "/// Decode an RLE+ encoded bitset into its original form.\n\npub fn decode(enc: &BitVec) -> Result<BitVec, &'static str> {\n\n let mut decoded = BitVec::new();\n\n\n\n if enc.is_empty() {\n\n return Ok(decoded);\n\n }\n\n\n\n // Header\n\n if enc.len() < 3 {\n\n return Err(\"Failed to decode, bytes must be at least 3 bits long\");\n\n }\n\n\n\n // read version (expects \"00\")\n\n if *enc.get(0).unwrap() || *enc.get(1).unwrap() {\n\n return Err(\"Invalid version, expected '00'\");\n\n }\n\n\n\n // read the inital bit\n\n let mut cur = *enc.get(2).unwrap();\n\n\n", "file_path": "utils/bitfield/src/rleplus.rs", "rank": 82, "score": 173584.89570926107 }, { "content": "#[test]\n\nfn invalid_string_addresses() {\n\n struct StringAddrVec {\n\n input: &'static str,\n\n expected: Error,\n\n }\n\n let test_vectors = &[\n\n StringAddrVec {\n\n input: \"Q2gfvuyh7v2sx3patm5k23wdzmhyhtmqctasbr23y\",\n\n expected: Error::UnknownNetwork,\n\n },\n\n StringAddrVec {\n\n input: \"t4gfvuyh7v2sx3patm5k23wdzmhyhtmqctasbr23y\",\n\n expected: Error::UnknownProtocol,\n\n },\n\n StringAddrVec {\n\n input: 
\"t2gfvuyh7v2sx3patm5k23wdzmhyhtmqctasbr24y\",\n\n expected: Error::InvalidChecksum,\n\n },\n\n StringAddrVec {\n\n input: \"t0banananananannnnnnnnn\",\n", "file_path": "vm/address/tests/address_test.rs", "rank": 83, "score": 172444.0007045562 }, { "content": "/// Returns a tuple of UnsignedMessage and SignedMessages from their Cid\n\npub fn block_messages_from_cids<DB>(\n\n db: &DB,\n\n bls_cids: &[Cid],\n\n secp_cids: &[Cid],\n\n) -> Result<(Vec<UnsignedMessage>, Vec<SignedMessage>), Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n let bls_msgs: Vec<UnsignedMessage> = messages_from_cids(db, bls_cids)?;\n\n let secp_msgs: Vec<SignedMessage> = messages_from_cids(db, secp_cids)?;\n\n\n\n Ok((bls_msgs, secp_msgs))\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 84, "score": 172205.07058439968 }, { "content": "/// Requests the storage market actor compute the unsealed sector CID from a sector's deals.\n\nfn request_unsealed_sector_cid<BS, RT>(\n\n rt: &mut RT,\n\n sector_type: RegisteredSealProof,\n\n deal_ids: Vec<DealID>,\n\n) -> Result<Cid, ActorError>\n\nwhere\n\n BS: BlockStore,\n\n RT: Runtime<BS>,\n\n{\n\n let ret = rt.send(\n\n &*STORAGE_MARKET_ACTOR_ADDR,\n\n MarketMethod::ComputeDataCommitment as u64,\n\n &Serialized::serialize(ComputeDataCommitmentParams {\n\n sector_type,\n\n deal_ids,\n\n })?,\n\n &TokenAmount::zero(),\n\n )?;\n\n let unsealed_cid: Cid = ret.deserialize()?;\n\n Ok(unsealed_cid)\n\n}\n", "file_path": "vm/actor/src/builtin/miner/mod.rs", "rank": 85, "score": 170626.85754529256 }, { "content": "fn set_genesis<DB>(db: &DB, header: BlockHeader) -> Result<(), Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n db.write(GENESIS_KEY, header.marshal_cbor()?)?;\n\n Ok(persist_headers(db, &[header])?)\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 86, "score": 169721.21145372264 }, { "content": "fn persist_headers<DB>(db: &DB, bh: &[BlockHeader]) -> Result<(), Error>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n let mut raw_header_data = Vec::new();\n\n let mut keys = Vec::new();\n\n // loop through block to push blockheader raw data and cid into vector to be stored\n\n for header in bh {\n\n if !db.exists(header.cid().key())? {\n\n raw_header_data.push(header.marshal_cbor()?);\n\n keys.push(header.cid().key());\n\n }\n\n }\n\n\n\n Ok(db.bulk_write(&keys, &raw_header_data)?)\n\n}\n\n\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 87, "score": 169721.21145372264 }, { "content": "/// Writes a string to a specified file. Creates the desired path if it does not exist.\n\n/// Note: `path` and `filename` are appended to produce the resulting file path.\n\npub fn write_to_file(message: &[u8], path: &str, file_name: &str) -> Result<()> {\n\n // Create path if it doesn't exist\n\n create_dir_all(Path::new(path))?;\n\n let join = format!(\"{}{}\", path, file_name);\n\n let mut file = File::create(join)?;\n\n file.write_all(message)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "node/utils/src/lib.rs", "rank": 88, "score": 167851.64186202636 }, { "content": "/// Returns the weight of provided tipset\n\nfn weight<DB>(db: &DB, ts: &Tipset) -> Result<BigUint, String>\n\nwhere\n\n DB: BlockStore,\n\n{\n\n let mut tpow = BigUint::zero();\n\n let state = StateTree::new_from_root(db, ts.parent_state())?;\n\n if let Some(act) = state.get_actor(&*STORAGE_POWER_ACTOR_ADDR)? 
{\n\n if let Some(state) = db\n\n .get::<PowerState>(&act.state)\n\n .map_err(|e| e.to_string())?\n\n {\n\n tpow = state.total_quality_adj_power;\n\n }\n\n }\n\n let log2_p = if tpow > BigUint::zero() {\n\n BigUint::from(tpow.bits() - 1)\n\n } else {\n\n return Err(\n\n \"All power in the net is gone. You network might be disconnected, or the net is dead!\"\n\n .to_owned(),\n", "file_path": "blockchain/chain/src/store/chain_store.rs", "rank": 89, "score": 166879.65401884585 }, { "content": "#[test]\n\nfn v0_handling() {\n\n let old = \"QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n\";\n\n let cid = Cid::from_raw_cid(old).unwrap();\n\n\n\n assert_eq!(cid.version, Version::V0);\n\n assert_eq!(cid.to_string(), old);\n\n}\n\n\n", "file_path": "ipld/cid/tests/base_cid_tests.rs", "rank": 90, "score": 166655.00981974526 }, { "content": "#[test]\n\nfn empty_string() {\n\n assert_eq!(Cid::from_raw_cid(\"\"), Err(Error::InputTooShort));\n\n}\n\n\n", "file_path": "ipld/cid/tests/base_cid_tests.rs", "rank": 91, "score": 166629.68470332693 }, { "content": "#[test]\n\nfn key_len_validations() {\n\n // Short\n\n assert!(Address::new_bls(&[8; BLS_PUB_LEN - 1]).is_err());\n\n assert!(Address::new_secp256k1(&[8; SECP_PUB_LEN - 1]).is_err());\n\n\n\n // Equal\n\n assert!(Address::new_bls(&[8; BLS_PUB_LEN]).is_ok());\n\n assert!(Address::new_secp256k1(&[8; SECP_PUB_LEN]).is_ok());\n\n\n\n // Long\n\n assert!(Address::new_bls(&[8; BLS_PUB_LEN + 1]).is_err());\n\n assert!(Address::new_secp256k1(&[8; SECP_PUB_LEN + 1]).is_err());\n\n}\n\n\n", "file_path": "vm/address/tests/address_test.rs", "rank": 92, "score": 166576.04649475875 }, { "content": "#[cfg(feature = \"identity-hash\")]\n\nfn add_and_remove_keys(\n\n bit_width: u8,\n\n keys: &[&[u8]],\n\n extra_keys: &[&[u8]],\n\n expected: &'static str,\n\n) {\n\n let all: Vec<(BytesKey, u8)> = keys\n\n .iter()\n\n .enumerate()\n\n // Value doesn't matter for this test, only checking cids against previous\n\n .map(|(i, k)| (k.to_vec().into(), i as u8))\n\n .collect();\n\n\n\n let store = db::MemoryDB::default();\n\n\n\n let mut hamt: Hamt<BytesKey, _> = Hamt::new_with_bit_width(&store, bit_width);\n\n\n\n for (k, v) in all.iter() {\n\n hamt.set(k.clone(), *v).unwrap();\n\n }\n", "file_path": "ipld/hamt/tests/hamt_tests.rs", "rank": 93, "score": 166516.48437617117 }, { "content": "/// Checksum calculates the 4 byte checksum hash\n\npub fn checksum(ingest: &[u8]) -> Vec<u8> {\n\n blake2b_variable(ingest, CHECKSUM_HASH_LEN)\n\n}\n\n\n", "file_path": "vm/address/src/lib.rs", "rank": 94, "score": 165700.94804335816 }, { "content": "#[inline]\n\nfn check_empty_params(params: &Serialized) -> Result<(), EncodingError> {\n\n params.deserialize::<[u8; 0]>().map(|_| ())\n\n}\n\n\n\n/// Create a map\n", "file_path": "vm/actor/src/lib.rs", "rank": 95, "score": 163407.17328613685 }, { "content": "pub fn random_blocks(len: usize, block_size: usize) -> (Vec<Vec<u8>>, Vec<Cid>) {\n\n let blocks: Vec<_> = iter::repeat_with(|| random_bytes(block_size))\n\n .take(len)\n\n .collect();\n\n let links = blocks\n\n .iter()\n\n .map(|block| Cid::new_from_cbor(block, Blake2b256))\n\n .collect();\n\n (blocks, links)\n\n}\n\n\n", "file_path": "ipld/graphsync/src/test_utils.rs", "rank": 96, "score": 155679.14405217447 }, { "content": "/// Aggregates and verifies bls signatures collectively\n\npub fn verify_bls_aggregate(data: &[&[u8]], pub_keys: &[&[u8]], aggregate_sig: &Signature) -> bool {\n\n // If the number of public keys and data does not match, then return false\n\n if data.len() != 
pub_keys.len() {\n\n return false;\n\n }\n\n\n\n let sig = match BlsSignature::from_bytes(aggregate_sig.bytes()) {\n\n Ok(v) => v,\n\n Err(_) => return false,\n\n };\n\n\n\n let pk_map_results: Result<Vec<_>, _> =\n\n pub_keys.iter().map(|x| BlsPubKey::from_bytes(x)).collect();\n\n\n\n let pks = match pk_map_results {\n\n Ok(v) => v,\n\n Err(_) => return false,\n\n };\n\n\n\n let hashed_data: Vec<G2> = data.iter().map(|x| bls_hash(x)).collect();\n\n\n\n // DOes the aggregate verification\n\n verify(&sig, &hashed_data[..], &pks[..])\n\n}\n\n\n", "file_path": "crypto/src/signature.rs", "rank": 97, "score": 155628.20786177687 }, { "content": "fn empty_cid() -> Cid {\n\n Cid::new_from_cbor(&[], Identity)\n\n}\n\n\n", "file_path": "node/forest_libp2p/tests/hello_test.rs", "rank": 98, "score": 155317.58971470155 }, { "content": "#[test]\n\nfn annotating_struct_json() {\n\n #[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n struct TestStruct {\n\n #[serde(with = \"json\")]\n\n cid_one: Cid,\n\n #[serde(with = \"json\")]\n\n cid_two: Cid,\n\n other: String,\n\n }\n\n let test_json = r#\"\n\n {\n\n \"cid_one\": {\n\n \"/\": \"QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n\"\n\n },\n\n \"cid_two\": {\n\n \"/\": \"bafy2bzaceaa466o2jfc4g4ggrmtf55ygigvkmxvkr5mvhy4qbwlxetbmlkqjk\"\n\n },\n\n \"other\": \"Some data\"\n\n }\n\n \"#;\n", "file_path": "ipld/cid/tests/json_tests.rs", "rank": 99, "score": 154403.91727540878 } ]
Rust
src/client-rs/src/profile/profile_file.rs
vinimin/fluvio
142c050a2f1aaa83aeda19705fedd670fffaf1a1
use std::env;
use std::fs::read_to_string;
use std::io::Error as IoError;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};

use dirs::home_dir;
use serde::Deserialize;

use types::defaults::{CLI_CONFIG_PATH, CLI_DEFAULT_PROFILE, CLI_PROFILES_DIR};
use types::defaults::{CONFIG_FILE_EXTENTION, FLV_FLUVIO_HOME};
use types::socket_helpers::ServerAddress;

use super::ProfileConfig;

#[derive(Debug, PartialEq, Deserialize)]
pub struct ProfileFile {
    pub version: String,
    sc: Option<TargetAddr>,
    spu: Option<TargetAddr>,
    kf: Option<TargetAddr>,
}

#[derive(Debug, PartialEq, Deserialize)]
struct TargetAddr {
    pub host: String,
    pub port: u16,
}

impl Into<ServerAddress> for TargetAddr {
    fn into(self) -> ServerAddress {
        ServerAddress::new(self.host, self.port)
    }
}

impl ProfileFile {
    pub fn from_file<T: AsRef<Path>>(path: T) -> Result<Self, IoError> {
        let file_str: String = read_to_string(path)?;
        toml::from_str(&file_str)
            .map_err(|err| IoError::new(ErrorKind::InvalidData, format!("{}", err)))
    }
}

impl From<ProfileFile> for ProfileConfig {
    fn from(file: ProfileFile) -> ProfileConfig {
        Self {
            sc_addr: file.sc.map(|addr| addr.into()),
            spu_addr: file.spu.map(|addr| addr.into()),
            kf_addr: file.kf.map(|addr| addr.into())
        }
    }
}

pub fn build_cli_profile_file_path(profile_name: Option<&String>) -> Result<PathBuf, IoError> {
    let base_path = match env::var(FLV_FLUVIO_HOME) {
        Ok(val) => {
            let mut user_dir = PathBuf::new();
            user_dir.push(val);
            user_dir
        }
        Err(_) => {
            if let Some(mut home_dir) = home_dir() {
                home_dir.push(CLI_CONFIG_PATH);
                home_dir
            } else {
                return Err(IoError::new(
                    ErrorKind::InvalidInput,
                    "can't get home directory",
                ));
            }
        }
    };

    let mut file_path = base_path.join(CLI_PROFILES_DIR);
    if profile_name.is_some() {
        file_path.push(profile_name.unwrap());
    } else {
        file_path.push(CLI_DEFAULT_PROFILE);
    }
    file_path.set_extension(CONFIG_FILE_EXTENTION);

    Ok(file_path)
}

#[cfg(test)]
pub mod test {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn test_default_profile_ok() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/default.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_ok());

        let expected = ProfileFile {
            version: "1.0".to_owned(),
            sc: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9033,
            }),
            spu: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9034,
            }),
            kf: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9093,
            }),
        };
        assert_eq!(result.unwrap(), expected);
    }

    #[test]
    fn test_default_profile_not_found() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/notfound.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_err());
        assert_eq!(
            format!("{}", result.unwrap_err()),
            "No such file or directory (os error 2)"
        );
    }

    #[test]
    fn test_invalid_profile_file() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/invalid.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_err());
        assert!(
            format!("{}", result.unwrap_err()).contains(
                "missing field `port` for key `sc`")
        );
    }

    #[test]
    fn test_build_default_profile_file_path() {
        let file_path = build_cli_profile_file_path(None);
        assert_eq!(file_path.is_ok(), true);

        let mut expected_file_path = PathBuf::new();
        expected_file_path.push(home_dir().unwrap());
        expected_file_path.push(".fluvio/profiles/default.toml");
        assert_eq!(file_path.unwrap(), expected_file_path);
    }

    #[test]
    fn test_build_custom_cli_profile_file_path() {
        let file_path = build_cli_profile_file_path(Some(&"custom".to_owned()));
        assert_eq!(file_path.is_ok(), true);

        let mut expected_file_path = PathBuf::new();
        expected_file_path.push(home_dir().unwrap());
        expected_file_path.push(".fluvio/profiles/custom.toml");
        assert_eq!(file_path.unwrap(), expected_file_path);
    }
}
use std::env;
use std::fs::read_to_string;
use std::io::Error as IoError;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};

use dirs::home_dir;
use serde::Deserialize;

use types::defaults::{CLI_CONFIG_PATH, CLI_DEFAULT_PROFILE, CLI_PROFILES_DIR};
use types::defaults::{CONFIG_FILE_EXTENTION, FLV_FLUVIO_HOME};
use types::socket_helpers::ServerAddress;

use super::ProfileConfig;

#[derive(Debug, PartialEq, Deserialize)]
pub struct ProfileFile {
    pub version: String,
    sc: Option<TargetAddr>,
    spu: Option<TargetAddr>,
    kf: Option<TargetAddr>,
}

#[derive(Debug, PartialEq, Deserialize)]
struct TargetAddr {
    pub host: String,
    pub port: u16,
}

impl Into<ServerAddress> for TargetAddr {
    fn into(self) -> ServerAddress {
        ServerAddress::new(self.host, self.port)
    }
}

impl ProfileFile {
    pub fn from_file<T: AsRef<Path>>(path: T) -> Result<Self, IoError> {
        let file_str: String = read_to_string(path)?;
        toml::from_str(&file_str)
            .map_err(|err| IoError::new(ErrorKind::InvalidData, format!("{}", err)))
    }
}

impl From<ProfileFile> for ProfileConfig {
    fn from(file: ProfileFile) -> ProfileConfig {
        Self {
            sc_addr: file.sc.map(|addr| addr.into()),
            spu_addr: file.spu.map(|addr| addr.into()),
            kf_addr: file.kf.map(|addr| addr.into())
        }
    }
}

pub fn build_cli_profile_file_path(profile_name: Option<&String>) -> Result<PathBuf, IoError> {
    let base_path = match env::var(FLV_FLUVIO_HOME) {
        Ok(val) => {
            let mut user_dir = PathBuf::new();
            user_dir.push(val);
            user_dir
        }
        Err(_) =
        expected_file_path.push(home_dir().unwrap());
        expected_file_path.push(".fluvio/profiles/custom.toml");
        assert_eq!(file_path.unwrap(), expected_file_path);
    }
}
> {
            if let Some(mut home_dir) = home_dir() {
                home_dir.push(CLI_CONFIG_PATH);
                home_dir
            } else {
                return Err(IoError::new(
                    ErrorKind::InvalidInput,
                    "can't get home directory",
                ));
            }
        }
    };

    let mut file_path = base_path.join(CLI_PROFILES_DIR);
    if profile_name.is_some() {
        file_path.push(profile_name.unwrap());
    } else {
        file_path.push(CLI_DEFAULT_PROFILE);
    }
    file_path.set_extension(CONFIG_FILE_EXTENTION);

    Ok(file_path)
}

#[cfg(test)]
pub mod test {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn test_default_profile_ok() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/default.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_ok());

        let expected = ProfileFile {
            version: "1.0".to_owned(),
            sc: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9033,
            }),
            spu: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9034,
            }),
            kf: Some(TargetAddr {
                host: "127.0.0.1".to_owned(),
                port: 9093,
            }),
        };
        assert_eq!(result.unwrap(), expected);
    }

    #[test]
    fn test_default_profile_not_found() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/notfound.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_err());
        assert_eq!(
            format!("{}", result.unwrap_err()),
            "No such file or directory (os error 2)"
        );
    }

    #[test]
    fn test_invalid_profile_file() {
        let mut profile_path = PathBuf::new();
        profile_path.push("./test-data/profiles/invalid.toml");

        let result = ProfileFile::from_file(profile_path);
        assert!(result.is_err());
        assert!(
            format!("{}", result.unwrap_err()).contains(
                "missing field `port` for key `sc`")
        );
    }

    #[test]
    fn test_build_default_profile_file_path() {
        let file_path = build_cli_profile_file_path(None);
        assert_eq!(file_path.is_ok(), true);

        let mut expected_file_path = PathBuf::new();
        expected_file_path.push(home_dir().unwrap());
        expected_file_path.push(".fluvio/profiles/default.toml");
        assert_eq!(file_path.unwrap(), expected_file_path);
    }

    #[test]
    fn test_build_custom_cli_profile_file_path() {
        let file_path = build_cli_profile_file_path(Some(&"custom".to_owned()));
        assert_eq!(file_path.is_ok(), true);

        let mut expected_file_path = PathBuf::new();
random
[ { "content": "// converts a host/port to SocketAddress\n\npub fn host_port_to_socket_addr(host: &str, port: u16) -> Result<SocketAddr, IoError> {\n\n let addr_string = format!(\"{}:{}\", host, port);\n\n string_to_socket_addr(&addr_string)\n\n}\n\n\n", "file_path": "src/types/src/socket_helpers.rs", "rank": 0, "score": 397873.3315491762 }, { "content": "/// Takes an unformatted string of code and converts to Rustmformatted string\n\npub fn rustify_code(input: String) -> Result<String, IoError> {\n\n // configuration\n\n let mut config = Config::default();\n\n config.set().emit_mode(EmitMode::Stdout);\n\n config.set().verbose(Verbosity::Quiet);\n\n\n\n // output buffer\n\n let mut buf: Vec<u8> = vec![];\n\n\n\n // create a session and transform string (enclowse in braket, so it gets dropped\n\n // after call, otherwise buf cannot be used)\n\n {\n\n let mut session = Session::new(config, Some(&mut buf));\n\n if let Err(err) = session.format(Input::Text(input)) {\n\n return Err(IoError::new(\n\n ErrorKind::InvalidInput,\n\n format!(\"cannot format output: {}\", err),\n\n ));\n\n }\n\n }\n", "file_path": "src/kf-protocol/kf-protocol-build/src/format_code.rs", "rank": 1, "score": 318247.22391762387 }, { "content": "/// Take Request/Reponse code, format to file and inject into directory\n\npub fn code_to_output_file(file: &mut File, code: String) -> Result<(),IoError> {\n\n file.write_all(code.as_bytes())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/output_to_file.rs", "rank": 2, "score": 300346.9383395971 }, { "content": "/// Converts about to sized (90 column) code comment\n\npub fn field_comment(about: &Option<String>) -> Option<String> {\n\n if let Some(text) = about {\n\n let data = fill(text, 92);\n\n let mut comment = String::new();\n\n for line in data.lines() {\n\n comment.push_str(&format!(\"/// {}\\n\", line));\n\n }\n\n Some(comment)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 3, "score": 259967.87588827056 }, { "content": "/// convert string to socket addr\n\npub fn string_to_socket_addr(addr_string: &str) -> Result<SocketAddr, IoError> {\n\n debug!(\"resolving host: {}\",addr_string);\n\n match addr_string.to_socket_addrs() {\n\n Err(err) => {\n\n error!(\"error resolving addr: {} {}\",addr_string,err);\n\n Err(err)\n\n },\n\n Ok(mut addrs_iter) => {\n\n match addrs_iter.next() {\n\n Some(addr) => {\n\n debug!(\"resolved: {}\",addr);\n\n Ok(addr)\n\n },\n\n None => {\n\n error!(\"error resolving addr: {}\",addr_string);\n\n Err(IoError::new(\n\n ErrorKind::InvalidInput,\n\n format!(\"host/port cannot be resolved {}\", addr_string).as_str(),\n\n ))\n\n }\n", "file_path": "src/types/src/socket_helpers.rs", "rank": 4, "score": 257200.8748323252 }, { "content": "/// Check if all header and field keys are known\n\npub fn check_header_and_field_keys_are_known(dir: &String) -> Result<(), IoError> {\n\n let file_pairs = FilePairs::new(dir)?;\n\n\n\n for file_pair in &file_pairs.pairs {\n\n check_known_header_and_field_keys(&file_pair.req_file)?;\n\n check_known_header_and_field_keys(&file_pair.res_file)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/check_keys.rs", "rank": 5, "score": 257180.91640415622 }, { "content": "/// Generate field value, if nullableVersion is set and Typpe is String or Array, make it an Option\n\npub fn field_value(field_type: &SpecFieldType, nullable_ver: &Option<NullableVersions>) -> String {\n\n if 
nullable_ver.is_some() {\n\n if field_type.is_string_or_array() {\n\n return format!(\"Option<{}>\", field_type.value());\n\n }\n\n }\n\n field_type.value()\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 6, "score": 250022.59236795816 }, { "content": "/// Generate field name, replace with entity_type if map_key is set\n\npub fn field_name(name: &String, map_key: &Option<bool>, entity_type: &Option<String>) -> String {\n\n let new_name = if let Some(map_key) = map_key {\n\n if *map_key && entity_type.is_some() {\n\n &entity_type.as_ref().unwrap()\n\n } else {\n\n name\n\n }\n\n } else {\n\n name\n\n };\n\n\n\n new_name.to_snake_case()\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 7, "score": 242870.6769307013 }, { "content": "/// Create and open file\n\npub fn make_file_path(dir: &str, filename: &str) -> Result<String,IoError> {\n\n // check if exists\n\n metadata(dir)?;\n\n\n\n // construct file\n\n let mut target_file = PathBuf::new();\n\n target_file.push(dir);\n\n target_file.push(make_rust_filename!(filename));\n\n\n\n Ok(target_file.to_string_lossy().to_string())\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/output_to_file.rs", "rank": 8, "score": 240817.72707418998 }, { "content": "fn decode_vec<T, M>(len: i32, item: &mut Vec<M>, src: &mut T, version: Version) -> Result<(), Error>\n\nwhere\n\n T: Buf,\n\n M: Default + Decoder,\n\n{\n\n for _ in 0..len {\n\n let mut value = <M>::default();\n\n value.decode(src, version)?;\n\n item.push(value);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl<M> Decoder for Option<Vec<M>>\n\nwhere\n\n M: Default + Decoder,\n\n{\n\n fn decode<T>(&mut self, src: &mut T, version: Version) -> Result<(), Error>\n\n where\n", "file_path": "src/kf-protocol/kf-protocol-core/src/decoder.rs", "rank": 9, "score": 240752.69964770376 }, { "content": "fn find_spu_image() -> String {\n\n std::env::var(\"SPU_IMAGE\").expect(\"SPU IMAGE must be passed as env\")\n\n}\n\n\n", "file_path": "src/sc-k8/src/operator/conversion.rs", "rank": 10, "score": 237057.88221506937 }, { "content": "// macro function to look-up all fields in Tera templates\n\nfn make_contains_field(all_fields: BTreeMap<String, Vec<String>>) -> GlobalFn {\n\n Box::new(move |args| -> Result<Value, TeraError> {\n\n // lookup name\n\n let req_name = match args.get(\"name\") {\n\n Some(some_name) => match from_value::<String>(some_name.clone()) {\n\n Ok(name) => Some(name),\n\n Err(_) => None,\n\n },\n\n None => None,\n\n };\n\n\n\n // lookup value\n\n let req_value = match args.get(\"value\") {\n\n Some(some_value) => match from_value::<String>(some_value.clone()) {\n\n Ok(value) => Some(value),\n\n Err(_) => None,\n\n },\n\n None => None,\n\n };\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/format_code.rs", "rank": 11, "score": 236875.3693498255 }, { "content": "/// Generates a random authorization token\n\npub fn generate_auth_token() -> (String, String) {\n\n const CHARSET: &[u8] = b\"abcdefghijklmnopqrstuvwxyz\\\n\n 0123456789\";\n\n\n\n const ID_SIZE: usize = 6;\n\n let token_name: String = (0..ID_SIZE)\n\n .map(|_| {\n\n let idx = thread_rng().gen_range(0, CHARSET.len());\n\n // This is safe because `idx` is in range of `CHARSET`\n\n char::from(unsafe { *CHARSET.get_unchecked(idx) })\n\n })\n\n .collect();\n\n\n\n const SECRET_SIZE: usize = 16;\n\n let token_secret: String = (0..SECRET_SIZE)\n\n .map(|_| {\n\n let idx = thread_rng().gen_range(0, CHARSET.len());\n\n // This is safe because 
`idx` is in range of `CHARSET`\n\n char::from(unsafe { *CHARSET.get_unchecked(idx) })\n\n })\n\n .collect();\n\n\n\n (token_name, token_secret)\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "src/utils/src/generators.rs", "rank": 12, "score": 236832.5961532609 }, { "content": "/// Loop through SpecFields and generate GUI friendly Fields & Structs\n\npub fn build_fields_and_structs(\n\n maybe_s_fields: &Option<SpecFields>,\n\n parent_structures: &mut Vec<Structure>,\n\n) -> Vec<Field> {\n\n let mut fields: Vec<Field> = vec![];\n\n let mut collect_structures: Vec<Structure> = vec![];\n\n\n\n if let Some(s_fields) = maybe_s_fields {\n\n for s_field in s_fields {\n\n // check if field and subtree should be skipped\n\n if skip_field(s_field) {\n\n continue;\n\n }\n\n\n\n // generate field\n\n fields.push(generate_field(s_field));\n\n\n\n // generate structs (if sub-fields)\n\n if s_field.fields.is_some() {\n\n let structure = generate_struct(\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 13, "score": 235716.39927638334 }, { "content": "fn decode_string<T>(len: i16, src: &mut T) -> Result<String, Error>\n\nwhere\n\n T: Buf,\n\n{\n\n let mut value = String::default();\n\n let read_size = src.take(len as usize).reader().read_to_string(&mut value)?;\n\n\n\n if read_size != len as usize {\n\n return Err(Error::new(ErrorKind::UnexpectedEof, \"not enough string\"));\n\n }\n\n Ok(value)\n\n}\n\n\n\nimpl Decoder for String {\n\n fn decode<T>(&mut self, src: &mut T, _version: Version) -> Result<(), Error>\n\n where\n\n T: Buf,\n\n {\n\n if src.remaining() < 2 {\n\n return Err(Error::new(\n", "file_path": "src/kf-protocol/kf-protocol-core/src/decoder.rs", "rank": 14, "score": 235590.22000022986 }, { "content": "/// Generate Structure\n\npub fn generate_struct<'a>(\n\n name: &String,\n\n maybe_s_fields: &Option<SpecFields>,\n\n parent_structures: &mut Vec<Structure>,\n\n) -> Structure {\n\n let name = name.clone();\n\n let fields = build_fields_and_structs(maybe_s_fields, parent_structures);\n\n Structure { name, fields }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 15, "score": 235394.2191293096 }, { "content": "/// Process server based on output type\n\npub fn format_spu_response_output<O>(\n\n out: std::sync::Arc<O>,\n\n spus: Vec<ScSpuMetadata>,\n\n output_type: OutputType,\n\n) -> Result<(), CliError>\n\n where O: Terminal\n\n{\n\n\n\n if spus.len() > 0 {\n\n out.render_list(&spus,output_type)?;\n\n } else {\n\n t_println!(out,\"no spu\");\n\n }\n\n \n\n Ok(())\n\n}\n\n\n\n// -----------------------------------\n\n// Output Handlers\n\n// -----------------------------------\n", "file_path": "src/cli/src/spu/helpers/list_output.rs", "rank": 16, "score": 233188.94947071644 }, { "content": "// converts a host/port to SocketAddress\n\npub fn server_to_socket_addr(server_addr: &ServerAddress) -> Result<SocketAddr, IoError> {\n\n host_port_to_socket_addr(&server_addr.host, server_addr.port)\n\n}\n\n\n", "file_path": "src/types/src/socket_helpers.rs", "rank": 17, "score": 229483.94315805176 }, { "content": "/// Encode Spus metadata into SPU FLV response\n\npub fn spu_store_metadata_to_spu_response(name: &str, spu: &SpuKV) -> FlvFetchSpuResponse {\n\n let public_ep = spu.public_endpoint();\n\n let private_ep = spu.private_endpoint();\n\n let flv_spu_type = match spu.spec().spu_type {\n\n SpuType::Custom => FlvSpuType::Custom,\n\n SpuType::Managed => FlvSpuType::Managed,\n\n };\n\n let flv_resolution = match 
spu.status().resolution {\n\n SpuResolution::Online => FlvSpuResolution::Online,\n\n SpuResolution::Offline => FlvSpuResolution::Offline,\n\n SpuResolution::Init => FlvSpuResolution::Init,\n\n };\n\n\n\n let flv_spu = FlvFetchSpu {\n\n id: *spu.id(),\n\n spu_type: flv_spu_type,\n\n public_ep: FlvEndPointMetadata {\n\n host: public_ep.host_string(),\n\n port: public_ep.port,\n\n },\n", "file_path": "src/sc-core/src/services/public_api/flv/fetch_spu_req.rs", "rank": 18, "score": 229461.41535835946 }, { "content": "/// Encode all partitions for a topic in Kf format.\n\npub fn topic_partitions_to_kf_partitions(\n\n partitions: &PartitionLocalStore,\n\n topic: &String,\n\n) -> Vec<MetadataResponsePartition> {\n\n let mut kf_partitions = vec![];\n\n\n\n for (idx, partition) in partitions.topic_partitions(topic).iter().enumerate() {\n\n kf_partitions.push(MetadataResponsePartition {\n\n error_code: KfErrorCode::None,\n\n partition_index: idx as i32,\n\n leader_id: partition.spec.leader,\n\n leader_epoch: 0,\n\n replica_nodes: partition.spec.replicas.clone(),\n\n isr_nodes: partition.status.live_replicas().clone(),\n\n offline_replicas: partition.status.offline_replicas(),\n\n })\n\n }\n\n\n\n kf_partitions\n\n}\n", "file_path": "src/sc-core/src/services/public_api/kf/metadata_req.rs", "rank": 19, "score": 227815.7229804679 }, { "content": "/// Generates a random authorization secret\n\npub fn generate_secret() -> String {\n\n const CHARSET: &[u8] = b\"abcdefghijklmnopqrstuvwxyz\\\n\n 0123456789\";\n\n\n\n const SECRET_SIZE: usize = 16;\n\n let secret: String = (0..SECRET_SIZE)\n\n .map(|_| {\n\n let idx = thread_rng().gen_range(0, CHARSET.len());\n\n // This is safe because `idx` is in range of `CHARSET`\n\n char::from(unsafe { *CHARSET.get_unchecked(idx) })\n\n })\n\n .collect();\n\n\n\n secret\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "src/utils/src/generators.rs", "rank": 20, "score": 226748.97787696577 }, { "content": "/// Given an API key, it returns max_version. 
None if not found\n\npub fn lookup_version(api_key: ScApiKey, versions: &ApiVersions) -> Option<i16> {\n\n for version in versions {\n\n if version.api_key == api_key as i16 {\n\n return Some(version.max_version);\n\n }\n\n }\n\n None\n\n}\n\n\n\n// -----------------------------------\n\n// ApiVersionsRequest\n\n// -----------------------------------\n\n\n\n#[derive(Decode, Encode, Default, Debug)]\n\npub struct ApiVersionsRequest {}\n\n\n\n// -----------------------------------\n\n// ApiVersionsResponse\n\n// -----------------------------------\n\n\n", "file_path": "src/api/sc-api/src/api_versions.rs", "rank": 21, "score": 223673.53548324102 }, { "content": "/// Generate a random client group key (50001 to 65535)\n\npub fn generate_group_id() -> String {\n\n format!(\"fluvio-consumer-{}\", thread_rng().gen_range(50001, 65535))\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "src/utils/src/generators.rs", "rank": 22, "score": 223565.94083783455 }, { "content": "/// Generates a random key\n\npub fn generate_random_key() -> String {\n\n const CHARSET: &[u8] = b\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\\\n\n abcdefghijklmnopqrstuvwxyz\\\n\n 0123456789)(*&^%$#@!~\";\n\n const SIZE: usize = 32;\n\n let key: String = (0..SIZE)\n\n .map(|_| {\n\n let idx = thread_rng().gen_range(0, CHARSET.len());\n\n // This is safe because `idx` is in range of `CHARSET`\n\n char::from(unsafe { *CHARSET.get_unchecked(idx) })\n\n })\n\n .collect();\n\n\n\n key\n\n}\n", "file_path": "src/utils/src/generators.rs", "rank": 23, "score": 223565.94083783455 }, { "content": "// start server\n\npub fn create_public_server(addr: SocketAddr, ctx: DefaultSharedGlobalContext) -> PublicApiServer\n\n{\n\n info!(\"starting SPU: {} at public service at: {}\", ctx.local_spu_id(),addr);\n\n\n\n KfApiServer::new(addr, ctx, PublicService::new())\n\n}\n", "file_path": "src/spu-server/src/services/public/mod.rs", "rank": 24, "score": 223044.58582033578 }, { "content": "// start server\n\npub fn create_internal_server(addr: SocketAddr, ctx: DefaultSharedGlobalContext) -> InternalApiServer\n\n {\n\n info!(\"starting SPU: {} at internal service at: {}\", ctx.local_spu_id(),addr);\n\n\n\n KfApiServer::new(addr, ctx, InternalService::new())\n\n}\n", "file_path": "src/spu-server/src/services/internal/mod.rs", "rank": 25, "score": 223044.58582033578 }, { "content": "// store varaint\n\npub fn variant_encode<T>(buf: &mut T,num: i64) -> Result<(),Error> where T:BufMut {\n\n\n\n let mut v = (num << 1) ^ (num >> 31);\n\n\n\n while (v & 0xffffff80) != 0 {\n\n let b: u8 = (( v & 0x7f) | 0x80) as u8;\n\n if buf.remaining_mut() == 0 {\n\n return Err(Error::new(ErrorKind::UnexpectedEof,\"varint encoding no more bytes left\"));\n\n }\n\n buf.put_u8(b);\n\n v >>= 7;\n\n }\n\n if buf.remaining_mut() == 0 {\n\n return Err(Error::new(ErrorKind::UnexpectedEof,\"varint encoding no more bytes left\"));\n\n }\n\n buf.put_u8(v as u8);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-core/src/varint.rs", "rank": 27, "score": 220960.34242475132 }, { "content": "/// Return separator for hex dump\n\npub fn hex_dump_separator() -> String {\n\n \"------------------------------------------------------------------------------\\n\".to_owned()\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::bytes_to_hex_dump;\n\n\n\n #[test]\n\n fn test_bytes_to_hex_dump() {\n\n let records: Vec<u8> = vec![\n\n 123, 10, 32, 32, 32, 32, 34, 112, 97, 114, 116, 105, 116, 105, 111, 110, 115, 34, 58,\n\n 32, 91, 10, 32, 32, 32, 32, 32, 32, 32, 32, 123, 10, 32, 32, 32, 32, 32, 
32, 32, 32,\n\n 32, 32, 32, 32, 34, 105, 100, 34, 58, 32, 48, 44, 10, 32, 32, 32, 32, 32, 32, 32, 32,\n\n 32, 32, 32, 32, 34, 114, 101, 112, 108, 105, 99, 97, 115, 34, 58, 32, 91, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 49, 44, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 50, 44, 10, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 48, 48, 51, 10, 32, 32, 32,\n\n 32, 32, 32, 32, 32, 32, 32, 32, 32, 93, 10, 32, 32, 32, 32, 32, 32, 32, 32, 125, 10,\n", "file_path": "src/cli/src/common/hex_dump.rs", "rank": 28, "score": 217616.75563014988 }, { "content": "/// Validate SPU (Streaming Processing Unit) cli inputs and generate SpuConfig\n\npub fn get_spu_config() -> Result<SpuConfig, IoError> {\n\n let cfg = SpuOpt::from_args();\n\n\n\n // generate config from file from user-file or default (if exists)\n\n let spu_config_file = match &cfg.config_file {\n\n Some(cfg_file) => Some(SpuConfigFile::from_file(&cfg_file)?),\n\n None => SpuConfigFile::from_default_file()?,\n\n };\n\n\n\n trace!(\"spu cli: {:#?}, file: {:#?}\",cfg,spu_config_file);\n\n // send config file and cli parameters to generate final config.\n\n SpuConfig::new_from_all(cfg, spu_config_file)\n\n}\n", "file_path": "src/spu-server/src/config/cli.rs", "rank": 29, "score": 215640.39815328288 }, { "content": "/// Augment code\n\n/// * add Warning,\n\n/// * run Rust formatter (if not skipped)\n\nfn augment_code(content: &String, skip_formatter: bool) -> Result<String, IoError> {\n\n let with_warning = format!(\"{}{}\", WARNING, content);\n\n let new_content = if skip_formatter {\n\n with_warning\n\n } else {\n\n rustify_code(with_warning)?\n\n };\n\n\n\n Ok(new_content)\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/generate_code.rs", "rank": 30, "score": 212036.8542893997 }, { "content": "/// Reads file and removes comments for clean json\n\nfn file_to_clean_json(file_name: &PathBuf) -> String {\n\n // Access file\n\n let f = match File::open(file_name) {\n\n Ok(f) => f,\n\n Err(err) => {\n\n eprintln!(\"Error: {}\", err);\n\n process::exit(1);\n\n }\n\n };\n\n let file = BufReader::new(&f);\n\n\n\n // strip comments & collect everything else\n\n let mut result = String::new();\n\n for line in file.lines() {\n\n if let Ok(text) = line {\n\n let raw = text.trim_start();\n\n if raw.len() >= 2 && &raw[..2] == \"//\" {\n\n continue;\n\n }\n\n result.push_str(&text);\n\n }\n\n }\n\n\n\n result\n\n}\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_to_json.rs", "rank": 31, "score": 211514.5628434922 }, { "content": "// zigzag decoding\n\npub fn varint_decode<T>(buf: &mut T) -> Result<(i64,usize),Error> where T:Buf {\n\n\n\n let mut num: i64 = 0;\n\n let mut shift: usize = 0;\n\n\n\n loop {\n\n if buf.remaining() == 0 {\n\n return Err(Error::new(ErrorKind::UnexpectedEof,\"varint decoding no more bytes left\"));\n\n }\n\n\n\n let b = buf.get_u8();\n\n trace!(\"var byte: {:#X}\",b);\n\n\n\n num |= ((b & 0x7f) as i64) << shift;\n\n shift += 7;\n\n\n\n if b & 0x80 == 0 {\n\n break;\n\n }\n\n \n\n }\n\n\n\n Ok(( (num >> 1) ^ - (num & 1),shift/7))\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-core/src/varint.rs", "rank": 32, "score": 209876.98287667532 }, { "content": "/// Takes a filename and returns the message type without Request/Response\n\nfn message_from_filename(filename: &String) -> String {\n\n if let Some(req_idx) = filename.find(\"Request\") {\n\n filename[0..req_idx].to_string()\n\n } else if 
let Some(res_idx) = filename.find(\"Response\") {\n\n filename[0..res_idx].to_string()\n\n } else {\n\n filename.clone()\n\n }\n\n}\n\n\n\n// -----------------------------------\n\n// Test Cases\n\n// -----------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::path::Path;\n\n\n\n #[test]\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_pairs.rs", "rank": 33, "score": 209553.57200864662 }, { "content": "/// find status matching it,\n\nfn find_status(status: &mut Vec<ReplicaStatus>, spu: SpuId) -> Option<&'_ mut ReplicaStatus>{\n\n status.iter_mut().find(|status| status.spu == spu)\n\n}\n\n\n\n\n\n\n\n// -----------------------------------\n\n// Encode - from KV Partition Status\n\n// -----------------------------------\n\n\n\nimpl From<K8PartitionStatus> for PartitionStatus {\n\n fn from(kv_status: K8PartitionStatus) -> Self {\n\n Self {\n\n resolution: kv_status.resolution.into(),\n\n leader: kv_status.leader.into(),\n\n replicas: kv_status.replicas.into_iter().map(|lrs| lrs.into()).collect(),\n\n lsr: kv_status.lsr\n\n }\n\n }\n\n}\n", "file_path": "src/metadata/src/partition/status.rs", "rank": 34, "score": 206954.46462447138 }, { "content": "pub fn run_cli() -> Result<String, CliError> {\n\n\n\n run_block_on(async move{\n\n\n\n let terminal = Arc::new(PrintTerminal::new());\n\n \n\n match Root::from_args() {\n\n Root::Consume(consume) => process_consume_log(terminal.clone(),consume).await,\n\n Root::Produce(produce) => process_produce_record(terminal.clone(),produce).await,\n\n Root::SPU(spu) => process_spu(terminal.clone(),spu).await,\n\n Root::SPUGroup(spu_group) => process_spu_group(terminal.clone(),spu_group).await,\n\n Root::CustomSPU(custom_spu) => process_custom_spu(terminal.clone(),custom_spu).await,\n\n Root::Topic(topic) => process_topic(terminal.clone(),topic).await,\n\n Root::Advanced(advanced) => process_advanced(terminal.clone(),advanced).await,\n\n }\n\n })\n\n}\n\n\n\n\n\nuse crate::Terminal;\n\n\n\n\n", "file_path": "src/cli/src/root_cli.rs", "rank": 35, "score": 205059.23726217396 }, { "content": "pub fn flv_response_to_spu_metadata(flv_spus: Vec<FlvFetchSpuResponse>) -> Vec<ScSpuMetadata> {\n\n let mut sc_spus: Vec<ScSpuMetadata> = vec![];\n\n for flv_spu in flv_spus {\n\n sc_spus.push(ScSpuMetadata::new(flv_spu));\n\n }\n\n sc_spus\n\n}\n\n\n\n\n\nimpl ScSpuMetadata {\n\n pub fn new(fetch_spu_resp: FlvFetchSpuResponse) -> Self {\n\n\n\n let (f_spu,f_error_code,f_name) = (fetch_spu_resp.spu,fetch_spu_resp.error_code,fetch_spu_resp.name);\n\n // if spu is present, convert it\n\n let spu = if let Some(fetched_spu) = f_spu {\n\n Some(Spu::new(f_name.clone(), fetched_spu))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "src/cli/src/spu/helpers/list_metadata.rs", "rank": 36, "score": 204159.3247180337 }, { "content": "/// Lookup group coordinator\n\npub fn kf_consume_log_from_topic(\n\n server_addr: SocketAddr,\n\n cfg: ConsumeLogConfig,\n\n response_paramss: ResponseLogParams,\n\n) -> Result<(), CliError> {\n\n run_block_on(process_consume_log_from_topic_all(\n\n server_addr,\n\n cfg,\n\n response_paramss,\n\n ))\n\n}\n\n\n\n/// Processing engine to consume logs one topic & and multiple partitions.\n\n/// Step 1: Collection system information\n\n/// * Lookup API versions,\n\n/// * Request metadata\n\n/// * Fetch group coordinator\n\n/// Step 2: Create loop for Group keep-alives\n\n/// Step 3: Create loop for continuous log fetch\n\nasync fn process_consume_log_from_topic_all(\n", "file_path": 
"src/cli/src/consume/kf/kf_fetch_topic_all.rs", "rank": 37, "score": 199881.27817352995 }, { "content": "/// Run SPU Cli and return SPU configuration. Errors are consider fatal\n\n/// and the program exits.\n\npub fn process_spu_cli_or_exit() -> SpuConfig {\n\n match get_spu_config() {\n\n Err(err) => {\n\n print_cli_err!(err);\n\n process::exit(0x0100);\n\n }\n\n Ok(config) => config,\n\n }\n\n}\n\n\n", "file_path": "src/spu-server/src/config/cli.rs", "rank": 38, "score": 197448.4044877436 }, { "content": "pub fn main_loop() {\n\n // parse configuration (program exits on error)\n\n let spu_config = process_spu_cli_or_exit();\n\n\n\n println!(\n\n \"starting {}-spu services (id:{})\",\n\n spu_config.type_label(),\n\n spu_config.id\n\n );\n\n\n\n debug!(\"spu config: {:#?}\", spu_config);\n\n\n\n main(async {\n\n let (_ctx, internal_server, public_server) = create_services(spu_config, true, true);\n\n\n\n let _public_shutdown = internal_server.unwrap().run();\n\n let _private_shutdown = public_server.unwrap().run();\n\n\n\n println!(\"SPU Version: {} started successfully\", VERSION);\n\n });\n\n}\n\n\n", "file_path": "src/spu-server/src/start.rs", "rank": 39, "score": 196932.21798207515 }, { "content": "pub fn start_main() {\n\n flv_util::init_logger();\n\n main_loop();\n\n}\n", "file_path": "src/spu-server/src/lib.rs", "rank": 40, "score": 196932.21798207515 }, { "content": "/// create server and spin up services, but don't run server\n\npub fn create_services(\n\n local_spu: SpuConfig,\n\n internal: bool,\n\n public: bool,\n\n) -> (\n\n DefaultSharedGlobalContext,\n\n Option<InternalApiServer>,\n\n Option<PublicApiServer>,\n\n) {\n\n let ctx = FileReplicaContext::new_shared_context(local_spu);\n\n\n\n let public_ep_addr = ctx.config().public_socket_addr().clone();\n\n let private_ep_addr = ctx.config().private_socket_addr().clone();\n\n\n\n let public_server = if public {\n\n Some(create_public_server(public_ep_addr, ctx.clone()))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "src/spu-server/src/start.rs", "rank": 41, "score": 196932.21798207515 }, { "content": "/// Takes a u8 array of bytes and converts to hex dump\n\npub fn bytes_to_hex_dump(record: &Vec<u8>) -> String {\n\n let cols = 16;\n\n let record_cnt = record.len();\n\n let mut result = String::new();\n\n let mut collector = String::new();\n\n\n\n for row_idx in 0..record_cnt {\n\n // colunn index\n\n if row_idx % cols == 0 {\n\n result.push_str(&format!(\"{:08x}\", row_idx));\n\n }\n\n\n\n // spacing half way\n\n if row_idx % (cols / 2) == 0 {\n\n result.push_str(\" \");\n\n }\n\n\n\n // convert and add character to collector\n\n collector.push_str(&byte_to_string(&record[row_idx]));\n\n\n", "file_path": "src/cli/src/common/hex_dump.rs", "rank": 42, "score": 195723.92126636943 }, { "content": "/// Convert string to json\n\npub fn file_to_json(file_name: &PathBuf) -> JsonResult<Value> {\n\n let v = serde_json::from_str(&file_to_clean_json(&file_name))?;\n\n Ok(v)\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_to_json.rs", "rank": 43, "score": 195711.8210891087 }, { "content": "pub fn generate_service(spg: &SpuGroupSpec,name: &str) -> ServiceSpec {\n\n\n\n let spg_template = &spg.template.spec;\n\n let mut public_port = ServicePort {\n\n port: spg_template.public_endpoint.as_ref().map(|t|t.port).unwrap_or(SPU_PUBLIC_PORT),\n\n ..Default::default()\n\n };\n\n\n\n public_port.name = Some(\"public\".to_owned());\n\n let mut private_port = ServicePort {\n\n port: 
spg_template.private_endpoint.as_ref().map(|t|t.port).unwrap_or(SPU_PRIVATE_PORT),\n\n ..Default::default()\n\n };\n\n private_port.name = Some(\"private\".to_owned());\n\n\n\n\n\n let mut selector = HashMap::new();\n\n selector.insert(\"app\".to_owned(), SPU_DEFAULT_NAME.to_owned());\n\n selector.insert(\"group\".to_owned(),name.to_owned());\n\n\n\n ServiceSpec {\n\n cluster_ip: \"None\".to_owned(),\n\n ports: vec![public_port, private_port],\n\n selector: Some(selector),\n\n ..Default::default()\n\n }\n\n \n\n}", "file_path": "src/sc-k8/src/operator/conversion.rs", "rank": 44, "score": 195267.92295806808 }, { "content": "pub fn main_k8_loop() {\n\n println!(\"starting sc server with k8\");\n\n\n\n // parse configuration (program exits on error)\n\n let (sc_config, k8_config) = parse_cli_or_exit();\n\n\n\n println!(\"starting sc server with k8: {}\", VERSION);\n\n\n\n main(async move {\n\n // init k8 service\n\n let k8_client = new_shared(k8_config).expect(\"problem creating k8 client\");\n\n let namespace = sc_config.namespace.clone();\n\n let (ws_service, metadata) = start_main_loop(sc_config, k8_client).await;\n\n run_k8_operators(ws_service.clone(), namespace.clone(), metadata.owned_spus());\n\n\n\n println!(\"Streaming Controller started successfully\");\n\n });\n\n}\n", "file_path": "src/sc-k8/src/init.rs", "rank": 45, "score": 193347.4691185264 }, { "content": "/// create batches with produce and records count\n\npub fn create_batch_with_producer(producer: i64,records: u16) -> DefaultBatch {\n\n let mut batches = DefaultBatch::default();\n\n let header = batches.get_mut_header();\n\n header.magic = 2;\n\n header.producer_id = producer;\n\n header.producer_epoch = -1;\n\n\n\n for _ in 0..records {\n\n let mut record = DefaultRecord::default();\n\n let bytes: Vec<u8> = vec![10, 20];\n\n record.value = Some(bytes).into();\n\n batches.add_record(record);\n\n }\n\n \n\n batches\n\n}\n\n\n\n\n", "file_path": "src/storage/src/fixture.rs", "rank": 46, "score": 192502.2267425222 }, { "content": "pub fn create_partition_name(topic_name: &str, idx: &i32) -> String {\n\n format!(\"{}-{}\", topic_name.clone(), idx)\n\n}\n", "file_path": "src/types/src/partition.rs", "rank": 47, "score": 192028.09427093988 }, { "content": "/// Generate code and to individual files in directory\n\npub fn gen_code_and_output_to_dir(\n\n input_dir: &String,\n\n dir: &String,\n\n template: &TemplateFormat,\n\n skip_formatter: bool,\n\n) -> Result<(), IoError> {\n\n // ensure output directory exists\n\n if let Err(err) = metadata(dir) {\n\n return Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"{} - {}\", dir, err),\n\n ));\n\n }\n\n\n\n // each generate goes into its own file\n\n let file_pairs = FilePairs::new(input_dir)?;\n\n for file_pair in &file_pairs.pairs {\n\n match generate_code_from_files(file_pair, template) {\n\n Ok(content) => {\n\n let code = augment_code(&content, skip_formatter)?;\n", "file_path": "src/kf-protocol/kf-protocol-build/src/generate_code.rs", "rank": 48, "score": 191500.10603101418 }, { "content": "/// convert SpuGroup to Statefulset\n\npub fn convert_cluster_to_statefulset(\n\n group_spec: &SpuGroupSpec,\n\n metadata: &ObjectMeta,\n\n group_name: &str,\n\n group_svc_name: String,\n\n namespace: &str) \n\n -> InputK8Obj<StatefulSetSpec>\n\n{\n\n\n\n let statefulset_name = format!(\"flv-spg-{}\",group_name);\n\n let spec = generate_stateful(group_spec, group_name,group_svc_name,namespace);\n\n let owner_ref = metadata.make_owner_reference::<SpuGroupSpec>();\n\n \n\n InputK8Obj 
{\n\n api_version: StatefulSetSpec::api_version(),\n\n kind: StatefulSetSpec::kind(),\n\n metadata: InputObjectMeta {\n\n name: statefulset_name.clone(),\n\n namespace: metadata.namespace().to_string(),\n\n owner_references: vec![owner_ref],\n\n ..Default::default() \n\n },\n\n spec,\n\n ..Default::default()\n\n }\n\n\n\n}\n\n\n", "file_path": "src/sc-k8/src/operator/conversion.rs", "rank": 49, "score": 190350.44781206324 }, { "content": "pub fn run_k8_operators(\n\n k8_ws: K8WSUpdateService<K8Client>,\n\n namespace: String,\n\n spu_store: SharedSpuLocalStore,\n\n) {\n\n SpgOperator::new(k8_ws.own_client(), namespace.clone(), spu_store.clone()).run();\n\n SvcOperator::new(k8_ws, namespace, spu_store).run();\n\n}\n", "file_path": "src/sc-k8/src/operator/mod.rs", "rank": 50, "score": 190344.8426644686 }, { "content": "// Generate a file pointer form a filename\n\npub fn file_from_name<P>(filename: &P) -> Result<File, IoError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n match open_file(filename) {\n\n Ok(file) => Ok(file),\n\n Err(err) => Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"{} - {}\", filename.as_ref().display(), err),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/output_to_file.rs", "rank": 51, "score": 188509.20728313722 }, { "content": "/// return SC configuration or exist program.\n\npub fn parse_cli_or_exit() -> (ScConfig, K8Config) {\n\n match get_sc_config() {\n\n Err(err) => {\n\n print_cli_err!(err);\n\n process::exit(0x0100);\n\n }\n\n Ok(config) => config,\n\n }\n\n}\n\n\n\n// ---------------------------------------\n\n// Unit Tests\n\n// ---------------------------------------\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use std::net::{IpAddr, Ipv4Addr};\n\n use std::net::SocketAddr;\n\n use types::socket_helpers::EndPoint;\n\n use flv_sc_core::config::ScConfig;\n", "file_path": "src/sc-k8/src/cli.rs", "rank": 52, "score": 187556.08195952626 }, { "content": "// generates parts of the impl\n\n// \n\n// impl Default for TestRequest {\n\n// fn default() -> Self {\n\n// \n\n// Self {\n\n// field: 10,\n\n// field2: 20,\n\n//\n\n//\n\n// }\n\n//\n\nfn impl_default_impl(input: &DeriveInput, data: &DataStruct,name: &Ident) -> TokenStream {\n\n\n\n\n\n let generics = &input.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n \n\n match data.fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| {\n\n let fname = &f.ident;\n\n \n\n \n\n if let Some(default_attr) = find_attr(&f.attrs,\"fluvio_kf\") {\n\n\n\n if let Some(expr_str) = find_string_name_value(&default_attr, \"default\") {\n\n \n\n \n\n \n\n use std::str::FromStr;\n\n use syn::spanned::Spanned;\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/de.rs", "rank": 53, "score": 187268.24177955085 }, { "content": "/// Create and open file\n\npub fn open_file<P>(file_path: P) -> Result<File,IoError>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n Ok(OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .truncate(true)\n\n .open(file_path)?)\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/output_to_file.rs", "rank": 54, "score": 186561.95785818584 }, { "content": "// start server\n\npub fn create_internal_server(\n\n local_stores: ShareLocalStores,\n\n conn_mgr: SharedConnManager,\n\n conn_status_sender: Sender<SpuConnectionStatusChange>,\n\n lrs_sender: Sender<UpdateLrsRequest>,\n\n) -> InternalApiServer\n\n{\n\n let addr = local_stores.config().private_endpoint.addr.clone();\n\n let ctx = 
InternalContext::new(\n\n local_stores,\n\n conn_mgr,\n\n conn_status_sender,\n\n lrs_sender\n\n );\n\n info!(\"SC: starting internal services at: {}\", addr);\n\n\n\n KfApiServer::new(addr, Arc::new(ctx), ScInternalService::new())\n\n}\n", "file_path": "src/sc-core/src/services/private_api/mod.rs", "rank": 55, "score": 184724.44891839626 }, { "content": "///\n\n/// Generate replica map for a specific topic\n\n///\n\npub fn generate_replica_map_for_topic(\n\n spus: &SpuLocalStore,\n\n param: &TopicReplicaParam,\n\n from_index: Option<i32>,\n\n) -> ReplicaMap {\n\n \n\n let in_rack_count = spus.spus_in_rack_count();\n\n let start_index = from_index.unwrap_or(-1);\n\n\n\n // generate partition map (with our without rack assignment)\n\n if param.ignore_rack_assignment || in_rack_count == 0 {\n\n generate_partitions_without_rack(&spus,&param, start_index)\n\n } else {\n\n generate_partitions_with_rack_assignment(&spus, &param,start_index)\n\n }\n\n}\n\n\n", "file_path": "src/sc-core/src/core/topics/metadata.rs", "rank": 56, "score": 184724.44891839626 }, { "content": "// returns a tuple (topic_name, idx)\n\npub fn decompose_partition_name(partition_name: &str) -> Result<(String, i32), PartitionError> {\n\n let dash_pos = partition_name.rfind('-');\n\n if dash_pos.is_none() {\n\n return Err(PartitionError::InvalidSyntax(partition_name.to_owned()));\n\n }\n\n\n\n let pos = dash_pos.unwrap();\n\n if (pos + 1) >= partition_name.len() {\n\n return Err(PartitionError::InvalidSyntax(partition_name.to_owned()));\n\n }\n\n\n\n let topic_name = &partition_name[..pos];\n\n let idx_string = &partition_name[(pos + 1)..];\n\n let idx = match idx_string.parse::<i32>() {\n\n Ok(n) => n,\n\n Err(_) => {\n\n return Err(PartitionError::InvalidSyntax(partition_name.to_owned()));\n\n }\n\n };\n\n\n\n Ok((topic_name.to_string(), idx))\n\n}\n\n\n", "file_path": "src/types/src/partition.rs", "rank": 57, "score": 183872.22337112116 }, { "content": "#[proc_macro_derive(KfDefault, attributes(fluvio_kf))]\n\npub fn kf_default(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = generate_default_traits(&ast);\n\n expanded.into()\n\n}", "file_path": "src/kf-protocol/kf-protocol-derive/src/lib.rs", "rank": 58, "score": 183390.99679300227 }, { "content": "#[proc_macro_derive(Decode, attributes(varint, fluvio_kf))]\n\npub fn kf_decode(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = generate_decode_traits(&ast);\n\n expanded.into()\n\n}\n\n\n\n\n\n/// Custom derive for encoding structure or enum to bytes using Kafka protocol format.\n\n/// This assumes all fields(or enum variants) implement kafka encode traits.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use kf_protocol::Encoder;\n\n/// use kf_protocol::derive::Encode;\n\n///\n\n/// #[derive(Encode)]\n\n/// pub struct SimpleRecord {\n\n/// val: u8\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/lib.rs", "rank": 59, "score": 183390.1105470755 }, { "content": "#[proc_macro_derive(Encode, attributes(varint, fluvio_kf))]\n\npub fn kf_encode(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = generate_encode_traits(&ast);\n\n expanded.into()\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/lib.rs", "rank": 60, "score": 183390.1105470755 }, { "content": "#[proc_macro_derive(RequestApi, attributes(varint, fluvio_kf))]\n\npub fn kf_request(input: TokenStream) -> 
TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = generate_request_traits(&ast);\n\n expanded.into()\n\n}\n\n\n\n\n\n\n\n/// Custom derive for generating default structure\n\n/// \n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// #[derive(KfDefault)]\n\n/// #[fluvio_kf(default)]\n\n/// pub struct SimpleRecord {\n\n/// #[fluvio_kf(default = \"-1\" )]\n\n/// val: u8\n\n/// }\n\n/// \n\n/// fn main() {\n\n///\n\n/// let record = SimpleRecord::default;\n\n/// assert_eq!(record.val,-1);\n\n/// }\n\n/// ```\n\n///\n\n/// `default` assignment can be any Rust expression.\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/lib.rs", "rank": 61, "score": 183390.06754255889 }, { "content": "#[proc_macro]\n\npub fn kf_api(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let expanded = parse_and_generate_api(&ast);\n\n expanded.into()\n\n}\n\n\n\n/// Custom derive for implementating Request trait.\n\n/// This derives requires `fluvio_kf` \n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use kf_protocol::derive::Decode;\n\n/// use kf_protocol::derive::Encode;\n\n/// use kf_protocol::api::Request;\n\n/// use kf_protocol::derive::RequestApi;\n\n///\n\n/// #[fluvio_kf(default,api_min_version = 5, api_max_version = 6, api_key = 10, response = \"SimpleResponse\")]\n\n/// #[derive(Request,Encode,Decode,Default)]\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/lib.rs", "rank": 62, "score": 183384.6598441858 }, { "content": "/// Format SPU Group based on output type\n\npub fn spu_group_response_to_output<O: Terminal>(\n\n out: std::sync::Arc<O>,\n\n spu_groups: FlvFetchSpuGroupsResponse,\n\n output_type: OutputType,\n\n) -> Result<(), CliError> {\n\n\n\n let groups = spu_groups.spu_groups;\n\n\n\n // TODO: display error output\n\n\n\n let list_spu_groups: Vec<SpuGroupRow> = groups\n\n .into_iter()\n\n .map(|g| {\n\n let (name,spec,status) = g.into();\n\n SpuGroupRow {\n\n name,\n\n spec,\n\n status\n\n }\n\n }).collect();\n", "file_path": "src/cli/src/spu/group/helpers/list_output.rs", "rank": 63, "score": 183327.26973307552 }, { "content": "/// validate streaming controller cli inputs and generate ScConfig\n\npub fn get_sc_config() -> Result<(ScConfig, K8Config), ScK8Error> {\n\n sc_opt_to_sc_config(ScOpt::from_args())\n\n}\n\n\n", "file_path": "src/sc-k8/src/cli.rs", "rank": 64, "score": 183319.4165206395 }, { "content": "pub trait KfApiKey: Sized + Encoder + Decoder + TryFrom<u16> {\n\n \n\n}\n\n\n\n\n\n\n\n\n\n#[derive(Debug, Encode, Decode, Default)]\n\npub struct RequestHeader {\n\n api_key: u16,\n\n api_version: i16,\n\n correlation_id: i32,\n\n client_id: String,\n\n}\n\n\n\nimpl fmt::Display for RequestHeader {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f,\"api: {} client: {}\",self.api_key,self.client_id)\n\n }\n\n}\n", "file_path": "src/kf-protocol/kf-protocol-api/src/api.rs", "rank": 65, "score": 182299.4299103096 }, { "content": "/// If Request file, looks-up the index of the Response file (or the reverese)\n\n/// Returns the index of the file, or -1\n\nfn second_file_index(first_file: String, files: &Vec<PathBuf>) -> Option<(usize, bool)> {\n\n let found_request: bool;\n\n\n\n // request or reponse\n\n let file_len = first_file.len();\n\n let request_len = first_file.find(\"Request\").unwrap_or(file_len);\n\n let response_len = first_file.find(\"Response\").unwrap_or(file_len);\n\n\n\n // generate second file\n\n let second_file = if file_len != request_len {\n\n 
found_request = false;\n\n\n\n if request_len == 0 {\n\n // Some files begin with Response\n\n let mut second_file = \"Response\".to_owned();\n\n second_file.push_str(&first_file[\"Request\".len()..].to_owned());\n\n second_file\n\n } else {\n\n let mut second_file = first_file[..request_len].to_owned();\n\n second_file.push_str(\"Response\");\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_pairs.rs", "rank": 66, "score": 182231.31699135428 }, { "content": "// Generate a file pointer form directory and filename\n\npub fn make_file_from_dir(dir: &str, filename: &str) -> Result<File, IoError> {\n\n match make_file_path(dir, &filename) {\n\n Ok(file_path) => match open_file(&file_path) {\n\n Ok(file) => Ok(file),\n\n Err(err) => Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"{} - {}\", filename, err),\n\n )),\n\n },\n\n Err(err) => Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"{} - {}\", filename, err),\n\n )),\n\n }\n\n}\n", "file_path": "src/kf-protocol/kf-protocol-build/src/output_to_file.rs", "rank": 67, "score": 181631.08306830435 }, { "content": "pub fn variant_size(num: i64) -> usize {\n\n\n\n let mut v = (num << 1) ^ (num >> 31);\n\n let mut bytes = 1;\n\n\n\n while (v & 0xffffff80) != 0 {\n\n bytes += 1;\n\n v >>= 7;\n\n } \n\n\n\n bytes\n\n}\n\n\n\n\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n", "file_path": "src/kf-protocol/kf-protocol-core/src/varint.rs", "rank": 68, "score": 181626.02831118603 }, { "content": "/// Decode 'String' at key or error\n\nfn get_string(val: &Value, key: &str) -> Result<String, Error> {\n\n match get_key(&val, key)?.as_str() {\n\n Some(v) => Ok(v.to_string()),\n\n None => Err(Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\"key '{}', not string\", key),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/json_to_msg.rs", "rank": 69, "score": 181599.60189972457 }, { "content": "/// create sample batches with message\n\nfn create_batches(records: u16) -> DefaultBatch {\n\n let mut batches = DefaultBatch::default();\n\n let header = batches.get_mut_header();\n\n header.magic = 2;\n\n header.producer_id = 20;\n\n header.producer_epoch = -1;\n\n\n\n for i in 0..records {\n\n let msg = format!(\"record {}\", i);\n\n let record: DefaultRecord = msg.into();\n\n batches.add_record(record);\n\n }\n\n batches\n\n}\n\n\n\nasync fn setup_batch_file() -> Result<(), IoError> {\n\n let test_file_path = temp_dir().join(\"batch_fetch\");\n\n ensure_clean_file(&test_file_path);\n\n debug!(\"creating test file: {:#?}\", test_file_path);\n\n let mut file = file_util::create(&test_file_path).await?;\n", "file_path": "src/kf-socket/tests/file_fetch.rs", "rank": 70, "score": 180798.51919696864 }, { "content": "/// essential core controllers\n\npub fn create_core_services<W, D>(\n\n local_stores: ShareLocalStores,\n\n ws_service: W,\n\n ws_dispatcher: &mut D,\n\n) -> (ShareLocalStores, InternalApiServer)\n\nwhere\n\n W: WSUpdateService + Clone + Sync + Send + 'static,\n\n D: WSChangeDispatcher,\n\n{\n\n // connect conn manager and controllers\n\n let conn_manager = ConnManager::new_with_local_stores(local_stores.clone());\n\n let spu_lc_channel = ws_dispatcher.create_spu_channel();\n\n let topic_spu_channel = ws_dispatcher.create_spu_channel();\n\n let topic_topic_channel = ws_dispatcher.create_topic_channel();\n\n let partition_channel = ws_dispatcher.create_partition_channel();\n\n let partition_spu_channel = ws_dispatcher.create_spu_channel();\n\n\n\n let shared_conn_manager = 
Arc::new(conn_manager);\n\n\n\n // start controller\n", "file_path": "src/sc-core/src/init.rs", "rank": 71, "score": 180500.23611546343 }, { "content": "fn addr_client_config(addr: ServerAddress) -> CliClientConfig {\n\n ClientConfig::new(addr.to_string()).client_id(CLIENT_ID)\n\n}\n\n\n\npub enum ReplicaLeaderTarget {\n\n Spu(SpuLeader),\n\n Kf(KfLeader),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ReplicaLeaderConfig {\n\n Sc(ServerAddress),\n\n Spu(ServerAddress),\n\n Kf(ServerAddress),\n\n}\n\n\n\nimpl ReplicaLeaderConfig {\n\n pub fn new(\n\n sc_host_port: Option<String>,\n\n spu_host_port: Option<String>,\n", "file_path": "src/client-rs/src/profile/config.rs", "rank": 72, "score": 179092.38019147227 }, { "content": "#[test]\n\nfn test_decode_version() {\n\n\n\n // version 0 record\n\n let data = [0x08];\n\n let record = TestRecord::decode_from(&mut Cursor::new(&data),0).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,0); // default\n\n\n\n let data = [0x08];\n\n assert!(TestRecord::decode_from(&mut Cursor::new(&data),1).is_err(),\"version 1 needs 3 bytes\"); \n\n\n\n let data = [0x08,0x01,0x05];\n\n let record = TestRecord::decode_from(&mut Cursor::new(&data),1).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,1); \n\n assert_eq!(record.value3,5); \n\n\n\n let data = [0x08,0x01,0x05];\n\n let record = TestRecord::decode_from(&mut Cursor::new(&data),3).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,0); \n\n assert_eq!(record.value3,1); // default, didn't consume\n\n\n\n}\n", "file_path": "src/kf-protocol/tests/version.rs", "rank": 73, "score": 179010.22445788022 }, { "content": "#[test]\n\nfn test_encode_version() {\n\n\n\n flv_util::init_logger();\n\n let mut record = TestRecord::default();\n\n record.value2 = 10;\n\n record.value3 = 5;\n\n\n\n // version 0 should only encode value\n\n let mut dest = vec![];\n\n record.encode(&mut dest,0).expect(\"encode\");\n\n assert_eq!(dest.len(),1);\n\n assert_eq!(record.write_size(0),1);\n\n\n\n \n\n // version 1 should encode value1,value2,value3\n\n let mut dest = vec![];\n\n record.encode(&mut dest,1).expect(\"encode\");\n\n assert_eq!(dest.len(),3);\n\n assert_eq!(record.write_size(1),3);\n\n\n\n // version 3 should only encode value, value3\n\n let mut dest = vec![];\n\n record.encode(&mut dest,2).expect(\"encode\");\n\n assert_eq!(dest.len(),2);\n\n assert_eq!(dest[1],5); \n\n assert_eq!(record.write_size(2),2);\n\n \n\n}\n\n\n\n\n", "file_path": "src/kf-protocol/tests/version.rs", "rank": 74, "score": 179010.22445788022 }, { "content": "/// create public server\n\npub fn create_public_server<C>(\n\n metadata: ShareLocalStores,\n\n k8_ws: K8WSUpdateService<C>,\n\n namespace: String,\n\n) -> PublicApiServer<C>\n\n where C: MetadataClient\n\n{\n\n let addr = metadata.config().public_endpoint.addr.clone();\n\n info!(\"start public api service at: {}\", addr);\n\n\n\n KfApiServer::new(\n\n addr,\n\n Arc::new(PublicContext {\n\n metadata,\n\n k8_ws,\n\n namespace,\n\n }),\n\n PublicService::new(),\n\n )\n\n}\n", "file_path": "src/sc-core/src/services/public_api/mod.rs", "rank": 75, "score": 178384.26859505812 }, { "content": "#[derive(Encode,Decode,Default,Debug)]\n\nstruct TestRecord {\n\n value: i8,\n\n #[fluvio_kf(min_version = 1,max_version = 1)]\n\n value2: i8,\n\n #[fluvio_kf(min_version = 1)]\n\n value3: i8\n\n}\n\n \n\n\n", "file_path": "src/kf-protocol/tests/version.rs", "rank": 76, "score": 178041.93066004684 }, { "content": "/// Decode 
'String' if avialable at key or error\n\nfn maybe_string(val: &Value, key: &str) -> Result<Option<String>, Error> {\n\n match get_key(&val, key) {\n\n Ok(v) => match v.as_str() {\n\n Some(v) => Ok(Some(v.to_string())),\n\n None => Err(Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\"key '{}', not string\", key),\n\n )),\n\n },\n\n Err(_) => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/json_to_msg.rs", "rank": 77, "score": 177619.8001263612 }, { "content": "struct ScServerCtx {\n\n ctx: SharedScContext,\n\n sender: Sender<bool>,\n\n}\n", "file_path": "src/spu-server/src/tests/fixture/spu_client.rs", "rank": 78, "score": 177259.78435521887 }, { "content": "/// Decode 'name' string or error\n\nfn get_name(val: &Value) -> Result<String, Error> {\n\n let name = get_string(val, \"name\")?;\n\n //println!(\"{}\", name);\n\n match name.as_str() {\n\n \"Type\" => Ok(\"Typ\".to_string()),\n\n _ => Ok(name),\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/json_to_msg.rs", "rank": 79, "score": 177154.0599190743 }, { "content": "/// Skip fields if they match the following criteria\n\n/// - version is exactly 0.\n\npub fn skip_field(field: &SpecField) -> bool {\n\n field.versions.is_zero()\n\n}\n\n\n\n// -----------------------------------\n\n// Test Cases\n\n// -----------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::fs::read_to_string;\n\n use std::io::Error as IoError;\n\n use std::io::ErrorKind;\n\n use std::path::{Path, PathBuf};\n\n\n\n use super::*;\n\n\n\n use crate::file_to_json::file_to_json;\n\n use crate::json_to_msg::{parse_json_to_request, parse_json_to_response};\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 80, "score": 177084.64142213156 }, { "content": "/// Generate Field\n\npub fn generate_field(s_field: &SpecField) -> Field {\n\n let name = field_name(&s_field.name, &s_field.map_key, &s_field.entity_type);\n\n let value = field_value(&s_field.typ, &s_field.nullable_versions);\n\n let comment = field_comment(&s_field.about);\n\n let annotation = field_annotation(s_field);\n\n\n\n Field {\n\n name,\n\n value,\n\n comment,\n\n annotation,\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 81, "score": 177073.3832015295 }, { "content": "///\n\n/// Translate incoming k8 items into KVInputAction against MemStore which contains local state\n\n/// It only generates KVInputAction if incoming k8 object is different from memstore\n\n/// \n\n///\n\npub fn k8_events_to_metadata_actions<S>(\n\n k8_tokens: K8List<S::K8Spec,<S::K8Spec as K8Spec>::Status>,\n\n local_store: &LocalStore<S>,\n\n) -> Actions<LSChange<S>> \n\n where \n\n S: Spec + PartialEq + Debug, \n\n S::Status: Status + PartialEq + Debug , \n\n S::K8Spec: Debug, \n\n S::Key: Clone + Ord + Debug + Display\n\n{\n\n let (mut add_cnt, mut mod_cnt, mut del_cnt, mut skip_cnt) = (0, 0, 0, 0);\n\n let mut local_names = local_store.all_keys();\n\n let all = local_store.count();\n\n let mut actions: Actions<LSChange<S>> = Actions::default();\n\n\n\n // loop through items and generate add/mod actions\n\n for k8_obj in k8_tokens.items {\n\n\n\n match k8_obj_to_kv_obj(k8_obj) {\n\n\n", "file_path": "src/sc-core/src/metadata/k8_events_to_actions.rs", "rank": 82, "score": 175858.32256370937 }, { "content": "/// generate implementation for decoding kf protocol\n\npub fn generate_decode_traits(input: &DeriveInput) -> TokenStream {\n\n\n\n let name = &input.ident;\n\n\n\n let 
int_type = default_int_type(&input.attrs);\n\n\n\n let decoded_field_tokens = decode_fields(&input.data,&int_type,name);\n\n let generics = &input.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let try_enum = generate_try_enum_if(&input.data,&int_type,name);\n\n\n\n quote! {\n\n\n\n impl #impl_generics kf_protocol::Decoder for #name #ty_generics #where_clause {\n\n fn decode<T>(&mut self, src: &mut T,version: kf_protocol::Version) -> Result<(),std::io::Error> where T: kf_protocol::bytes::Buf {\n\n log::trace!(\"decoding struct: {}\",stringify!(#name));\n\n #decoded_field_tokens\n\n Ok(())\n\n }\n\n\n\n }\n\n\n\n #try_enum\n\n \n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/de.rs", "rank": 83, "score": 174921.64287571266 }, { "content": "/// generate implementation for encoding kf protocol\n\npub fn generate_encode_traits(input: &DeriveInput) -> TokenStream {\n\n \n\n let name = &input.ident;\n\n\n\n let encoded_field_tokens = encode_fields_for_writing(&input.data,&input.attrs,name);\n\n let size_field_tokens = encode_field_sizes(&input.data,&input.attrs,name);\n\n let generics = &input.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n \n\n quote! {\n\n\n\n impl #impl_generics kf_protocol::Encoder for #name #ty_generics #where_clause {\n\n\n\n fn encode<T>(&self, src: &mut T, version: kf_protocol::Version) -> Result<(),std::io::Error> where T: kf_protocol::bytes::BufMut {\n\n log::trace!(\"encoding struct: {} version: {}\",stringify!(#name),version);\n\n #encoded_field_tokens\n\n Ok(())\n\n }\n\n\n\n fn write_size(&self, version: kf_protocol::Version) -> usize {\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/ser.rs", "rank": 84, "score": 174921.64287571266 }, { "content": "/// generate implementation for decoding kf protocol\n\npub fn generate_default_traits(input: &DeriveInput) -> TokenStream {\n\n\n\n let name = &input.ident;\n\n\n\n let default_impl = generate_default_impl(input,name);\n\n\n\n\n\n quote! 
{\n\n\n\n #default_impl\n\n \n\n }\n\n}\n\n\n\n\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/de.rs", "rank": 85, "score": 174921.64287571266 }, { "content": "/// Generate annotation for request message\n\npub fn request_annotation(req: &SpecMessage) -> RequestAnnotation {\n\n let (min_api_version, max_api_version) = req.api_versions.touples();\n\n\n\n RequestAnnotation {\n\n api_key: req.api_key as i16,\n\n min_api_version,\n\n max_api_version,\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 86, "score": 174916.05895507033 }, { "content": "/// Encode partitions into a Replica Reponse\n\npub fn partition_metadata_to_replica_response(\n\n partitions: &PartitionLocalStore,\n\n topic: &String,\n\n) -> Vec<FlvPartitionReplica> {\n\n let mut res: Vec<FlvPartitionReplica> = Vec::default();\n\n let partition_cnt = partitions.count_topic_partitions(topic);\n\n for idx in 0..partition_cnt {\n\n let name = ReplicaKey::new(topic.clone(), idx);\n\n if let Some(partition) = partitions.value(&name) {\n\n res.push(FlvPartitionReplica {\n\n id: idx,\n\n leader: partition.spec.leader,\n\n replicas: partition.spec.replicas.clone(),\n\n live_replicas: partition.status.live_replicas().clone(),\n\n })\n\n }\n\n }\n\n res\n\n}\n", "file_path": "src/sc-core/src/services/public_api/flv/fetch_topics_req.rs", "rank": 87, "score": 174811.01816368807 }, { "content": "/// Encode Topic metadata into a Topic FLV Reponse\n\npub fn topic_store_metadata_to_topic_response(\n\n topics: &TopicLocalStore,\n\n topic_name: &String,\n\n) -> FlvFetchTopicResponse {\n\n if let Some(topic) = topics.topic(topic_name) {\n\n FlvFetchTopicResponse::new(\n\n topic_name.clone(),\n\n topic.spec.clone(),\n\n topic.status.clone(),\n\n None,\n\n )\n\n } else {\n\n FlvFetchTopicResponse::new_not_found(topic_name.clone())\n\n }\n\n}\n\n\n", "file_path": "src/sc-core/src/services/public_api/flv/fetch_topics_req.rs", "rank": 88, "score": 172571.83200326702 }, { "content": "/// Convert Server string to Server Address\n\nfn server_str_to_server_addr(\n\n server_str: &Option<String>,\n\n) -> Result<Option<ServerAddress>, IoError> {\n\n if let Some(server) = server_str {\n\n // parse host and port\n\n let host_port: Vec<&str> = server.split(':').collect();\n\n if host_port.len() != 2 {\n\n return Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"Expected 'host:port' format, found '{}'\", server),\n\n ));\n\n }\n\n\n\n let host = host_port[0].to_owned();\n\n let port: u16 = host_port[1].parse().map_err(|err| {\n\n IoError::new(ErrorKind::InvalidInput, format!(\"invalid port: {}\", err))\n\n })?;\n\n\n\n Ok(Some(ServerAddress { host, port }))\n\n } else {\n", "file_path": "src/spu-server/src/config/spu_config.rs", "rank": 89, "score": 171183.80651883915 }, { "content": "fn decode_option_vec_u<T>(array: &mut Option<Vec<u8>>, src: &mut T, len: isize) -> Result<(), Error>\n\nwhere\n\n T: Buf,\n\n{\n\n if len < 0 {\n\n *array = None;\n\n return Ok(());\n\n }\n\n\n\n if len == 0 {\n\n *array = Some(Vec::new());\n\n return Ok(());\n\n }\n\n\n\n let mut buf = src.take(len as usize);\n\n let mut value: Vec<u8> = Vec::new();\n\n value.put(&mut buf);\n\n if value.len() != len as usize {\n\n return Err(Error::new(\n\n ErrorKind::UnexpectedEof,\n", "file_path": "src/kf-protocol/kf-protocol-core/src/decoder.rs", "rank": 90, "score": 170558.31026603552 }, { "content": "/// Parse join group response for member-id\n\nfn join_group_to_member_id(join_group_resp: &KfJoinGroupResponse) -> Result<String, 
CliError> {\n\n if join_group_resp.error_code != KfErrorCode::None\n\n && join_group_resp.error_code != KfErrorCode::MemberIdRequired\n\n {\n\n return Err(CliError::IoError(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\"join group: {}\", join_group_resp.error_code.to_sentence()),\n\n )));\n\n }\n\n debug!(\"member-id: '{}'\", join_group_resp.member_id);\n\n\n\n Ok(join_group_resp.member_id.clone())\n\n}\n\n\n", "file_path": "src/cli/src/consume/kf/kf_fetch_topic_all.rs", "rank": 91, "score": 170332.7654478709 }, { "content": "/// Generate field derive based on versions, nullable and defaults\n\npub fn field_annotation(field: &SpecField) -> Option<FieldAnnotation> {\n\n let mut annotation = FieldAnnotation::default();\n\n\n\n // provision versions\n\n if !field.versions.is_zero_plus() {\n\n let (min_version, max_version) = field.versions.touples();\n\n annotation.min_version = Some(min_version);\n\n annotation.max_version = max_version;\n\n }\n\n\n\n // provision ignorable\n\n if field.ignorable.unwrap_or(false) {\n\n annotation.ignorable = Some(true);\n\n }\n\n\n\n // provision default\n\n if let Some(default) = &field.default {\n\n annotation.default = Some(default.value());\n\n }\n\n\n\n if annotation.min_version.is_some()\n\n || annotation.ignorable.is_some()\n\n || annotation.default.is_some()\n\n {\n\n Some(annotation)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/file_content.rs", "rank": 92, "score": 170244.663509051 }, { "content": "/// Streaming Controller dispatcher entry point, spawns new thread\n\npub fn run<K,C>(receiver: Receiver<ScRequest>, sc_controller: ScController<K,C>) \n\n where K: WSUpdateService + Send + Sync + 'static,\n\n C: SpuConnections + Send + Sync + 'static\n\n{\n\n info!(\"start SC[{}] dispatcher\", sc_controller.id());\n\n\n\n spawn(sc_request_loop(receiver, sc_controller);\n\n}\n\n\n\n/// SC dispatcher request loop, waits for a request request and dispatchers\n\n/// it for processing.\n\nasync fn sc_request_loop<K,C>(mut receiver: Receiver<ScRequest>, mut sc_controller: ScController<K,C>) \n\n where K: WSUpdateService , C: SpuConnections \n\n{\n\n loop {\n\n select! {\n\n receiver_req = receiver.next() => {\n\n match receiver_req {\n\n None => {\n\n info!(\"SC dispatcher receiver is removed. 
end\");\n", "file_path": "src/sc-core/src/core/dispatcher.rs", "rank": 93, "score": 168025.11345357035 }, { "content": "///\n\n/// Translates K8 events into metadata action.\n\n///\n\npub fn k8_event_stream_to_metadata_actions<S,E>(\n\n stream: TokenStreamResult<S::K8Spec,<S::K8Spec as K8Spec>::Status,E>,\n\n local_store: &LocalStore<S>\n\n) -> Actions<LSChange<S>> \n\n where \n\n S: Spec + Debug + PartialEq + Debug,\n\n <S as Spec>::K8Spec: Debug,\n\n S::Key: Debug + Display + Clone,\n\n S::Status: Debug + PartialEq,\n\n E: MetadataClientError\n\n{\n\n\n\n let (mut add_cnt, mut mod_cnt, mut del_cnt, mut skip_cnt) = (0, 0, 0, 0);\n\n let mut actions: Actions<LSChange<S>> = Actions::default();\n\n\n\n // loop through items and generate add/mod actions\n\n for token in stream.unwrap() {\n\n match token {\n\n Ok(watch_obj) => match watch_obj {\n\n K8Watch::ADDED(k8_obj) => {\n", "file_path": "src/sc-core/src/metadata/k8_events_to_actions.rs", "rank": 94, "score": 167837.95135964183 }, { "content": "/// Parse group coordinator response and generate Server Address\n\nfn group_coordinator_to_socket_addr(\n\n coordinator_resp: &KfFindCoordinatorResponse,\n\n) -> Result<String, CliError> {\n\n if coordinator_resp.error_code != KfErrorCode::None {\n\n return Err(CliError::IoError(IoError::new(\n\n ErrorKind::InvalidData,\n\n format!(\n\n \"find group coordinator: {}\",\n\n coordinator_resp.error_code.to_sentence()\n\n ),\n\n )));\n\n }\n\n\n\n let server_addr = ProfileConfig::host_port_to_socket_addr(&format!(\n\n \"{}:{}\",\n\n coordinator_resp.host, coordinator_resp.port\n\n ))?;\n\n\n\n debug!(\"group coordinator host/port: '{}'\", server_addr);\n\n\n\n Ok(server_addr)\n\n}\n\n\n", "file_path": "src/cli/src/consume/kf/kf_fetch_topic_all.rs", "rank": 95, "score": 167409.3123662811 }, { "content": "#[test]\n\nfn test_decode_version() {\n\n\n\n // version 0 record\n\n let data = [0x08];\n\n let record = TestRequest::decode_from(&mut Cursor::new(&data),0).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,0); // default\n\n\n\n let data = [0x08];\n\n assert!(TestRequest::decode_from(&mut Cursor::new(&data),1).is_err(),\"version 1 needs 3 bytes\"); \n\n\n\n let data = [0x08,0x01,0x05];\n\n let record = TestRequest::decode_from(&mut Cursor::new(&data),1).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,1); \n\n assert_eq!(record.value3,5); \n\n\n\n let data = [0x08,0x01,0x05];\n\n let record = TestRequest::decode_from(&mut Cursor::new(&data),3).expect(\"decode\");\n\n assert_eq!(record.value,8);\n\n assert_eq!(record.value2,0); \n\n assert_eq!(record.value3,1); // default, didn't consume\n\n\n\n}\n", "file_path": "src/kf-protocol/tests/api.rs", "rank": 96, "score": 166951.6578986265 }, { "content": "/// Convert online SPUs to Kafka Brokers\n\nfn flv_online_spus_to_kf_brokers(online_spus: &Vec<SpuKV>) -> Vec<MetadataResponseBroker> {\n\n \n\n online_spus.iter().map(|online_spu| {\n\n let public_ep = online_spu.public_endpoint();\n\n \n\n MetadataResponseBroker {\n\n node_id: *online_spu.id(),\n\n host: public_ep.host_string(),\n\n port: public_ep.port as i32,\n\n rack: online_spu.rack_clone(),\n\n }\n\n\n\n }).collect()\n\n \n\n}\n\n\n", "file_path": "src/sc-core/src/services/public_api/kf/metadata_req.rs", "rank": 97, "score": 166400.04867728532 }, { "content": "fn parse_struct(struct_name: &Ident,data: &DataStruct) -> TokenStream {\n\n \n\n match data.fields {\n\n Fields::Named(ref fields) => {\n\n let recurse = fields.named.iter().map(|f| 
{\n\n let fname = &f.ident;\n\n if f.attrs\n\n .iter()\n\n .flat_map(Attribute::interpret_meta)\n\n .find(|meta| meta.name() == \"varint\")\n\n .is_some()\n\n {\n\n quote! {\n\n \n\n log::trace!(\"start decoding varint field <{}>\",stringify!(#fname));\n\n let result = self.#fname.decode_varint(src);\n\n if result.is_ok() {\n\n log::trace!(\"decoding ok varint <{}> => {:?}\",stringify!(#fname),&self.#fname);\n\n } else {\n\n log::trace!(\"decoding varint error <{}> ==> {}\",stringify!(#fname),result.as_ref().unwrap_err());\n", "file_path": "src/kf-protocol/kf-protocol-derive/src/de.rs", "rank": 98, "score": 166016.29757026906 }, { "content": "/// Convert Json to Request Msg\n\npub fn parse_json_to_request(val: Value) -> Result<SpecMessage, Error> {\n\n let msg_type = get_msg_type(&val)?;\n\n\n\n match msg_type {\n\n SpecMessageType::Request => parse_message(&val, msg_type),\n\n SpecMessageType::Response => {\n\n return Err(Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\"expected 'Request', found '{}'\", msg_type),\n\n ));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kf-protocol/kf-protocol-build/src/json_to_msg.rs", "rank": 99, "score": 165893.10597156818 } ]
Rust
src/lib/redis.rs
PavelZX/rust-actix-rest-api-boilerplate
3852a2dd8b941dfe18e5f990fedb72d2665c87fe
use mobc::Pool;
use mobc::async_trait;
use mobc::Manager;
use redis::aio::Connection;
use redis::Client;
use std::time::Duration;

use super::error;

pub struct RedisConnectionManager {
    client: Client,
}

impl RedisConnectionManager {
    pub fn new(c: Client) -> Self {
        Self { client: c }
    }
}

#[async_trait]
impl Manager for RedisConnectionManager {
    type Connection = Connection;
    type Error = redis::RedisError;

    async fn connect(&self) -> Result<Self::Connection, Self::Error> {
        let c = self.client.get_async_connection().await?;
        Ok(c)
    }

    async fn check(&self, mut conn: Self::Connection) -> Result<Self::Connection, Self::Error> {
        redis::cmd("PING").query_async(&mut conn).await?;
        Ok(conn)
    }
}

pub async fn conn(settings: &config::Config) -> Pool<RedisConnectionManager> {
    let host = settings.get::<String>("redis.host").unwrap();
    let port = settings.get::<String>("redis.port").unwrap();
    let password = settings.get::<String>("redis.password").unwrap();
    let db = settings.get::<String>("redis.db").unwrap();
    let pool_get_timeout_seconds = settings.get::<u64>("redis.pool_get_timeout_seconds").unwrap();
    let pool_max_open = settings.get::<u64>("redis.pool_max_open").unwrap();
    let pool_max_idle = settings.get::<u64>("redis.pool_max_idle").unwrap();
    let pool_max_lifetime_seconds = settings.get::<u64>("redis.pool_max_lifetime_seconds").unwrap();

    let client = redis::Client::open(&format!("redis://:{}@{}:{}/{}", password, host, port, db)[..]).unwrap();
    let manager = RedisConnectionManager::new(client);
    let pool = Pool::builder()
        .get_timeout(Some(Duration::from_secs(pool_get_timeout_seconds)))
        .max_open(pool_max_open)
        .max_idle(pool_max_idle)
        .max_lifetime(Some(Duration::from_secs(pool_max_lifetime_seconds)))
        .build(manager);

    pool
}

pub async fn expire(key: String, value: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("EXPIRE").arg(key).arg(value).query_async::<_, i16>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}

pub async fn get_expire(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<i64, error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("TTL").arg(key).query_async::<_, i64>(&mut con as &mut redis::aio::Connection).await;
    match result {
        Ok(v) => Ok(v),
        Err(e) => {
            error!(log, "{}", e);
            Err(error::err500())
        }
    }
}

pub async fn del(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("DEL").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}

pub async fn has_key(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("EXISTS").arg(key).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    match result {
        Ok(v) => {
            if v > 0 {
                Ok(true)
            } else {
                Ok(false)
            }
        },
        Err(e) => {
            error!(log, "{}", e);
            Err(error::err500())
        }
    }
}

pub async fn set<T: redis::ToRedisArgs>(key: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("SET").arg(key).arg(value).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}

pub async fn set_with_expire<T: redis::ToRedisArgs>(key: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("SET").arg(key).arg(value).arg("EX").arg(time).query_async::<_, String>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}

pub async fn get<T: redis::FromRedisValue>(key: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("GET").arg(key).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await;
    match result {
        Ok(v) => Ok(v),
        Err(e) => {
            error!(log, "{}", e);
            Err(error::err500())
        }
    }
}

pub async fn hset<T: redis::ToRedisArgs>(key: String, item: String, value: T, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("HSET").arg(key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}

pub async fn hset_with_expire<T: redis::ToRedisArgs>(key: String, item: String, value: T, time: i64, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("HSET").arg(&key).arg(item).arg(value).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    if time > 0 {
        expire(key, time, &pool, &log).await?;
    }
    Ok(())
}

pub async fn hget<T: redis::FromRedisValue>(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<T, error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("HGET").arg(key).arg(item).query_async::<_, T>(&mut con as &mut redis::aio::Connection).await;
    match result {
        Ok(v) => Ok(v),
        Err(e) => {
            error!(log, "{}", e);
            Err(error::err500())
        }
    }
}

pub async fn hhas_key(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<bool, error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("HEXISTS").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    match result {
        Ok(v) => {
            if v > 0 {
                Ok(true)
            } else {
                Ok(false)
            }
        },
        Err(e) => {
            error!(log, "{}", e);
            Err(error::err500())
        }
    }
}

pub async fn hdel(key: String, item: String, pool: &mobc::Pool<RedisConnectionManager>, log: &slog::Logger) -> Result<(), error::Error> {
    let mut con = pool.get().await.unwrap();
    let result = redis::cmd("HDEL").arg(key).arg(item).query_async::<_, i32>(&mut con as &mut redis::aio::Connection).await;
    if let Err(err) = result {
        error!(log, "{}", err);
        return Err(error::err500());
    }
    Ok(())
}
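// --- Usage sketch (illustrative only; not part of src/lib/redis.rs) ---
// A minimal example of how the helpers above might be called from an actix-web
// handler in this boilerplate, assuming the application keeps the mobc pool and
// an slog logger in shared app data. `AppState`, its field names, and the
// `read_cached` handler are hypothetical names introduced for this sketch.
use actix_web::{web, Responder};

pub struct AppState {
    pub redis_pool: mobc::Pool<RedisConnectionManager>,
    pub log: slog::Logger,
}

// Looks up a cached string value: `has_key` guards the read, `get` fetches it,
// and any Redis or pool failure falls back to a plain placeholder string.
pub async fn read_cached(state: web::Data<AppState>, key: web::Path<String>) -> impl Responder {
    let key = key.into_inner();
    match has_key(key.clone(), &state.redis_pool, &state.log).await {
        Ok(true) => get::<String>(key, &state.redis_pool, &state.log)
            .await
            .unwrap_or_else(|_| "error".to_string()),
        _ => "missing".to_string(),
    }
}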
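The hash helpers follow the same pattern, except that a field name is passed next to the key and the TTL is applied to the whole hash through `expire` (a `time` of zero skips the `EXPIRE` call entirely). Below is a hypothetical handler-style sketch, assuming the `AppState` struct with `redis` and `log` fields from the retrieved `main.rs` snippet; the route path and handler name are made up for illustration.

```rust
use actix_web::{get, web, HttpResponse};
use crate::{lib, lib::error, AppState};

// Hypothetical route: not part of the project, shown only to illustrate the hash helpers.
#[get("/profile-cache-demo")]
async fn profile_cache_demo(state: web::Data<AppState>) -> Result<HttpResponse, error::Error> {
    let key = "user:42:profile".to_string();

    // Write two fields into one hash; the first call also puts a 5-minute TTL on the whole
    // hash, the second passes 0 so the existing TTL is left untouched.
    lib::redis::hset_with_expire(key.clone(), "name".to_string(), "alice", 300, &state.redis, &state.log).await?;
    lib::redis::hset_with_expire(key.clone(), "role".to_string(), "admin", 0, &state.redis, &state.log).await?;

    // Read one field back; the generic parameter decides how the Redis value is decoded.
    let name: String = lib::redis::hget(key.clone(), "name".to_string(), &state.redis, &state.log).await?;

    // Remove a single field without dropping the rest of the hash.
    lib::redis::hdel(key, "role".to_string(), &state.redis, &state.log).await?;

    Ok(HttpResponse::Ok().body(name))
}
```

Keeping the TTL on the hash rather than on individual fields matches how `hset_with_expire` is written: Redis only supports expiry per key, so all fields of a hash share one lifetime.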
[ { "content": "pub fn required_str(v: &Option<String>, name: &str) -> Result<String, error::Error> {\n\n not_none(v.as_ref(), name)?;\n\n\n\n let v = v.as_ref().unwrap().to_string();\n\n if v.chars().count() == 0 {\n\n return Err(error::new(400002, &format!(\"{} can not be empty\", name)[..], 422));\n\n }\n\n\n\n Ok(v)\n\n}\n\n\n", "file_path": "src/lib/validator.rs", "rank": 0, "score": 189895.4436553325 }, { "content": "pub fn uuid(v: &str, name: &str) -> Result<(), error::Error> {\n\n let re = Regex::new(r\"^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$\").unwrap();\n\n if !re.is_match(&(v.to_uppercase())[..]) {\n\n return Err(error::new(400002, &format!(\"{} wrong format\", name)[..], 422));\n\n }\n\n\n\n Ok(())\n\n}", "file_path": "src/lib/validator.rs", "rank": 1, "score": 155951.7530929033 }, { "content": "pub fn mobile(v: &str, name: &str) -> Result<(), error::Error> {\n\n let re = Regex::new(r\"^(\\+?0?86\\-?)?1[345789]\\d{9}$\").unwrap();\n\n if !re.is_match(v) {\n\n return Err(error::new(400002, &format!(\"{} error\", name)[..], 422));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/validator.rs", "rank": 2, "score": 155951.7530929033 }, { "content": "pub fn email(v: &str, name: &str) -> Result<(), error::Error> {\n\n let re = Regex::new(r\"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$\").unwrap();\n\n if !re.is_match(v) {\n\n return Err(error::new(400002, &format!(\"{} incorrect format\", name)[..], 422));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/validator.rs", "rank": 3, "score": 155951.7530929033 }, { "content": "pub fn not_none<T>(v: Option<T>, name: &str) -> Result<(), error::Error> {\n\n if let None = v {\n\n return Err(error::new(400002, &format!(\"{} can not be empty\", name)[..], 422));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib/validator.rs", "rank": 4, "score": 145920.19628280646 }, { "content": "pub fn parse_token(token: &str) -> Result<Claims, error::Error> {\n\n let token = match decode::<Claims>(&token, &DecodingKey::from_secret(JWT_KEY.as_ref()), &Validation::default()) {\n\n Ok(v) => v,\n\n Err(_) => return Err(error::new(100403, \"Authentication failure\", 401))\n\n };\n\n\n\n let mut claims = token.claims;\n\n if let Some(v) = aes::decrypt(&claims.sub, AES_KEY) {\n\n claims.sub = v;\n\n } else {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n\n\n Ok(claims)\n\n}\n\n\n\npub async fn verify(permission: &str, req: &HttpRequest, state: &web::Data<AppState>) -> Result<AuthorizationInfo, error::Error> {\n\n let token = match req.headers().get(\"Authorization\") {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n", "file_path": "src/lib/auth.rs", "rank": 5, "score": 141834.2836683115 }, { "content": "pub fn crypt_password(password: &str, salt: &uuid::Uuid) -> String {\n\n let pwd = format!(\"{}{}\", password, salt.to_string());\n\n let pwd = md5::compute(pwd);\n\n let pwd = format!(\"{:?}{}{}\", pwd, password, salt.to_string());\n\n let pwd = Sha256::new().chain_update(pwd).finalize();\n\n format!(\"{:x}\", pwd)\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Claims {\n\n pub sub: String,\n\n pub iat: usize,\n\n pub exp: usize,\n\n pub jti: String,\n\n pub scopes: Vec<String>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Token {\n\n pub token: String,\n", "file_path": "src/lib/auth.rs", "rank": 6, "score": 139434.85953914854 }, { "content": "pub fn render_404<B>(mut res: dev::ServiceResponse<B>) -> 
Result<ErrorHandlerResponse<BoxBody>> {\n\n res.response_mut().headers_mut().insert(\n\n http::header::CONTENT_TYPE,\n\n http::header::HeaderValue::from_static(\"application/json\"),\n\n );\n\n\n\n let new_res = res.map_body(|_, _| {\n\n EitherBody::left(BoxBody::new(\"{\\\"errcode\\\": 404, \\\"errmsg\\\": \\\"Not Found\\\"}\"))\n\n });\n\n Ok(ErrorHandlerResponse::Response(new_res))\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 7, "score": 135436.51489599413 }, { "content": "pub fn render_500<B>(mut res: dev::ServiceResponse<B>) -> Result<ErrorHandlerResponse<BoxBody>> {\n\n res.response_mut().headers_mut().insert(\n\n http::header::CONTENT_TYPE,\n\n http::header::HeaderValue::from_static(\"application/json\"),\n\n );\n\n\n\n let new_res = res.map_body(|_, _| {\n\n EitherBody::left(BoxBody::new(\"{\\\"errcode\\\": 500, \\\"errmsg\\\": \\\"Internal Server Error\\\"}\"))\n\n });\n\n Ok(ErrorHandlerResponse::Response(new_res))\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 8, "score": 135436.51489599413 }, { "content": "pub fn render_400<B>(mut res: dev::ServiceResponse<B>) -> Result<ErrorHandlerResponse<BoxBody>> {\n\n res.response_mut().headers_mut().insert(\n\n http::header::CONTENT_TYPE,\n\n http::header::HeaderValue::from_static(\"application/json\"),\n\n );\n\n\n\n let new_res = res.map_body(|_, _| {\n\n EitherBody::left(BoxBody::new(\"{\\\"errcode\\\": 400, \\\"errmsg\\\": \\\"Bad Request\\\"}\"))\n\n });\n\n Ok(ErrorHandlerResponse::Response(new_res))\n\n}", "file_path": "src/lib/error.rs", "rank": 9, "score": 135436.51489599413 }, { "content": "pub fn render_405<B>(mut res: dev::ServiceResponse<B>) -> Result<ErrorHandlerResponse<BoxBody>> {\n\n res.response_mut().headers_mut().insert(\n\n http::header::CONTENT_TYPE,\n\n http::header::HeaderValue::from_static(\"application/json\"),\n\n );\n\n\n\n let new_res = res.map_body(|_, _| {\n\n EitherBody::left(BoxBody::new(\"{\\\"errcode\\\": 405, \\\"errmsg\\\": \\\"Method Not Allowed\\\"}\"))\n\n });\n\n Ok(ErrorHandlerResponse::Response(new_res))\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 10, "score": 135436.51489599413 }, { "content": "pub fn err500() -> Error {\n\n Error {\n\n errmsg: \"Internal Server Error\".to_string(),\n\n errcode: 500,\n\n status: 500,\n\n }\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 11, "score": 126378.7425133682 }, { "content": "pub fn get_client_info(state: &web::Data<AppState>, req: &HttpRequest, conn: &ConnectionInfo) -> ClientInfo {\n\n let mut ip = String::from(\"\");\n\n let mut user_agent = String::from(\"\");\n\n\n\n let is_behind_proxy = state.config.get::<bool>(\"app.behind_proxy\").unwrap();\n\n if is_behind_proxy {\n\n if let Some(val) = conn.realip_remote_addr() {\n\n let split = val.split(\":\");\n\n let vec: Vec<&str> = split.collect();\n\n if vec.len() >0 {\n\n ip = vec[0].to_string();\n\n }\n\n }\n\n } else {\n\n if let Some(val) = req.peer_addr() {\n\n ip = val.ip().to_string();\n\n };\n\n }\n\n\n\n if let Some(val) = req.headers().get(\"User-Agent\") {\n\n user_agent = val.to_str().unwrap_or_default().to_string();\n\n };\n\n \n\n ClientInfo { ip, user_agent }\n\n}\n", "file_path": "src/lib/client.rs", "rank": 12, "score": 125971.85967663693 }, { "content": "pub fn res(errcode: u32, errmsg: &str, status: u16) -> Result<web::HttpResponse, Error> {\n\n Err(Error {\n\n errmsg: errmsg.to_string(),\n\n errcode,\n\n status,\n\n })\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 13, "score": 119674.3847699733 }, { "content": "pub fn route(cfg: &mut 
web::ServiceConfig) {\n\n cfg.service(user::controller::get_info);\n\n cfg.service(user::controller::change_password);\n\n}", "file_path": "src/routes/user.rs", "rank": 14, "score": 102348.17315607138 }, { "content": "pub fn route(cfg: &mut web::ServiceConfig) {\n\n cfg.service(hello);\n\n}\n", "file_path": "src/routes/hello.rs", "rank": 15, "score": 102348.17315607138 }, { "content": "pub fn route(cfg: &mut web::ServiceConfig) {\n\n cfg.service(authorizations::controller::create_auth);\n\n cfg.service(authorizations::controller::refresh_auth);\n\n cfg.service(authorizations::controller::delete_auth);\n\n}", "file_path": "src/routes/authorizations.rs", "rank": 16, "score": 102348.17315607138 }, { "content": "pub fn encrypt(data: &str, key: &str) -> String {\n\n let iv_str = gen_string(16);\n\n let iv = iv_str.as_bytes();\n\n let cipher = Aes128Cbc::new_from_slices(key.as_bytes(), iv).unwrap();\n\n let ciphertext = cipher.encrypt_vec(data.as_bytes());\n\n let mut buffer = bytebuffer::ByteBuffer::from_bytes(iv);\n\n buffer.write_bytes(&ciphertext);\n\n base64::encode(buffer.to_bytes())\n\n}\n\n\n", "file_path": "src/lib/aes.rs", "rank": 17, "score": 100829.00261792267 }, { "content": "pub fn new(errcode: u32, errmsg: &str, status: u16) -> Error {\n\n Error {\n\n errmsg: errmsg.to_string(),\n\n errcode,\n\n status,\n\n }\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 18, "score": 100595.01399494265 }, { "content": "pub fn get_logger() -> Logger {\n\n fs::create_dir_all(\"data/logs\").unwrap();\n\n let log_path = \"data/logs/app.log\";\n\n let file = fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(log_path)\n\n .unwrap();\n\n\n\n let decorator = slog_term::TermDecorator::new().build();\n\n let drain1 = slog_term::FullFormat::new(decorator).build().fuse();\n\n\n\n let drain2 = slog_json::Json::new(file)\n\n .set_pretty(false)\n\n .set_newlines(true)\n\n .build()\n\n .fuse();\n\n\n\n let drain = slog_async::Async::new(slog::Duplicate::new(drain1, drain2).fuse()).build().fuse();\n", "file_path": "src/lib/log.rs", "rank": 19, "score": 98225.40957604481 }, { "content": "pub fn decrypt(data: &str, key: &str) -> Option<String> {\n\n let bytes = match base64::decode(data) {\n\n Ok(v) => v,\n\n Err(_) => return None\n\n };\n\n let cipher = match Aes128Cbc::new_from_slices(key.as_bytes(), &bytes[0..16]) {\n\n Ok(v) => v,\n\n Err(_) => return None,\n\n };\n\n let decrypt_byte = match cipher.decrypt_vec(&bytes[16..]) {\n\n Ok(v) => v,\n\n Err(_) => return None,\n\n };\n\n let decrypt_str = match String::from_utf8(decrypt_byte) {\n\n Ok(v) => v,\n\n Err(_) => return None,\n\n };\n\n \n\n Some(decrypt_str)\n\n}", "file_path": "src/lib/aes.rs", "rank": 20, "score": 97000.6735480935 }, { "content": "fn gen_string(size: usize) -> String {\n\n let mut rng = &mut rand::thread_rng();\n\n String::from_utf8(\n\n BASE_STR.as_bytes()\n\n .choose_multiple(&mut rng, size)\n\n .cloned()\n\n .collect()\n\n ).unwrap()\n\n}\n\n\n", "file_path": "src/lib/aes.rs", "rank": 21, "score": 84616.13095593033 }, { "content": "pub fn salt() -> uuid::Uuid {\n\n uuid::Uuid::new_v4()\n\n}\n\n\n", "file_path": "src/lib/auth.rs", "rank": 22, "score": 71764.49657969369 }, { "content": "pub fn create_access_token(user_id: i32, user_type: i16, config: &config::Config) -> Token {\n\n let expire = config.get::<i64>(\"auth.access_token_expire\").unwrap();\n\n\n\n let mut scopes = vec![String::from(\"ROLE_MEMBER\")];\n\n if user_type == 10 {\n\n scopes.push(String::from(\"ROLE_ADMIN\"));\n\n 
}\n\n\n\n let create_time = Utc::now();\n\n let expire_time = Utc::now() + Duration::seconds(expire);\n\n let jti = uuid::Uuid::new_v4();\n\n let sub = aes::encrypt(&user_id.to_string(), AES_KEY);\n\n\n\n let claim = Claims {\n\n sub,\n\n iat: create_time.timestamp() as usize,\n\n exp: expire_time.timestamp() as usize,\n\n jti: jti.to_string(),\n\n scopes,\n\n };\n", "file_path": "src/lib/auth.rs", "rank": 23, "score": 70815.11084303065 }, { "content": "#[derive(Serialize)]\n\nstruct Hello {\n\n msg: String,\n\n}\n\n\n\n#[get(\"/hello\")]\n\npub async fn hello(state: web::Data<AppState>) -> impl Responder {\n\n let name = state.config.get::<String>(\"app.name\").unwrap();\n\n info!(state.log, \"hello {}\", name);\n\n HttpResponse::Ok().json(Hello {msg: format!(\"hello {}\", name)})\n\n}\n", "file_path": "src/api/hello.rs", "rank": 24, "score": 50666.71965739585 }, { "content": "pub fn create_refresh_token(authorization_id: i32, refresh_token_jti: uuid::Uuid, config: &config::Config) -> Token {\n\n let expire = config.get::<i64>(\"auth.refresh_token_expire\").unwrap();\n\n let scopes = vec![String::from(\"ROLE_REFRESH_TOKEN\")];\n\n\n\n let create_time = Utc::now();\n\n let expire_time = Utc::now() + Duration::seconds(expire);\n\n let jti = refresh_token_jti;\n\n let sub = aes::encrypt(&authorization_id.to_string(), AES_KEY);\n\n\n\n let claim = Claims {\n\n sub,\n\n iat: create_time.timestamp() as usize,\n\n exp: expire_time.timestamp() as usize,\n\n jti: jti.to_string(),\n\n scopes,\n\n };\n\n\n\n let token = encode(&Header::new(Algorithm::HS256), &claim, &EncodingKey::from_secret(JWT_KEY.as_ref())).unwrap();\n\n\n\n Token {\n\n token,\n\n expire_time,\n\n create_time,\n\n expire,\n\n jti,\n\n }\n\n}\n\n\n", "file_path": "src/lib/auth.rs", "rank": 25, "score": 50010.81412441346 }, { "content": "#[derive(Serialize)]\n\nstruct ResTokenJson {\n\n id: String,\n\n access_token: String,\n\n expires_in: i64,\n\n refresh_token: String,\n\n created_at: String,\n\n updated_at: String,\n\n}\n\n\n\n// Create authorization\n\n#[post(\"/authorizations\")]\n\npub async fn create_auth(req_info: web::Json<CreateAuthReqJson>, state: web::Data<AppState>, req: HttpRequest, conn: ConnectionInfo) -> Result<web::HttpResponse, error::Error> {\n\n let username = validator::required_str(&req_info.username, \"username\")?;\n\n let password = validator::required_str(&req_info.password, \"password\")?;\n\n\n\n let client = client::get_client_info(&state, &req, &conn);\n\n\n\n let result = user::service::get_by_username(&username, &state).await?;\n\n let u = match result {\n\n None => {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 26, "score": 47608.08057490268 }, { "content": "type Aes128Cbc = Cbc<Aes128, Pkcs7>;\n\n\n\nconst BASE_STR: &str = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\";\n\n\n", "file_path": "src/lib/aes.rs", "rank": 27, "score": 42729.5383808335 }, { "content": "use actix_web::{web, HttpRequest, dev::ConnectionInfo};\n\nuse crate::AppState;\n\n\n\n#[derive(Debug)]\n\npub struct ClientInfo {\n\n pub ip: String,\n\n pub user_agent: String,\n\n}\n\n\n", "file_path": "src/lib/client.rs", "rank": 28, "score": 29198.565583109063 }, { "content": " \n\n let logger = slog::Logger::root(\n\n drain,\n\n o!(\n\n \"msg\" => slog::PushFnValue(move |record, ser| {\n\n ser.emit(record.msg())\n\n }),\n\n \"time\" => slog::PushFnValue(move |_ : &slog::Record, ser| {\n\n ser.emit(chrono::Utc::now().to_rfc3339())\n\n }),\n\n \"level\" => slog::FnValue(move |record| {\n\n 
record.level().as_str()\n\n }),\n\n \"file\" => slog::FnValue(move |record| {\n\n format!(\"{}:{}\", record.file(), record.line())\n\n }),\n\n ),\n\n );\n\n\n\n logger\n\n}", "file_path": "src/lib/log.rs", "rank": 29, "score": 29141.33271194198 }, { "content": "use slog::Logger;\n\nuse slog::Drain;\n\nuse std::fs;\n\n\n", "file_path": "src/lib/log.rs", "rank": 30, "score": 29136.298019431928 }, { "content": "use actix_web::http::StatusCode;\n\nuse actix_web::{web, ResponseError, dev, Result, http};\n\nuse actix_web::middleware::ErrorHandlerResponse;\n\nuse serde::Serialize;\n\nuse serde_json::{json, to_string_pretty};\n\nuse std::fmt::{Display, Formatter, Result as FmtResult};\n\nuse actix_http::body::{EitherBody, BoxBody};\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct Error {\n\n pub errmsg: String,\n\n pub errcode: u32,\n\n pub status: u16,\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n\n write!(f, \"{}\", to_string_pretty(self).unwrap())\n\n }\n\n}\n\n\n\nimpl ResponseError for Error {\n\n fn error_response(&self) -> web::HttpResponse {\n\n let err_json = json!({ \"errcode\": self.errcode, \"errmsg\": self.errmsg });\n\n web::HttpResponse::build(StatusCode::from_u16(self.status).unwrap()).json(err_json)\n\n }\n\n}\n\n\n", "file_path": "src/lib/error.rs", "rank": 31, "score": 28579.87098719812 }, { "content": "use sqlx::postgres::PgPoolOptions;\n\nuse std::time::Duration;\n\n\n\npub async fn conn(settings: &config::Config) -> sqlx::Pool<sqlx::Postgres> {\n\n let user = settings.get::<String>(\"pg.user\").unwrap();\n\n let password = settings.get::<String>(\"pg.password\").unwrap();\n\n let host = settings.get::<String>(\"pg.host\").unwrap();\n\n let port = settings.get::<String>(\"pg.port\").unwrap();\n\n let dbname = settings.get::<String>(\"pg.dbname\").unwrap();\n\n let connect_timeout = settings.get::<u64>(\"pg.connect_timeout\").unwrap();\n\n let idle_timeout = settings.get::<u64>(\"pg.idle_timeout\").unwrap();\n\n let max = settings.get::<u32>(\"pg.max\").unwrap();\n\n\n\n let pool = PgPoolOptions::new()\n\n .max_connections(max)\n\n .idle_timeout(Duration::new(idle_timeout, 0))\n\n .connect_timeout(Duration::new(connect_timeout, 0))\n\n .connect(&format!(\"postgres://{}:{}@{}:{}/{}\", user, password, host, port, dbname)[..])\n\n .await\n\n .unwrap();\n\n \n\n pool\n\n}", "file_path": "src/lib/db/pg.rs", "rank": 32, "score": 28014.767755078385 }, { "content": "pub mod pg;", "file_path": "src/lib/db/mod.rs", "rank": 33, "score": 27993.714442896962 }, { "content": " let r = sqlx::query_as::<_, User>(\"SELECT * FROM users WHERE username=$1\")\n\n .bind(username)\n\n .fetch_optional(db)\n\n .await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\npub async fn insert(user: &User, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<User, error::Error> {\n\n let mut sql1 = vec![String::from(\"uuid\")];\n\n let mut sql2 = vec![String::from(\"$1\")];\n\n let mut sql_index = 2;\n\n\n\n if let Some(_) = &user.username {\n", "file_path": "src/api/user/model.rs", "rank": 45, "score": 27.996908681321518 }, { "content": " pub db: sqlx::Pool<sqlx::Postgres>,\n\n pub redis: mobc::Pool<lib::redis::RedisConnectionManager>\n\n}\n\n\n\nasync fn index() -> Result<web::HttpResponse, error::Error> {\n\n Ok(HttpResponse::Ok().body(\"\"))\n\n}\n\n\n\n#[actix_web::main]\n\nasync fn main() -> std::io::Result<()> {\n\n // config\n\n let mut settings = 
config::Config::default();\n\n settings.merge(config::File::with_name(\"data/config/app.toml\")).unwrap();\n\n let port = settings.get::<String>(\"app.port\").unwrap();\n\n\n\n // log\n\n let logger = lib::log::get_logger();\n\n info!(logger, \"==> 🚀 {} listening at {}\", settings.get::<String>(\"app.name\").unwrap(), settings.get::<String>(\"app.port\").unwrap());\n\n\n\n // database\n", "file_path": "src/main.rs", "rank": 46, "score": 27.037673394987873 }, { "content": "use chrono::prelude::*;\n\nuse crate::lib::error;\n\nuse super::{User, UserInfo};\n\n\n\npub async fn get_by_id(id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Option<User>, error::Error> {\n\n let r = sqlx::query_as::<_, User>(\"SELECT * FROM users WHERE id=$1\")\n\n .bind(id)\n\n .fetch_optional(db)\n\n .await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\npub async fn get_by_username(username: &str, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Option<User>, error::Error> {\n", "file_path": "src/api/user/model.rs", "rank": 47, "score": 26.738620007735474 }, { "content": " q = q.bind(last_login_ip);\n\n }\n\n if let Some(user_type) = &user.user_type {\n\n q = q.bind(user_type);\n\n }\n\n q = q.bind(id);\n\n\n\n let r = q.fetch_one(db).await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\npub async fn update_last_login(login_time: DateTime<Utc>, ip: &str, user_id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<(), error::Error> {\n\n let r = sqlx::query(r#\"UPDATE users SET last_login_time=$1, last_login_ip=$2 WHERE id=$3\"#)\n", "file_path": "src/api/user/model.rs", "rank": 48, "score": 26.3799480254073 }, { "content": "use crate::lib::{client::ClientInfo, error};\n\nuse chrono::{DateTime, Utc};\n\nuse super::{AuthBlacklist, Authorization};\n\n\n\n// Add log\n\npub async fn insert_log(log_type: i16, msg: &str, user_id: i32, auth_id: i32, client: &ClientInfo, log_time: DateTime<Utc>, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<(), error::Error> {\n\n let r = sqlx::query(r#\"\n\n INSERT INTO authorizations_logs (user_id, log_type, ip, log_time, client_type, auth_id, log, user_agent)\n\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\"#)\n\n .bind(user_id)\n\n .bind(log_type)\n\n .bind(&client.ip)\n\n .bind(log_time)\n\n .bind(10)\n\n .bind(auth_id)\n\n .bind(msg)\n\n .bind(&client.user_agent)\n\n .execute(db)\n\n .await;\n\n \n", "file_path": "src/api/authorizations/model.rs", "rank": 49, "score": 26.24655607348086 }, { "content": " error!(log, \"{}\", err);\n\n return Err(error::err500());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// Insert authorization\n\npub async fn insert_auth(authorization: &Authorization, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Authorization, error::Error> {\n\n let r = sqlx::query_as::<_, Authorization>(r#\"\n\n INSERT INTO authorizations (user_id, uuid, client_type, refresh_token, create_time, access_token_id, access_token_exp, access_token_iat, is_enabled)\n\n\t VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n\n\t RETURNING *\"#)\n\n .bind(&authorization.user_id)\n\n .bind(&authorization.uuid)\n\n .bind(&authorization.client_type)\n\n .bind(&authorization.refresh_token)\n\n .bind(&authorization.create_time)\n\n .bind(&authorization.access_token_id)\n\n .bind(&authorization.access_token_exp)\n", "file_path": "src/api/authorizations/model.rs", 
"rank": 50, "score": 25.952563200219142 }, { "content": " if let Err(err) = r {\n\n error!(log, \"{}\", err);\n\n return Err(error::err500());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn get_user_info_by_id(id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Option<UserInfo>, error::Error> {\n\n let r = sqlx::query_as::<_, UserInfo>(r#\"\n\n SELECT id, username, uuid, mobile, last_login_time, last_login_ip, user_type FROM users\n\n WHERE id = $1 AND is_del=0 AND is_enabled=1\"#)\n\n .bind(id)\n\n .fetch_optional(db)\n\n .await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}", "file_path": "src/api/user/model.rs", "rank": 51, "score": 25.77700144592264 }, { "content": " match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\npub async fn update(user: &User, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<User, error::Error> {\n\n let id = match user.id {\n\n Some(v) => v,\n\n None => 0,\n\n };\n\n\n\n if id <= 0 {\n\n error!(log, \"update id error: {}\", id);\n\n return Err(error::err500());\n\n }\n\n\n", "file_path": "src/api/user/model.rs", "rank": 52, "score": 24.86893141610922 }, { "content": " error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\n// Update authorization\n\npub async fn update_auth(authorization: &Authorization, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Authorization, error::Error> {\n\n let id = match authorization.id {\n\n Some(v) => v,\n\n None => 0,\n\n };\n\n\n\n if id <= 0 {\n\n error!(log, \"update id error: {}\", id);\n\n return Err(error::err500());\n\n }\n\n\n\n let mut sql1 = vec![format!(\"update_time = $1\")];\n\n let mut sql_index = 2;\n", "file_path": "src/api/authorizations/model.rs", "rank": 53, "score": 24.664760678850097 }, { "content": " \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\n// Obtain authorization information through uuid\n\npub async fn get_by_uuid(uuid: uuid::Uuid, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Option<Authorization>, error::Error> {\n\n let r = sqlx::query_as::<_, Authorization>(\"SELECT * FROM authorizations WHERE uuid=$1\")\n\n .bind(uuid)\n\n .fetch_optional(db)\n\n .await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n", "file_path": "src/api/authorizations/model.rs", "rank": 54, "score": 24.376537686607463 }, { "content": "### Configuration\n\n\n\nConfiguration library selection [config](https://github.com/mehcode/config-rs),The configuration file is `data/config/app.toml`,Configuration data can be passed `web::Data` obtain:\n\n\n\n```\n\npub async fn hello(state: web::Data<AppState>) -> Result<web::HttpResponse, error::Error> {\n\n let name = state.config.get::<String>(\"app.name\").unwrap();\n\n ...\n\n}\n\n```\n\n\n\n### Log \n\n\n\nLog library selection [slog](https://github.com/slog-rs/slog),supports asynchronous, configured with log file and screen dual output. 
The log file is `data/logs/app.log`,actix Журнал можно вести следующими способами:\n\n\n\n```\n\npub async fn hello(state: web::Data<AppState>) -> Result<web::HttpResponse, error::Error> {\n\n ...\n\n info!(state.log, \"hello {}\", name);\n\n ...\n\n}\n\n```\n\n\n\n### Database\n\n\n\nDatabase operation library selection [sqlx](https://github.com/launchbadge/sqlx),In this example, postgres is configured to support various other commonly used databases. accessible `web::Data` Get the database connection pool. Pay attention to the use of asynchronous development/\n\n\n\n### Redis\n\n\n\nRedis operational library selection [redis](https://github.com/mitsuhiko/redis-rs),support asynchronous mode, use [mobc](https://github.com/importcjj/mobc) The configured connection pool. accessible `web::Data` Get the redis connection pool. Pay attention to the use of asynchronous development。\n\n\n\n```\n\nuse redis::AsyncCommands;\n\n...\n\n\n\npub async fn hello(state: web::Data<AppState>) -> Result<web::HttpResponse, error::Error> {\n\n let mut con = state.redis.get().await.unwrap();\n\n let val = con.get(\"my_key\").await.unwrap();\n\n ...\n\n}\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 55, "score": 24.232376196773007 }, { "content": " .bind(&authorization.access_token_iat)\n\n .bind(&authorization.is_enabled)\n\n .fetch_one(db)\n\n .await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}\n\n\n\n\n\n// Disable authorization\n\npub async fn disable_auth(id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<(), error::Error> {\n\n let r = sqlx::query(\"UPDATE authorizations SET is_enabled=0, update_time=$1 WHERE id=$2\")\n\n .bind(Utc::now())\n\n .bind(id)\n", "file_path": "src/api/authorizations/model.rs", "rank": 56, "score": 23.847603597138043 }, { "content": "\n\n Ok(result)\n\n}\n\n\n\npub async fn update(user: &User, state: &web::Data<AppState>) -> Result<User, error::Error> {\n\n let result = model::update(user, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n\n\n\npub async fn delete(id: i32, state: &web::Data<AppState>) -> Result<(), error::Error> {\n\n model::delete(id, &state.db, &state.log).await?;\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn update_last_login(login_time: DateTime<Utc>, user_id: i32, client: &ClientInfo, state: &web::Data<AppState>) -> Result<(), error::Error> {\n\n model::update_last_login(login_time, &client.ip, user_id, &state.db, &state.log).await?;\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn get_user_info_by_id(id: i32, state: &web::Data<AppState>) -> Result<Option<UserInfo>, error::Error> {\n\n let result = model::get_user_info_by_id(id, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n", "file_path": "src/api/user/service.rs", "rank": 57, "score": 23.44864641749147 }, { "content": " .bind(login_time)\n\n .bind(ip)\n\n .bind(user_id)\n\n .execute(db)\n\n .await;\n\n \n\n if let Err(err) = r {\n\n error!(log, \"{}\", err);\n\n return Err(error::err500());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn delete(id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<(), error::Error> {\n\n let r = sqlx::query(\"UPDATE users SET is_del = 1 WHERE id=$1\")\n\n .bind(id)\n\n .execute(db)\n\n .await;\n\n \n", "file_path": "src/api/user/model.rs", "rank": 58, "score": 23.051426088713498 }, { "content": "use actix_web::web;\n\nuse futures::future::{BoxFuture, join_all};\n\nuse crate::AppState;\n\nuse crate::{lib, lib::{client::ClientInfo, error}};\n\nuse 
crate::api::authorizations::model;\n\nuse chrono::prelude::*;\n\nuse super::{AuthBlacklist, Authorization};\n\nuse crate::api::user;\n\n\n\n// Add log\n\npub async fn insert_log(log_type: i16, msg: &str, user_id: i32, auth_id: i32, client: &ClientInfo, state: &web::Data<AppState>) -> Result<(), error::Error> {\n\n model::insert_log(log_type, msg, user_id, auth_id, client, Utc::now(), &state.db, &state.log).await?;\n\n \n\n Ok(())\n\n}\n\n\n\n// Add the user's login token to the blacklist\n\npub async fn add_black_list(auth_black_list: &AuthBlacklist, state: &web::Data<AppState>) -> Result<(), error::Error> {\n\n let task1 = model::insert_auth_black_list(&auth_black_list, &state.db, &state.log);\n\n let mut hold: Vec<BoxFuture<_>> = vec![Box::pin(task1)];\n", "file_path": "src/api/authorizations/service.rs", "rank": 59, "score": 22.685912914878514 }, { "content": "use actix_web::web;\n\nuse crate::AppState;\n\nuse crate::lib::{client::ClientInfo, error};\n\nuse super::{model, User, UserInfo};\n\nuse chrono::prelude::*;\n\n\n\npub async fn get_by_id(id: i32, state: &web::Data<AppState>) -> Result<Option<User>, error::Error> {\n\n let result = model::get_by_id(id, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n\n\n\npub async fn get_by_username(username: &str, state: &web::Data<AppState>) -> Result<Option<User>, error::Error> {\n\n let result = model::get_by_username(&username, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n\n\n\npub async fn insert(user: &User, state: &web::Data<AppState>) -> Result<User, error::Error> {\n\n let result = model::insert(user, &state.db, &state.log).await?;\n", "file_path": "src/api/user/service.rs", "rank": 60, "score": 22.663677728264624 }, { "content": " expires_in: access_token.expire,\n\n refresh_token: refresh_token.token,\n\n created_at: format!(\"{:?}\", access_token.create_time),\n\n updated_at: format!(\"{:?}\", update_time),\n\n }))\n\n}\n\n\n\n// Delete authorization\n\n#[delete(\"/authorizations/{id}\")]\n\npub async fn delete_auth(req: HttpRequest, state: web::Data<AppState>, conn: ConnectionInfo) -> Result<web::HttpResponse, error::Error> {\n\n let id: String = req.match_info().get(\"id\").unwrap().parse().unwrap();\n\n validator::uuid(&id, \"Authorization id\")?;\n\n\n\n let client = client::get_client_info(&state, &req, &conn);\n\n \n\n let auth_data = service::get_by_uuid(&id, &state).await?;\n\n let auth_data = match auth_data {\n\n None => {\n\n service::insert_log(1101, \"\", 0, 0, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n", "file_path": "src/api/authorizations/controller.rs", "rank": 61, "score": 21.368153652447425 }, { "content": " model::disable_auth(id, &state.db, &state.log).await?;\n\n\n\n Ok(())\n\n}\n\n\n\n// Obtain authorization information by id\n\npub async fn get_by_id(id: i32, state: &web::Data<AppState>) -> Result<Option<Authorization>, error::Error> {\n\n let result = model::get_by_id(id, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n\n\n\n// Obtain authorization information through uuid\n\npub async fn get_by_uuid(uuid: &str, state: &web::Data<AppState>) -> Result<Option<Authorization>, error::Error> {\n\n let uid = match uuid::Uuid::parse_str(uuid) {\n\n Err(_) => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Ok(v) => v\n\n };\n\n \n\n let result = model::get_by_uuid(uid, &state.db, &state.log).await?;\n", "file_path": "src/api/authorizations/service.rs", "rank": 62, "score": 21.196759979552613 }, { "content": " 
.execute(db)\n\n .await;\n\n \n\n if let Err(err) = r {\n\n error!(log, \"{}\", err);\n\n return Err(error::err500());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// Obtain authorization information by id\n\npub async fn get_by_id(id: i32, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<Option<Authorization>, error::Error> {\n\n let r = sqlx::query_as::<_, Authorization>(r#\"\n\n SELECT a.*\n\n FROM authorizations a INNER JOIN users b ON a.user_id=b.id\n\n WHERE a.id=$1 AND b.is_enabled=1 AND b.is_del=0\"#)\n\n .bind(id)\n\n .fetch_optional(db)\n\n .await;\n", "file_path": "src/api/authorizations/model.rs", "rank": 63, "score": 20.283827787420353 }, { "content": " if let Err(err) = r {\n\n error!(log, \"{}\", err);\n\n return Err(error::err500());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// Add the user's login token to the blacklist\n\npub async fn insert_auth_black_list(auth_black_list: &AuthBlacklist, db: &sqlx::Pool<sqlx::Postgres>, log: &slog::Logger) -> Result<(), error::Error> {\n\n let r = sqlx::query(r#\"\n\n INSERT INTO authorizations_blacklist (access_token_id, access_token_exp, user_id)\n\n\t VALUES ($1, $2, $3)\"#)\n\n .bind(&auth_black_list.access_token_id)\n\n .bind(&auth_black_list.access_token_exp)\n\n .bind(&auth_black_list.user_id)\n\n .execute(db)\n\n .await;\n\n \n\n if let Err(err) = r {\n", "file_path": "src/api/authorizations/model.rs", "rank": 64, "score": 19.8500395897894 }, { "content": " service::insert_log(1003, &username, 0, 0, &client, &state).await?;\n\n return Err(error::new(100400, \"Incorrect account or password\", 422));\n\n },\n\n Some(v) => v\n\n };\n\n\n\n let user_id = match u.id {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n\n Some(v) => v,\n\n };\n\n\n\n let user_type = match u.user_type {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n\n Some(v) => v,\n\n };\n\n\n\n let is_del = match u.is_del {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n\n Some(v) => v,\n\n };\n", "file_path": "src/api/authorizations/controller.rs", "rank": 65, "score": 19.26212027424604 }, { "content": "\n\n// Refresh authorization\n\n#[put(\"/authorizations/{id}\")]\n\npub async fn refresh_auth(req: HttpRequest, state: web::Data<AppState>, conn: ConnectionInfo) -> Result<web::HttpResponse, error::Error> {\n\n let id: String = req.match_info().get(\"id\").unwrap().parse().unwrap();\n\n validator::uuid(&id, \"Authorization id\")?;\n\n\n\n let token = match req.headers().get(\"Authorization\") {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v.len() <= 6 {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n let v = v.to_str().unwrap_or_default().to_string();\n\n if &v[..7] != \"Bearer \" {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n String::from(&v[7..])\n\n }\n\n };\n", "file_path": "src/api/authorizations/controller.rs", "rank": 66, "score": 18.923158939367195 }, { "content": " let result = lib::redis::has_key(format!(\"auth_black_list_{}\", id), &state.redis, &state.log).await?;\n\n\n\n Ok(result)\n\n}\n\n\n\n// Create authorization\n\npub async fn create_auth(authorization: &Authorization, client: &ClientInfo, state: &web::Data<AppState>) -> Result<i32, error::Error> {\n\n let result = model::insert_auth(authorization, &state.db, &state.log).await?;\n\n if let Some(user_id) = authorization.user_id {\n\n user::service::update_last_login(Utc::now(), 
user_id, &client, &state).await?;\n\n }\n\n if let Some(v) = result.id {\n\n return Ok(v);\n\n }\n\n\n\n Ok(0)\n\n}\n\n\n\n// Revoke authorization\n\npub async fn revoke_auth(id: i32, state: &web::Data<AppState>) -> Result<(), error::Error> {\n", "file_path": "src/api/authorizations/service.rs", "rank": 67, "score": 18.863943301925218 }, { "content": "\n\n Ok(result)\n\n}\n\n\n\n// Update authorization\n\npub async fn update_auth(authorization: &Authorization, state: &web::Data<AppState>) -> Result<Authorization, error::Error> {\n\n let result = model::update_auth(authorization, &state.db, &state.log).await?;\n\n\n\n Ok(result)\n\n}", "file_path": "src/api/authorizations/service.rs", "rank": 68, "score": 17.626804538076602 }, { "content": " Some(v) => v,\n\n };\n\n let pwd = auth::crypt_password(&password, &salt);\n\n if user_password != pwd {\n\n service::insert_log(1001, \"\", user_id, 0, &client, &state).await?;\n\n return Err(error::new(100400, \"Incorrect account or password\", 422));\n\n }\n\n\n\n let auth = auth::create_auth(user_id, user_type, &client, &state).await?;\n\n service::insert_log(1, \"\", user_id, auth.auth_id, &client, &state).await?;\n\n\n\n Ok(HttpResponse::Ok().json(ResTokenJson {\n\n id: auth.refresh_token_id.to_string(),\n\n access_token: auth.access_token.token,\n\n expires_in: auth.access_token.expire,\n\n refresh_token: auth.refresh_token.token,\n\n created_at: format!(\"{:?}\", auth.access_token.create_time),\n\n updated_at: format!(\"{:?}\", auth.access_token.create_time),\n\n }))\n\n}\n", "file_path": "src/api/authorizations/controller.rs", "rank": 69, "score": 17.55373782048687 }, { "content": " if is_del != 0 {\n\n service::insert_log(1004, \"\", user_id, 0, &client, &state).await?;\n\n return Err(error::new(100400, \"Incorrect account or password\", 422));\n\n }\n\n\n\n let is_enabled = match u.is_enabled {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n\n Some(v) => v,\n\n };\n\n if is_enabled != 1 {\n\n service::insert_log(1002, \"\", user_id, 0, &client, &state).await?;\n\n return Err(error::new(100400, \"Incorrect account or password\", 422));\n\n }\n\n\n\n let user_password = match u.password {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n\n Some(v) => v,\n\n };\n\n let salt = match u.salt {\n\n None => return Err(error::new(100400, \"Incorrect account or password\", 422)),\n", "file_path": "src/api/authorizations/controller.rs", "rank": 70, "score": 17.465543058091356 }, { "content": "use actix_web::{web, put, get, HttpResponse, HttpRequest};\n\nuse actix_web::dev::ConnectionInfo;\n\nuse serde::{Deserialize};\n\nuse crate::AppState;\n\nuse crate::lib::{error, validator, client, auth};\n\nuse crate::api::user::{service, User};\n\nuse crate::api::authorizations;\n\nuse chrono::prelude::*;\n\n\n\n#[get(\"/user\")]\n\npub async fn get_info(req: HttpRequest, state: web::Data<AppState>) -> Result<web::HttpResponse, error::Error> {\n\n let auth_info = auth::verify(\"ROLE_MEMBER\", &req, &state).await?;\n\n\n\n let user_data = match service::get_user_info_by_id(auth_info.id, &state).await? 
{\n\n None => return Err(error::new(400007, \"Unable to obtain user information\", 422)),\n\n Some(v) => v\n\n };\n\n\n\n Ok(HttpResponse::Ok().json(user_data))\n\n}\n", "file_path": "src/api/user/controller.rs", "rank": 71, "score": 17.35420765782258 }, { "content": "\n\n#[derive(Deserialize)]\n\npub struct ChangePasswordReqJson {\n\n old_password: Option<String>,\n\n new_password: Option<String>,\n\n confirm_password: Option<String>,\n\n}\n\n\n\n#[put(\"/user/password\")]\n\npub async fn change_password(req_info: web::Json<ChangePasswordReqJson>, req: HttpRequest, state: web::Data<AppState>, conn: ConnectionInfo) -> Result<web::HttpResponse, error::Error> {\n\n let auth_info = auth::verify(\"ROLE_MEMBER\", &req, &state).await?;\n\n\n\n let old_password = validator::required_str(&req_info.old_password, \"old password\")?;\n\n let new_password = validator::required_str(&req_info.new_password, \"new password\")?;\n\n let confirm_password = validator::required_str(&req_info.confirm_password, \"confirm password\")?;\n\n\n\n if new_password != confirm_password {\n\n return Err(error::new(100301, \"The new password and the confirmed password are inconsistent\", 422));\n\n }\n\n\n", "file_path": "src/api/user/controller.rs", "rank": 72, "score": 17.157858833191035 }, { "content": " pub expire_time: DateTime<Utc>,\n\n pub create_time: DateTime<Utc>,\n\n pub expire: i64,\n\n pub jti: uuid::Uuid,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Auth {\n\n pub access_token: Token,\n\n pub refresh_token: Token,\n\n pub refresh_token_id: uuid::Uuid,\n\n pub auth_id: i32,\n\n}\n\n\n\npub async fn create_auth(user_id: i32, user_type: i16, client: &ClientInfo, state: &web::Data<AppState>) -> Result<Auth, error::Error> {\n\n let access_token = create_access_token(user_id, user_type, &state.config);\n\n\n\n let refresh_token_id = uuid::Uuid::new_v4();\n\n let refresh_token_jti = uuid::Uuid::new_v4();\n\n\n", "file_path": "src/lib/auth.rs", "rank": 73, "score": 16.947457230849874 }, { "content": " q = q.bind(access_token_iat);\n\n }\n\n q = q.bind(id);\n\n\n\n let r = q.fetch_one(db).await;\n\n \n\n match r {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n error!(log, \"{}\", e);\n\n Err(error::err500())\n\n }\n\n }\n\n}", "file_path": "src/api/authorizations/model.rs", "rank": 74, "score": 16.586177050421355 }, { "content": " let db_pool = lib::db::pg::conn(&settings).await;\n\n\n\n // redis\n\n let redis_pool = lib::redis::conn(&settings).await;\n\n\n\n HttpServer::new(move || {\n\n let cors = Cors::permissive();\n\n\n\n println!(\"==> 🚀 {} listening at {}\", settings.get::<String>(\"app.name\").unwrap(), settings.get::<String>(\"app.port\").unwrap());\n\n\n\n App::new()\n\n .app_data(web::Data::new(AppState {\n\n config: settings.clone(),\n\n log: logger.clone(),\n\n db: db_pool.clone(),\n\n redis: redis_pool.clone(),\n\n }))\n\n .wrap(\n\n ErrorHandlers::new()\n\n .handler(http::StatusCode::METHOD_NOT_ALLOWED, error::render_405)\n", "file_path": "src/main.rs", "rank": 75, "score": 16.376643780226278 }, { "content": " let salt = auth::salt();\n\n let pwd = auth::crypt_password(&new_password, &salt);\n\n\n\n let mut user = User::new();\n\n user.id = Some(auth_info.id);\n\n user.password = Some(pwd);\n\n user.salt = Some(salt);\n\n user.update_time = Some(Utc::now());\n\n\n\n let client = client::get_client_info(&state, &req, &conn);\n\n\n\n service::update(&user, &state).await?;\n\n authorizations::service::insert_log(5, \"\", auth_info.id, 0, &client, &state).await?;\n\n\n\n Ok(HttpResponse::Ok().body(\"\"))\n\n}\n", 
"file_path": "src/api/user/controller.rs", "rank": 76, "score": 16.06357048053951 }, { "content": " pub user_type: Option<i16>,\n\n}\n\n\n\nimpl User {\n\n pub fn new() -> Self {\n\n Self {\n\n id: None,\n\n uuid: None,\n\n username: None,\n\n password: None,\n\n salt: None,\n\n mobile: None,\n\n create_time: None,\n\n update_time: None,\n\n is_del: None,\n\n is_enabled: None,\n\n last_login_time: None,\n\n last_login_ip: None,\n\n user_type: None,\n\n }\n", "file_path": "src/api/user/mod.rs", "rank": 77, "score": 15.747810841394392 }, { "content": " if v.len() <= 6 {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n let v = v.to_str().unwrap_or_default().to_string();\n\n if &v[..7] != \"Bearer \" {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n String::from(&v[7..])\n\n }\n\n };\n\n\n\n let claims = parse_token(&token)?;\n\n let user_id = match claims.sub.parse::<i32>() {\n\n Err(_) => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Ok(v) => v\n\n };\n\n\n\n let mut scopes: Vec<String> = Vec::new();\n\n if permission.len() > 0 {\n\n let mut have_permission = false;\n", "file_path": "src/lib/auth.rs", "rank": 78, "score": 15.647200368512156 }, { "content": "### Wrong format\n\n\n\nuse JSON data structure。\n\n\n\n```\n\n{\"errcode\":100203,\"errmsg\":\"Captcha not found\"}\n\n```\n\n(We also return similar error codes uniformly for the back-end interfaces of other languages, so that the front-end doesn’t need to care what language the back-end uses, just use a unified error format.)\n\n\n\nactix available in `src/lib/error` the error type defined in the output error:\n\n\n\n```\n\nuse crate::lib::error;\n\n...\n\n\n\nreturn Err(error::new(400001, \"name cannot be empty\", 422));\n\n...\n\n\n\n```\n\n\n\n注:Temporarily use the 422 error code to output the error. In the current Actix version, the 400 error will be covered by the default custom error output, which will be resolved later。\n\n\n\n### Input verification\n\n\n\nFor the time being, the third-party library is not used for input verification, and it is processed in a simple way (I follow my usual method of scaffolding in other languages). Some input formats can be judged by the regularity in `src/lib/validator.rs`, and other needs can be added here Verification. In actix, use? 
To verify and pass errors。\n\n\n\n```\n\nvalidator::uuid(uuid_var, \"uuid\")?;\n\nvalidator::not_none(absent_number, \"numerical value\")?;\n\n...\n\n```\n\n\n\nIf you need to judge the input string as non-None and automatically convert Option to String,can be used `validator::required_str` function:\n\n\n\n```\n\nlet name = validator::required_str(name_param, \"name\")?;\n\n```\n", "file_path": "README.md", "rank": 79, "score": 15.45470182291068 }, { "content": " None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v.to_string() != id {\n\n service::insert_log(1059, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n match auth_data.is_enabled {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 1 {\n\n service::insert_log(1060, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n match auth_data.refresh_token {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 80, "score": 15.417685224828482 }, { "content": " None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v.to_string() != claims.jti {\n\n service::insert_log(1060, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n match user_data.is_enabled {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 1 {\n\n service::insert_log(1061, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n match user_data.is_del {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 81, "score": 15.417685224828482 }, { "content": "use actix_web::{web, post, put, delete, HttpResponse, HttpRequest};\n\nuse actix_web::dev::ConnectionInfo;\n\nuse serde::{Serialize, Deserialize};\n\nuse crate::AppState;\n\nuse crate::lib::{error, validator, client, auth};\n\nuse crate::api::user;\n\nuse super::{service, AuthBlacklist, Authorization};\n\nuse chrono::prelude::*;\n\n\n\n#[derive(Deserialize)]\n\npub struct CreateAuthReqJson {\n\n username: Option<String>,\n\n password: Option<String>,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/api/authorizations/controller.rs", "rank": 82, "score": 14.932551097265435 }, { "content": " None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 0 {\n\n service::insert_log(1062, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n let user_type = match user_data.user_type {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let access_token_id = match auth_data.access_token_id {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let access_token_exp = match auth_data.access_token_exp {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 83, "score": 14.871749627593204 }, { "content": "\n\n let diff = (auth_black_list.access_token_exp.time() - Utc::now().time()).num_seconds();\n\n if diff > 0 {\n\n let task2 = lib::redis::set_with_expire(\n\n format!(\"auth_black_list_{}\", 
auth_black_list.access_token_id),\n\n auth_black_list.user_id,\n\n diff,\n\n &state.redis,\n\n &state.log\n\n );\n\n hold.push(Box::pin(task2));\n\n }\n\n\n\n join_all(hold).await;\n\n\n\n Ok(())\n\n}\n\n\n\n// Check if the id is in the blacklist\n\npub async fn is_in_black_list(id: &String, state: &web::Data<AppState>) -> Result<bool, error::Error> {\n", "file_path": "src/api/authorizations/service.rs", "rank": 84, "score": 14.383594233821695 }, { "content": " \n\n let client = client::get_client_info(&state, &req, &conn);\n\n\n\n let claims = auth::parse_token(&token)?;\n\n let mut have_permission = false;\n\n for v in claims.scopes {\n\n if v == \"ROLE_REFRESH_TOKEN\" {\n\n have_permission = true;\n\n break;\n\n }\n\n }\n\n if !have_permission {\n\n service::insert_log(1053, \"\", 0, 0, &client, &state).await?;\n\n return Err(error::new(100404, \"No permission\", 403));\n\n }\n\n\n\n let auth_id = claims.sub.parse::<i32>().unwrap();\n\n\n\n let auth_data = match service::get_by_id(auth_id, &state).await? {\n\n None => {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 85, "score": 14.30825664850792 }, { "content": "\n\n sql1.push(String::from(\"user_type\"));\n\n sql2.push(format!(\"${}\", sql_index));\n\n\n\n let sql = format!(\"INSERT INTO users ({}) VALUES ({}) RETURNING *\", sql1.join(\",\"), sql2.join(\",\"));\n\n \n\n let mut q = sqlx::query_as::<_, User>(&sql);\n\n\n\n if let Some(uuid) = &user.uuid {\n\n q = q.bind(uuid);\n\n } else {\n\n q = q.bind(uuid::Uuid::new_v4());\n\n }\n\n\n\n if let Some(username) = &user.username {\n\n q = q.bind(username);\n\n }\n\n\n\n if let Some(password) = &user.password {\n\n q = q.bind(password);\n", "file_path": "src/api/user/model.rs", "rank": 86, "score": 13.772220055684588 }, { "content": "pub mod controller;\n\npub mod model;\n\npub mod service;\n\nuse serde::{Serialize};\n\nuse chrono::prelude::*;\n\n\n\n#[derive(Debug, sqlx::FromRow)]\n\npub struct User {\n\n pub id: Option<i32>,\n\n pub uuid: Option<uuid::Uuid>,\n\n pub username: Option<String>,\n\n pub password: Option<String>,\n\n pub salt: Option<uuid::Uuid>,\n\n pub mobile: Option<String>,\n\n pub create_time: Option<DateTime<Utc>>,\n\n pub update_time: Option<DateTime<Utc>>,\n\n pub is_del: Option<i16>,\n\n pub is_enabled: Option<i16>,\n\n pub last_login_time: Option<DateTime<Utc>>,\n\n pub last_login_ip: Option<String>,\n", "file_path": "src/api/user/mod.rs", "rank": 87, "score": 13.51608804257971 }, { "content": "pub mod api;\n\npub mod lib;\n\nmod routes;\n\n\n\n#[macro_use]\n\nextern crate slog;\n\nextern crate slog_term;\n\nextern crate slog_async;\n\nextern crate slog_json;\n\n\n\nuse actix_cors::Cors;\n\nuse actix_web::middleware::ErrorHandlers;\n\nuse actix_web::{http, web, App, HttpServer, Result, HttpResponse};\n\nuse lib::error;\n\nuse routes::{hello, authorizations, user};\n\n\n\n#[derive(Clone)]\n\npub struct AppState {\n\n pub config: config::Config,\n\n pub log: slog::Logger,\n", "file_path": "src/main.rs", "rank": 88, "score": 13.4454216787513 }, { "content": " service::insert_log(1058, \"\", 0, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n },\n\n Some(v) => v\n\n };\n\n\n\n let user_id = match auth_data.user_id {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let user_data = match user::service::get_by_id(user_id, &state).await? 
{\n\n None => {\n\n service::insert_log(1058, \"\", 0, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n },\n\n Some(v) => v\n\n };\n\n\n\n match auth_data.uuid {\n", "file_path": "src/api/authorizations/controller.rs", "rank": 89, "score": 13.364065459434249 }, { "content": "pub mod error;\n\npub mod log;\n\npub mod db;\n\npub mod redis;\n\npub mod validator;\n\npub mod client;\n\npub mod auth;\n\npub mod aes;", "file_path": "src/lib/mod.rs", "rank": 90, "score": 12.886754344545281 }, { "content": " },\n\n Some(v) => v\n\n };\n\n\n\n let user_id = match auth_data.user_id {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let auth_id = match auth_data.id {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n match auth_data.is_enabled {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 1 {\n\n service::insert_log(1102, \"\", user_id, auth_id, &client, &state).await?;\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n", "file_path": "src/api/authorizations/controller.rs", "rank": 91, "score": 12.704776538089575 }, { "content": " return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n let old_salt = match user_data.salt {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let old_password_store = match user_data.password {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => v,\n\n };\n\n\n\n let old_pwd = auth::crypt_password(&old_password, &old_salt);\n\n if old_pwd != old_password_store {\n\n return Err(error::new(100407, \"error old password\", 422));\n\n }\n\n\n", "file_path": "src/api/user/controller.rs", "rank": 92, "score": 12.415975682950151 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, sqlx::FromRow, Serialize)]\n\npub struct UserInfo {\n\n pub id: i32,\n\n pub username: Option<String>,\n\n pub uuid: uuid::Uuid,\n\n pub mobile: Option<String>,\n\n pub last_login_time: Option<DateTime<Utc>>,\n\n pub last_login_ip: Option<String>,\n\n pub user_type: i16,\n\n}", "file_path": "src/api/user/mod.rs", "rank": 93, "score": 11.482026070469926 }, { "content": "# rust-actix-rest-api-boilerplate\n\nA Rust RESTful API server with actix-web\n\n\n\n## Install\n\n- Install [Rust](https://www.rust-lang.org/)\n\n- Install [Docker](https://www.docker.com/) (optional)\n\n- Install [Docker Compose](https://github.com/docker/compose/releases) (optional)\n\n\n\n\n\nInstall first [cargo-make](https://github.com/sagiegurari/cargo-make),Can use cargo make command to start some scripts\n\n\n\n```\n\n$ cargo install --no-default-features --force cargo-make\n\n```\n\n\n\n```\n\n$ git clone address of this library\n\n```\n\n\n\n## Start the development environment\n\n```\n\n$ cargo make dev\n\n```\n\n\n\nPort 8080 is enabled by default,accessible http://localhost:8080/hello test whether the startup is successful\n\n\n\n\n\n## Clean up\n\n```\n\n$ cargo make clean\n\n```\n\n\n\n## Bale\n\n```\n\n$ cargo make build\n\n```\n\n\n\n## mac cross compile linux\n\n```\n\n$ cargo make buildlinux\n\n```\n\n\n\n## Start the local development environment database (Docker, optional)\n\n````\n\n$ docker-compose up\n\n````\n\n\n\nPass through docker-compose start up postgres and redis,postgres the port is 5432, redis the port is 6379. 
The database can be operated and debugged through the local client tool connection\n\n\n\n## Stop the local development environment database (Docker, optional)\n\n````\n\n$ docker-compose down\n\n````\n\n\n\n## Instruction\n\n\n\nConciseness\n\n\n\n### About web frameworks actix-web\n\n\n\n[actix-web](https://actix.rs/)It is a fast asynchronous web framework under rust。Used by the underlying asynchronous library [Tokio](https://tokio.rs/),Pay attention to the use of asynchronous development when developing。\n\n\n\nDatabase connection pool,redis examples of connection pools and configuration files,exist actix web pass at startup `app_data` Incoming actix。\n\n\n\n```\n\nApp::new()\n\n .app_data(web::Data::new(AppState {\n\n config: settings.clone(),\n\n log: logger.clone(),\n\n db: db_pool.clone(),\n\n redis: redis_pool.clone(),\n\n }))\n\n...\n\n```\n\n\n", "file_path": "README.md", "rank": 94, "score": 11.067194482597825 }, { "content": " id: Some(auth_id),\n\n user_id: None,\n\n uuid: None,\n\n client_type: None,\n\n refresh_token: Some(refresh_token_jti),\n\n create_time: None,\n\n update_time: Some(update_time),\n\n last_refresh_time: Some(update_time),\n\n access_token_id: Some(access_token.jti),\n\n access_token_exp: Some(access_token.expire_time),\n\n access_token_iat: Some(access_token.create_time),\n\n is_enabled: None,\n\n };\n\n\n\n service::update_auth(&authorization, &state).await?;\n\n service::insert_log(2, \"\", user_id, auth_id, &client, &state).await?;\n\n\n\n Ok(HttpResponse::Ok().json(ResTokenJson {\n\n id,\n\n access_token: access_token.token,\n", "file_path": "src/api/authorizations/controller.rs", "rank": 95, "score": 10.922404335318308 }, { "content": "pub mod controller;\n\npub mod model;\n\npub mod service;\n\n\n\nuse chrono::prelude::*;\n\n\n\n#[derive(Debug)]\n\npub struct AuthBlacklist {\n\n pub id: Option<i32>,\n\n pub access_token_id: uuid::Uuid,\n\n pub access_token_exp: DateTime<Utc>,\n\n pub user_id: i32,\n\n}\n\n\n\n#[derive(Debug, sqlx::FromRow)]\n\npub struct Authorization {\n\n pub id: Option<i32>,\n\n pub user_id: Option<i32>,\n\n pub uuid: Option<uuid::Uuid>,\n\n pub client_type: Option<i16>,\n", "file_path": "src/api/authorizations/mod.rs", "rank": 96, "score": 10.596744283810978 }, { "content": " for v in claims.scopes {\n\n if v == permission {\n\n have_permission = true;\n\n }\n\n scopes.push(v);\n\n }\n\n if !have_permission {\n\n return Err(error::new(100404, \"No permission\", 403));\n\n }\n\n }\n\n\n\n match authorizations::service::is_in_black_list(&claims.jti, &state).await {\n\n Err(_) => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Ok(v) => {\n\n if v {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n }\n\n };\n\n\n\n let authorization_info = AuthorizationInfo {\n\n id: user_id,\n\n scopes,\n\n };\n\n\n\n Ok(authorization_info)\n\n}", "file_path": "src/lib/auth.rs", "rank": 97, "score": 10.58964238607251 }, { "content": " let user_data = match service::get_by_id(auth_info.id, &state).await? 
{\n\n None => return Err(error::new(400007, \"Unable to obtain user information\", 422)),\n\n Some(v) => v\n\n };\n\n\n\n // 用户被删除\n\n match user_data.is_del {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 0 {\n\n return Err(error::new(100403, \"Authentication failure\", 401));\n\n }\n\n },\n\n };\n\n\n\n // 用户被禁用\n\n match user_data.is_enabled {\n\n None => return Err(error::new(100403, \"Authentication failure\", 401)),\n\n Some(v) => {\n\n if v != 1 {\n", "file_path": "src/api/user/controller.rs", "rank": 98, "score": 9.155811372340072 }, { "content": " sql1.push(format!(\"is_enabled = ${}\", sql_index));\n\n sql_index += 1;\n\n }\n\n if let Some(_) = &user.last_login_time {\n\n sql1.push(format!(\"last_login_time = ${}\", sql_index));\n\n sql_index += 1;\n\n }\n\n if let Some(_) = &user.last_login_ip {\n\n sql1.push(format!(\"last_login_ip = ${}\", sql_index));\n\n sql_index += 1;\n\n }\n\n if let Some(_) = &user.user_type {\n\n sql1.push(format!(\"user_type = ${}\", sql_index));\n\n sql_index += 1;\n\n }\n\n\n\n let sql = format!(\"UPDATE users SET {} WHERE id = ${} RETURNING *\", sql1.join(\",\"), sql_index);\n\n\n\n let mut q = sqlx::query_as::<_, User>(&sql);\n\n\n", "file_path": "src/api/user/model.rs", "rank": 99, "score": 8.651463154767093 } ]
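The authorization and user-controller fragments quoted above keep repeating one pattern: match an `Option` field and return `error::new(100403, "Authentication failure", 401)` whenever it is `None`. As a non-authoritative sketch — the helper itself is hypothetical and not part of the quoted repository, and the `error::Error` type plus the `error::new(code, message, http_status)` signature are assumptions read off those fragments — the `None` branches could be factored out like this:

```
// Hypothetical helper, not taken from the quoted repository: unwrap an
// optional field or return the same "Authentication failure" error that the
// controller fragments above construct by hand.
fn require_auth_field<T>(value: Option<T>) -> Result<T, error::Error> {
    value.ok_or_else(|| error::new(100403, "Authentication failure", 401))
}

// Usage sketch inside a handler body (only covers the `None` case; the
// "is_enabled != 1" style checks above would still be written explicitly):
// let user_id = require_auth_field(auth_data.user_id)?;
// let user_type = require_auth_field(user_data.user_type)?;
```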
Rust
src/osu/versions/no_sliders_no_leniency/mod.rs
RealistikDash/akat-rust-pp
90653f6da82ff981da250a55427dd40aa6ea5b2b
#![cfg(feature = "no_sliders_no_leniency")] use super::super::DifficultyAttributes; mod difficulty_object; mod osu_object; mod skill; mod skill_kind; mod slider_state; use difficulty_object::DifficultyObject; use osu_object::OsuObject; use skill::Skill; use skill_kind::SkillKind; use slider_state::SliderState; use crate::{parse::HitObjectKind, Beatmap, Mods, StarResult, Strains}; const OBJECT_RADIUS: f32 = 64.0; const SECTION_LEN: f32 = 400.0; const DIFFICULTY_MULTIPLIER: f32 = 0.0675; const NORMALIZED_RADIUS: f32 = 52.0; pub fn stars(map: &Beatmap, mods: impl Mods, passed_objects: Option<usize>) -> StarResult { let take = passed_objects.unwrap_or_else(|| map.hit_objects.len()); let attributes = map.attributes().mods(mods); let hitwindow = super::difficulty_range(attributes.od).floor() / attributes.clock_rate; let od = (80.0 - hitwindow) / 6.0; if take < 2 { return StarResult::Osu(DifficultyAttributes { ar: attributes.ar, od, ..Default::default() }); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut max_combo = 0; let mut state = SliderState::new(&map); let mut hit_objects = map .hit_objects .iter() .take(take) .filter_map(|h| match &h.kind { HitObjectKind::Circle => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Slider { pixel_len, repeats, .. } => { max_combo += state.count_ticks(h.start_time, *pixel_len, *repeats, &map); Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. 
} => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let aim_strain = aim.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let speed_strain = speed.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let stars = aim_strain + speed_strain + (aim_strain - speed_strain).abs() / 2.0; StarResult::Osu(DifficultyAttributes { stars, ar: attributes.ar, od, speed_strain, aim_strain, max_combo, n_circles: map.n_circles as usize, n_spinners: map.n_spinners as usize, }) } pub fn strains(map: &Beatmap, mods: impl Mods) -> Strains { let attributes = map.attributes().mods(mods); if map.hit_objects.len() < 2 { return Strains::default(); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut hit_objects = map.hit_objects.iter().filter_map(|h| match &h.kind { HitObjectKind::Circle | HitObjectKind::Slider { .. } => { Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. 
} => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let strains = aim .strain_peaks .into_iter() .zip(speed.strain_peaks.into_iter()) .map(|(aim, speed)| aim + speed) .collect(); Strains { section_length: SECTION_LEN, strains, } }
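For orientation, the `stars` function above returns `StarResult::Osu(DifficultyAttributes)`. A minimal usage sketch follows; it assumes the crate implements `Mods` for a raw `u32` bitflag (as rosu-pp–style crates do) and that this module is reachable from the call site, so treat it as illustrative rather than the crate's documented API:

```
// Sketch only: `u32: Mods` and the call site's module path are assumptions.
// The fields read here (stars, aim_strain, speed_strain, max_combo) are the
// ones populated by the `stars` function above.
pub fn print_difficulty(map: &Beatmap, mods: u32) {
    if let StarResult::Osu(attrs) = stars(map, mods, None) {
        println!(
            "{:.2} stars (aim {:.2}, speed {:.2}, max combo {})",
            attrs.stars, attrs.aim_strain, attrs.speed_strain, attrs.max_combo
        );
    }
}
```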
#![cfg(feature = "no_sliders_no_leniency")] use super::super::DifficultyAttributes; mod difficulty_object; mod osu_object; mod skill; mod skill_kind; mod slider_state; use difficulty_object::DifficultyObject; use osu_object::OsuObject; use skill::Skill; use skill_kind::SkillKind; use slider_state::SliderState; use crate::{parse::HitObjectKind, Beatmap, Mods, StarResult, Strains}; const OBJECT_RADIUS: f32 = 64.0; const SECTION_LEN: f32 = 400.0; const DIFFICULTY_MULTIPLIER: f32 = 0.0675; const NORMALIZED_RADIUS: f32 = 52.0; pub fn stars(map: &Beatmap, mods: impl Mods, passed_objects: Option<usize>) -> StarResult { let take = passed_objects.unwrap_or_else(|| map.hit_objects.len()); let attributes = map.attributes().mods(mods); let hitwindow = super::difficulty_range(attributes.od).floor() / attributes.clock_rate; let od = (80.0 - hitwindow) / 6.0; if take < 2 { return StarResult::Osu(DifficultyAttributes { ar: attributes.ar, od, ..Default::default() }); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut max_combo = 0; let mut state = SliderState::new(&map); let mut hit_objects = map .hit_objects .iter() .take(take) .filter_map(|h| match &h.kind { HitObjectKind::Circle => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Slider { pixel_len, repeats, .. } => { max_combo += state.count_ticks(h.start_time, *pixel_len, *repeats, &map); Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { max_combo += 1; Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. } => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let aim_strain = aim.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let speed_strain = speed.difficulty_value().sqrt() * DIFFICULTY_MULTIPLIER; let stars = aim_strain + speed_strain + (aim_strain - speed_strain).abs() / 2.0; StarResult::Osu(DifficultyAttributes { stars, ar: attributes.ar, od, speed_strain, aim_strain, max_combo, n_circles: map.n_circles as usize, n_spinners: map.n_spinners as usize, }) } pub fn strains(map: &Beatmap, mods: impl Mods) -> Strains { let attributes = map.attributes().mods(mods);
if map.hit_objects.len() < 2 { return Strains::default(); } let radius = OBJECT_RADIUS * (1.0 - 0.7 * (attributes.cs - 5.0) / 5.0) / 2.0; let mut scaling_factor = NORMALIZED_RADIUS / radius; if radius < 30.0 { let small_circle_bonus = (30.0 - radius).min(5.0) / 50.0; scaling_factor *= 1.0 + small_circle_bonus; } let clock_rate = attributes.clock_rate; let mut hit_objects = map.hit_objects.iter().filter_map(|h| match &h.kind { HitObjectKind::Circle | HitObjectKind::Slider { .. } => { Some(OsuObject::new(h.pos, h.start_time, false, clock_rate)) } HitObjectKind::Spinner { .. } => { Some(OsuObject::new(h.pos, h.start_time, true, clock_rate)) } HitObjectKind::Hold { .. } => None, }); let mut aim = Skill::new(SkillKind::Aim); let mut speed = Skill::new(SkillKind::Speed); let mut prev_prev = None; let mut prev = hit_objects.next().unwrap(); let mut prev_vals = None; let mut current_section_end = (prev.time / SECTION_LEN).ceil() * SECTION_LEN; let curr = hit_objects.next().unwrap(); let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; for curr in hit_objects { let h = DifficultyObject::new(&curr, &prev, prev_vals, prev_prev, scaling_factor); while h.base.time > current_section_end { aim.save_current_peak(); aim.start_new_section_from(current_section_end); speed.save_current_peak(); speed.start_new_section_from(current_section_end); current_section_end += SECTION_LEN; } aim.process(&h); speed.process(&h); prev_prev = Some(prev); prev_vals = Some((h.jump_dist, h.strain_time)); prev = curr; } aim.save_current_peak(); speed.save_current_peak(); let strains = aim .strain_peaks .into_iter() .zip(speed.strain_peaks.into_iter()) .map(|(aim, speed)| aim + speed) .collect(); Strains { section_length: SECTION_LEN, strains, } }
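Because `strains` reports one aggregated peak per fixed-length section, the peaks can be mapped back to approximate start times by multiplying each index by `section_length`. A small sketch, assuming the `Strains { section_length: f32, strains: Vec<f32> }` shape implied by how the value is built above:

```
// Sketch: pair each strain peak with the start time of its section.
// Field names and types are taken from how `Strains` is constructed above;
// anything beyond that is an assumption.
fn strain_timeline(strains: &Strains) -> Vec<(f32, f32)> {
    strains
        .strains
        .iter()
        .enumerate()
        .map(|(i, peak)| (i as f32 * strains.section_length, *peak))
        .collect()
}
```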
function_block-function_prefix_line
[]
Rust
src/instruction.rs
kevinheavey/solders
82171e0d34b913efed9f0eb4e5421bc99d3f000e
use pyo3::{basic::CompareOp, prelude::*, types::PyBytes}; use serde::{Deserialize, Serialize}; use solana_sdk::{ instruction::{ AccountMeta as AccountMetaOriginal, CompiledInstruction as CompiledInstructionOriginal, Instruction as InstructionOriginal, }, pubkey::Pubkey as PubkeyOriginal, }; use crate::{handle_py_err, pubkey::Pubkey, RichcmpEqualityOnly}; #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone)] pub struct AccountMeta(AccountMetaOriginal); #[pymethods] impl AccountMeta { #[new] pub fn new(pubkey: &Pubkey, is_signer: bool, is_writable: bool) -> Self { let underlying_pubkey = pubkey.into(); let underlying = if is_writable { AccountMetaOriginal::new(underlying_pubkey, is_signer) } else { AccountMetaOriginal::new_readonly(underlying_pubkey, is_signer) }; underlying.into() } #[getter] pub fn pubkey(&self) -> Pubkey { self.0.pubkey.into() } #[getter] pub fn is_signer(&self) -> bool { self.0.is_signer } #[getter] pub fn is_writable(&self) -> bool { self.0.is_writable } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } } impl RichcmpEqualityOnly for AccountMeta {} impl From<AccountMetaOriginal> for AccountMeta { fn from(am: AccountMetaOriginal) -> Self { Self(am) } } impl From<AccountMeta> for AccountMetaOriginal { fn from(am: AccountMeta) -> Self { am.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] pub struct Instruction(pub InstructionOriginal); #[pymethods] impl Instruction { #[new] pub fn new(program_id: &Pubkey, data: &[u8], accounts: Vec<AccountMeta>) -> Self { let underlying_accounts: Vec<AccountMetaOriginal> = accounts.into_iter().map(|x| x.0).collect(); let underlying = InstructionOriginal::new_with_bytes(program_id.into(), data, underlying_accounts); underlying.into() } #[getter] pub fn program_id(&self) -> Pubkey { self.0.program_id.into() } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } #[getter] pub fn accounts(&self) -> Vec<AccountMeta> { self.0 .accounts .clone() .into_iter() .map(AccountMeta) .collect() } #[setter] pub fn set_accounts(&mut self, accounts: Vec<AccountMeta>) { self.0.accounts = accounts .into_iter() .map(AccountMetaOriginal::from) .collect(); } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for Instruction {} impl From<InstructionOriginal> for Instruction { fn from(ix: InstructionOriginal) -> Self { Self(ix) } } impl From<Instruction> for InstructionOriginal { fn from(ix: Instruction) -> InstructionOriginal { ix.0 } } impl AsRef<InstructionOriginal> for Instruction { fn as_ref(&self) -> &InstructionOriginal { &self.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] pub struct CompiledInstruction(CompiledInstructionOriginal); #[pymethods] impl CompiledInstruction { #[new] pub fn new(program_id_index: u8, data: &[u8], accounts: &[u8]) -> Self 
{ CompiledInstructionOriginal::new_from_raw_parts( program_id_index, data.to_vec(), accounts.to_vec(), ) .into() } pub fn program_id(&self, program_ids: Vec<Pubkey>) -> Pubkey { let underlying_pubkeys: Vec<PubkeyOriginal> = program_ids.iter().map(PubkeyOriginal::from).collect(); let underlying = *self.0.program_id(&underlying_pubkeys); underlying.into() } #[getter] pub fn program_id_index(&self) -> u8 { self.0.program_id_index } #[getter] pub fn accounts<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.accounts) } #[setter] pub fn set_accounts(&mut self, accounts: Vec<u8>) { self.0.accounts = accounts } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for CompiledInstruction {} impl From<CompiledInstructionOriginal> for CompiledInstruction { fn from(ix: CompiledInstructionOriginal) -> Self { Self(ix) } } impl From<CompiledInstruction> for CompiledInstructionOriginal { fn from(ix: CompiledInstruction) -> Self { ix.0 } } impl AsRef<CompiledInstructionOriginal> for CompiledInstruction { fn as_ref(&self) -> &CompiledInstructionOriginal { &self.0 } }
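The `From` impls above make the PyO3 wrappers and the native solana-sdk types interchangeable. Below is a small round-trip sketch that uses only constructors already appearing in this file (`new_with_bytes`) plus `Pubkey::new_unique`; where such a function would live and how it would be called are assumptions, not part of the crate:

```
// Sketch: convert a native solana-sdk Instruction into the Python-facing
// wrapper and back, relying on the From impls defined above. The concrete
// instruction built here is arbitrary example data.
fn round_trip_example() {
    let native = InstructionOriginal::new_with_bytes(
        PubkeyOriginal::new_unique(),
        &[1, 2, 3],
        vec![], // no account metas needed for the illustration
    );
    let wrapped: Instruction = native.clone().into(); // From<InstructionOriginal>
    let back: InstructionOriginal = wrapped.into(); // From<Instruction>
    assert_eq!(native, back);
}
```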
use pyo3::{basic::CompareOp, prelude::*, types::PyBytes}; use serde::{Deserialize, Serialize}; use solana_sdk::{ instruction::{ AccountMeta as AccountMetaOriginal, CompiledInstruction as CompiledInstructionOriginal, Instruction as InstructionOriginal, }, pubkey::Pubkey as PubkeyOriginal, }; use crate::{handle_py_err, pubkey::Pubkey, RichcmpEqualityOnly}; #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone)] pub struct AccountMeta(AccountMetaOriginal); #[pymethods] impl AccountMeta { #[new] pub fn new(pubkey: &Pubkey, is_signer: bool, is_writable: bool) -> Self { let underlying_pubkey = pubkey.into(); let underlying = if is_writable { AccountMetaOriginal::new(underlying_pubkey, is_signer) } else { AccountMetaOriginal::new_readonly(underlying_pubkey, is_signer) }; underlying.into() } #[getter] pub fn pubkey(&self) -> Pubkey { self.0.pubkey.into() } #[getter] pub fn is_signer(&self) -> bool { self.0.is_signer } #[getter] pub fn is_writable(&self) -> bool { self.0.is_writable } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } } impl RichcmpEqualityOnly for AccountMeta {} impl From<AccountMetaOriginal> for AccountMeta { fn from(am: AccountMetaOriginal) -> Self { Self(am) } } impl From<AccountMeta> for AccountMetaOriginal { fn from(am: AccountMeta) -> Self { am.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] pub struct Instruction(pub InstructionOriginal); #[pymethods] impl Instruction { #[new] pub fn new(program_id: &Pubkey, data: &[u8], accounts: Vec<Accoun
#[getter] pub fn program_id(&self) -> Pubkey { self.0.program_id.into() } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } #[getter] pub fn accounts(&self) -> Vec<AccountMeta> { self.0 .accounts .clone() .into_iter() .map(AccountMeta) .collect() } #[setter] pub fn set_accounts(&mut self, accounts: Vec<AccountMeta>) { self.0.accounts = accounts .into_iter() .map(AccountMetaOriginal::from) .collect(); } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for Instruction {} impl From<InstructionOriginal> for Instruction { fn from(ix: InstructionOriginal) -> Self { Self(ix) } } impl From<Instruction> for InstructionOriginal { fn from(ix: Instruction) -> InstructionOriginal { ix.0 } } impl AsRef<InstructionOriginal> for Instruction { fn as_ref(&self) -> &InstructionOriginal { &self.0 } } #[pyclass(module = "solders", subclass)] #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] pub struct CompiledInstruction(CompiledInstructionOriginal); #[pymethods] impl CompiledInstruction { #[new] pub fn new(program_id_index: u8, data: &[u8], accounts: &[u8]) -> Self { CompiledInstructionOriginal::new_from_raw_parts( program_id_index, data.to_vec(), accounts.to_vec(), ) .into() } pub fn program_id(&self, program_ids: Vec<Pubkey>) -> Pubkey { let underlying_pubkeys: Vec<PubkeyOriginal> = program_ids.iter().map(PubkeyOriginal::from).collect(); let underlying = *self.0.program_id(&underlying_pubkeys); underlying.into() } #[getter] pub fn program_id_index(&self) -> u8 { self.0.program_id_index } #[getter] pub fn accounts<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.accounts) } #[setter] pub fn set_accounts(&mut self, accounts: Vec<u8>) { self.0.accounts = accounts } #[getter] pub fn data<'a>(&self, py: Python<'a>) -> &'a PyBytes { PyBytes::new(py, &self.0.data) } pub fn __repr__(&self) -> String { format!("{:#?}", self) } pub fn __str__(&self) -> String { format!("{:?}", self) } pub fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { self.richcmp(other, op) } pub fn serialize<'a>(&self, py: Python<'a>) -> &'a PyBytes { let ser = bincode::serialize(&self).unwrap(); PyBytes::new(py, &ser) } #[staticmethod] pub fn deserialize(data: &[u8]) -> PyResult<Self> { let deser = bincode::deserialize::<Self>(data); handle_py_err(deser) } } impl RichcmpEqualityOnly for CompiledInstruction {} impl From<CompiledInstructionOriginal> for CompiledInstruction { fn from(ix: CompiledInstructionOriginal) -> Self { Self(ix) } } impl From<CompiledInstruction> for CompiledInstructionOriginal { fn from(ix: CompiledInstruction) -> Self { ix.0 } } impl AsRef<CompiledInstructionOriginal> for CompiledInstruction { fn as_ref(&self) -> &CompiledInstructionOriginal { &self.0 } }
tMeta>) -> Self { let underlying_accounts: Vec<AccountMetaOriginal> = accounts.into_iter().map(|x| x.0).collect(); let underlying = InstructionOriginal::new_with_bytes(program_id.into(), data, underlying_accounts); underlying.into() }
function_block-function_prefixed
[ { "content": "#[pyfunction]\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: Vec<(Pubkey, u64)>) -> Vec<Instruction> {\n\n let to_lamports_converted: Vec<(PubkeyOriginal, u64)> = to_lamports\n\n .into_iter()\n\n .map(|x| (PubkeyOriginal::from(x.0), x.1))\n\n .collect();\n\n convert_instructions_from_original(transfer_many_original(\n\n from_pubkey.as_ref(),\n\n &to_lamports_converted,\n\n ))\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 0, "score": 146788.38473806993 }, { "content": "#[pyfunction]\n\npub fn create_account(params: CreateAccountParams) -> Instruction {\n\n create_account_original(\n\n params.from_pubkey.as_ref(),\n\n params.to_pubkey.as_ref(),\n\n params.lamports,\n\n params.space,\n\n params.owner.as_ref(),\n\n )\n\n .into()\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct CreateAccountWithSeedParams {\n\n from_pubkey: Pubkey,\n\n to_pubkey: Pubkey,\n\n base: Pubkey,\n\n seed: String,\n\n lamports: u64,\n\n space: u64,\n\n owner: Pubkey,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 1, "score": 141966.34117109724 }, { "content": "#[pyfunction]\n\npub fn decode_create_account(instruction: Instruction) -> PyResult<CreateAccountParams> {\n\n let keys = instruction.0.accounts;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::CreateAccount {\n\n lamports,\n\n space,\n\n owner,\n\n } => Ok(CreateAccountParams {\n\n from_pubkey: keys[0].pubkey.into(),\n\n to_pubkey: keys[1].pubkey.into(),\n\n lamports,\n\n space,\n\n owner: owner.into(),\n\n }),\n\n _ => Err(PyValueError::new_err(\"Not a CreateAccount instruction\")),\n\n }\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 2, "score": 140052.8899522494 }, { "content": "#[pyfunction]\n\npub fn initialize_nonce_account(params: InitializeNonceAccountParams) -> Instruction {\n\n create_nonce_account(\n\n &Pubkey::default(),\n\n &params.nonce_pubkey,\n\n &params.authority,\n\n 0,\n\n )\n\n .1\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 3, "score": 136179.17464172473 }, { "content": "#[pyfunction]\n\npub fn create_account_with_seed(params: CreateAccountWithSeedParams) -> Instruction {\n\n create_account_with_seed_original(\n\n params.from_pubkey.as_ref(),\n\n params.to_pubkey.as_ref(),\n\n params.base.as_ref(),\n\n &params.seed,\n\n params.lamports,\n\n params.space,\n\n params.owner.as_ref(),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 4, "score": 136179.17464172473 }, { "content": "#[pyfunction]\n\npub fn withdraw_nonce_account(params: WithdrawNonceAccountParams) -> Instruction {\n\n withdraw_nonce_account_original(\n\n params.nonce_pubkey.as_ref(),\n\n params.authorized_pubkey.as_ref(),\n\n params.to_pubkey.as_ref(),\n\n params.lamports,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 5, "score": 136179.17464172473 }, { "content": "#[pyfunction]\n\npub fn advance_nonce_account(params: AdvanceNonceAccountParams) -> Instruction {\n\n advance_nonce_account_original(\n\n params.nonce_pubkey.as_ref(),\n\n params.authorized_pubkey.as_ref(),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 6, "score": 136179.17464172473 }, { "content": "#[pyfunction]\n\npub fn create_nonce_account(\n\n from_pubkey: &Pubkey,\n\n nonce_pubkey: &Pubkey,\n\n authority: &Pubkey,\n\n lamports: u64,\n\n) -> (Instruction, Instruction) {\n\n let ixs = 
create_nonce_account_original(\n\n from_pubkey.as_ref(),\n\n nonce_pubkey.as_ref(),\n\n authority.as_ref(),\n\n lamports,\n\n );\n\n (ixs[0].clone().into(), ixs[1].clone().into())\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct InitializeNonceAccountParams {\n\n nonce_pubkey: Pubkey,\n\n authority: Pubkey,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 7, "score": 131364.37592314044 }, { "content": "#[pyfunction]\n\npub fn decode_create_account_with_seed(\n\n instruction: Instruction,\n\n) -> PyResult<CreateAccountWithSeedParams> {\n\n let keys = instruction.0.accounts;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::CreateAccountWithSeed {\n\n base,\n\n seed,\n\n lamports,\n\n space,\n\n owner,\n\n } => Ok(CreateAccountWithSeedParams {\n\n from_pubkey: keys[0].pubkey.into(),\n\n to_pubkey: keys[1].pubkey.into(),\n\n base: base.into(),\n\n seed,\n\n lamports,\n", "file_path": "src/system_program.rs", "rank": 8, "score": 128436.27030174462 }, { "content": "#[pyfunction]\n\npub fn decode_initialize_nonce_account(\n\n instruction: Instruction,\n\n) -> PyResult<InitializeNonceAccountParams> {\n\n let nonce_pubkey = instruction.0.accounts[0].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::InitializeNonceAccount(authority) => {\n\n Ok(InitializeNonceAccountParams {\n\n authority: authority.into(),\n\n nonce_pubkey: nonce_pubkey.into(),\n\n })\n\n }\n\n _ => Err(PyValueError::new_err(\n\n \"Not an InitializeNonceAccount instruction\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 9, "score": 128436.27030174462 }, { "content": "#[pyfunction]\n\npub fn decode_advance_nonce_account(\n\n instruction: Instruction,\n\n) -> PyResult<AdvanceNonceAccountParams> {\n\n let keys = instruction.0.accounts;\n\n let nonce_pubkey = keys[0].pubkey;\n\n let authorized_pubkey = keys[2].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::AdvanceNonceAccount => Ok(AdvanceNonceAccountParams {\n\n authorized_pubkey: authorized_pubkey.into(),\n\n nonce_pubkey: nonce_pubkey.into(),\n\n }),\n\n _ => Err(PyValueError::new_err(\n\n \"Not an AdvanceNonceAccount instruction\",\n\n )),\n\n }\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct WithdrawNonceAccountParams {\n\n nonce_pubkey: Pubkey,\n\n authorized_pubkey: Pubkey,\n\n to_pubkey: Pubkey,\n\n lamports: u64,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 10, "score": 128436.27030174462 }, { "content": "#[pyfunction]\n\npub fn create_nonce_account_with_seed(\n\n from_pubkey: &Pubkey,\n\n nonce_pubkey: &Pubkey,\n\n base: &Pubkey,\n\n seed: &str,\n\n authority: &Pubkey,\n\n lamports: u64,\n\n) -> (Instruction, Instruction) {\n\n let ixs = create_nonce_account_with_seed_original(\n\n from_pubkey.as_ref(),\n\n nonce_pubkey.as_ref(),\n\n base.as_ref(),\n\n seed,\n\n authority.as_ref(),\n\n lamports,\n\n );\n\n (ixs[0].clone().into(), ixs[1].clone().into())\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct AdvanceNonceAccountParams {\n\n nonce_pubkey: Pubkey,\n\n authorized_pubkey: Pubkey,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 11, "score": 
128436.27030174462 }, { "content": "#[pyfunction]\n\npub fn decode_withdraw_nonce_account(\n\n instruction: Instruction,\n\n) -> PyResult<WithdrawNonceAccountParams> {\n\n let keys = instruction.0.accounts;\n\n let nonce_pubkey = keys[0].pubkey;\n\n let to_pubkey = keys[1].pubkey;\n\n let authorized_pubkey = keys[4].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::WithdrawNonceAccount(lamports) => {\n\n Ok(WithdrawNonceAccountParams {\n\n authorized_pubkey: authorized_pubkey.into(),\n\n nonce_pubkey: nonce_pubkey.into(),\n\n to_pubkey: to_pubkey.into(),\n\n lamports,\n\n })\n\n }\n\n _ => Err(PyValueError::new_err(\n\n \"Not a WithdrawNonceAccount instruction\",\n\n )),\n\n }\n\n}\n", "file_path": "src/system_program.rs", "rank": 12, "score": 128436.27030174462 }, { "content": "#[pyfunction]\n\npub fn assign(params: AssignParams) -> Instruction {\n\n assign_original(params.pubkey.as_ref(), params.owner.as_ref()).into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 13, "score": 112461.9380102915 }, { "content": "#[pyfunction]\n\npub fn allocate(params: AllocateParams) -> Instruction {\n\n allocate_original(params.pubkey.as_ref(), params.space).into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 14, "score": 112461.9380102915 }, { "content": "#[pyfunction]\n\npub fn transfer(params: TransferParams) -> Instruction {\n\n transfer_original(\n\n params.from_pubkey.as_ref(),\n\n params.to_pubkey.as_ref(),\n\n params.lamports,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 15, "score": 112461.9380102915 }, { "content": "#[pyfunction]\n\npub fn decode_transfer(instruction: Instruction) -> PyResult<TransferParams> {\n\n let keys = instruction.0.accounts;\n\n let from_pubkey = keys[0].pubkey;\n\n let to_pubkey = keys[1].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::Transfer { lamports } => Ok(TransferParams {\n\n from_pubkey: from_pubkey.into(),\n\n to_pubkey: to_pubkey.into(),\n\n lamports,\n\n }),\n\n _ => Err(PyValueError::new_err(\"Not a Transfer instruction\")),\n\n }\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct TransferWithSeedParams {\n\n from_pubkey: Pubkey,\n\n from_base: Pubkey,\n\n from_seed: String,\n\n from_owner: Pubkey,\n\n to_pubkey: Pubkey,\n\n lamports: u64,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 16, "score": 111725.96631597436 }, { "content": "#[pyfunction]\n\npub fn decode_allocate(instruction: Instruction) -> PyResult<AllocateParams> {\n\n let pubkey = instruction.0.accounts[0].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::Allocate { space } => Ok(AllocateParams {\n\n pubkey: pubkey.into(),\n\n space,\n\n }),\n\n _ => Err(PyValueError::new_err(\"Not an Allocate instruction\")),\n\n }\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct AllocateWithSeedParams {\n\n address: Pubkey,\n\n base: Pubkey,\n\n seed: String,\n\n space: u64,\n\n owner: Pubkey,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 17, "score": 111725.96631597436 }, { "content": "#[pyfunction]\n\npub fn decode_assign(instruction: Instruction) -> 
PyResult<AssignParams> {\n\n let pubkey = instruction.0.accounts[0].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::Assign { owner } => Ok(AssignParams {\n\n pubkey: pubkey.into(),\n\n owner: owner.into(),\n\n }),\n\n _ => Err(PyValueError::new_err(\"Not an Assign instruction\")),\n\n }\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct AssignWithSeedParams {\n\n address: Pubkey,\n\n base: Pubkey,\n\n seed: String,\n\n owner: Pubkey,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 18, "score": 111725.96631597436 }, { "content": "fn to_py_value_err(err: &impl ToString) -> PyErr {\n\n PyValueError::new_err(err.to_string())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 111554.99192502552 }, { "content": "#[pyfunction]\n\npub fn decode_assign_with_seed(instruction: Instruction) -> PyResult<AssignWithSeedParams> {\n\n let address = instruction.0.accounts[0].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::AssignWithSeed { base, seed, owner } => {\n\n Ok(AssignWithSeedParams {\n\n address: address.into(),\n\n base: base.into(),\n\n seed,\n\n owner: owner.into(),\n\n })\n\n }\n\n _ => Err(PyValueError::new_err(\"Not an AssignWithSeed instruction\")),\n\n }\n\n}\n\n\n\n#[derive(FromPyObject, IntoPyObject)]\n\npub struct TransferParams {\n\n from_pubkey: Pubkey,\n\n to_pubkey: Pubkey,\n\n lamports: u64,\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 20, "score": 107621.27073162448 }, { "content": "#[pyfunction]\n\npub fn decode_transfer_with_seed(instruction: Instruction) -> PyResult<TransferWithSeedParams> {\n\n let keys = instruction.0.accounts;\n\n let from_pubkey = keys[0].pubkey;\n\n let from_base = keys[1].pubkey;\n\n let to_pubkey = keys[2].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::TransferWithSeed {\n\n lamports,\n\n from_seed,\n\n from_owner,\n\n } => Ok(TransferWithSeedParams {\n\n from_pubkey: from_pubkey.into(),\n\n from_base: from_base.into(),\n\n to_pubkey: to_pubkey.into(),\n\n from_seed,\n\n from_owner: from_owner.into(),\n\n lamports,\n", "file_path": "src/system_program.rs", "rank": 21, "score": 107621.27073162448 }, { "content": "#[pyfunction]\n\npub fn decode_allocate_with_seed(instruction: Instruction) -> PyResult<AllocateWithSeedParams> {\n\n let address = instruction.0.accounts[0].pubkey;\n\n let parsed_data = handle_py_err(bincode::deserialize::<SystemInstructionOriginal>(\n\n instruction.0.data.as_slice(),\n\n ))?;\n\n match parsed_data {\n\n SystemInstructionOriginal::AllocateWithSeed {\n\n base,\n\n seed,\n\n space,\n\n owner,\n\n } => Ok(AllocateWithSeedParams {\n\n address: address.into(),\n\n base: base.into(),\n\n seed,\n\n space,\n\n owner: owner.into(),\n\n }),\n\n _ => Err(PyValueError::new_err(\"Not an AllocateWithSeed instruction\")),\n\n }\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 22, "score": 107621.27073162448 }, { "content": "#[pyfunction]\n\npub fn allocate_with_seed(params: AllocateWithSeedParams) -> Instruction {\n\n allocate_with_seed_original(\n\n params.address.as_ref(),\n\n params.base.as_ref(),\n\n &params.seed,\n\n params.space,\n\n 
params.owner.as_ref(),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 23, "score": 107489.80973463017 }, { "content": "#[pyfunction]\n\npub fn assign_with_seed(params: AssignWithSeedParams) -> Instruction {\n\n assign_with_seed_original(\n\n params.address.as_ref(),\n\n params.base.as_ref(),\n\n &params.seed,\n\n params.owner.as_ref(),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 24, "score": 107489.80973463017 }, { "content": "#[pyfunction]\n\npub fn transfer_with_seed(params: TransferWithSeedParams) -> Instruction {\n\n transfer_with_seed_original(\n\n params.from_pubkey.as_ref(),\n\n params.from_base.as_ref(),\n\n params.from_seed,\n\n params.from_owner.as_ref(),\n\n params.to_pubkey.as_ref(),\n\n params.lamports,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 25, "score": 107489.80973463017 }, { "content": "fn convert_optional_pubkey(pubkey: Option<&Pubkey>) -> Option<&PubkeyOriginal> {\n\n pubkey.map(|p| p.as_ref())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 105625.94391953814 }, { "content": "fn calculate_hash(t: &impl Hash) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n s.finish()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 90480.15891403577 }, { "content": "fn richcmp_type_error(op: &str) -> PyErr {\n\n let msg = format!(\"{} not supported.\", op);\n\n PyTypeError::new_err(msg)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 90214.13069325527 }, { "content": "fn convert_instructions(instructions: Vec<Instruction>) -> Vec<InstructionOriginal> {\n\n instructions\n\n .into_iter()\n\n .map(solana_sdk::instruction::Instruction::from)\n\n .collect()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 83297.11302291368 }, { "content": "def test_accounts_setter() -> None:\n\n ix = Instruction(\n\n Pubkey.default(), b\"1\", [AccountMeta(Pubkey.new_unique(), True, True)]\n\n )\n\n new_pubkey = Pubkey.new_unique()\n\n new_accounts = [AccountMeta(Pubkey.new_unique(), True, True)]\n\n ix.accounts = new_accounts\n", "file_path": "tests/test_instruction.py", "rank": 30, "score": 77609.99709245234 }, { "content": "fn convert_instructions_from_original(ixs: Vec<InstructionOriginal>) -> Vec<Instruction> {\n\n ixs.into_iter().map(Instruction::from).collect()\n\n}\n\n\n", "file_path": "src/system_program.rs", "rank": 31, "score": 77608.2944034309 }, { "content": "def test_wire_format_and_deserialize() -> None:\n\n \"\"\"Test serialize/derialize transaction to/from wire format.\"\"\"\n\n transfer = system_program.transfer(\n\n dict(\n\n from_pubkey=SENDER.pubkey(),\n\n to_pubkey=RECIPIENT,\n\n lamports=49,\n\n )\n\n )\n\n message = Message([transfer], SENDER.pubkey())\n\n expected_txn = Transaction.new_unsigned(message)\n\n expected_txn.sign([SENDER], BLOCKHASH)\n\n wire_txn = b64decode(\n\n b\"AVuErQHaXv0SG0/PchunfxHKt8wMRfMZzqV0tkC5qO6owYxWU2v871AoWywGoFQr4z+q/7mE8lIufNl/kxj+nQ0BAAEDE5j2\"\n\n b\"LG0aRXxRumpLXz29L2n8qTIWIY3ImX5Ba9F9k8r9Q5/Mtmcn8onFxt47xKj+XdXXd3C8j/FcPu7csUrz/AAAAAAAAAAAAAAA\"\n\n b\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAxJrndgN4IFTxep3s6kO0ROug7bEsbx0xxuDkqEvwUusBAgIAAQwCAAAAMQAAAAAAAAA=\"\n\n )\n\n txn = Transaction.deserialize(wire_txn)\n\n assert txn == expected_txn\n", "file_path": "tests/test_transaction.py", "rank": 32, "score": 76645.65744393176 }, { "content": "pub fn create_sysvar_mod(py: Python<'_>) -> PyResult<&PyModule> {\n\n let sysvar_mod = PyModule::new(py, \"sysvar\")?;\n\n let sysvars = vec![\n\n 
(\"CLOCK\", sysvar_original::clock::ID),\n\n (\n\n \"RECENT_BLOCKHASHES\",\n\n sysvar_original::recent_blockhashes::ID,\n\n ),\n\n (\"RENT\", sysvar_original::rent::ID),\n\n (\"REWARDS\", sysvar_original::rewards::ID),\n\n (\"STAKE_HISTORY\", sysvar_original::stake_history::ID),\n\n (\"EPOCH_SCHEDULE\", sysvar_original::epoch_schedule::ID),\n\n (\"INSTRUCTIONS\", sysvar_original::instructions::ID),\n\n (\"SLOT_HASHES\", sysvar_original::slot_hashes::ID),\n\n ];\n\n for sysvar in sysvars {\n\n sysvar_mod.add(sysvar.0, Pubkey(sysvar.1))?\n\n }\n\n Ok(sysvar_mod)\n\n}\n", "file_path": "src/sysvar.rs", "rank": 33, "score": 75510.42144357953 }, { "content": "def test_accounts_setter_compiled_ix() -> None:\n\n ix = CompiledInstruction(0, b\"1\", b\"123\")\n\n new_accounts = b\"456\"\n\n ix.accounts = new_accounts\n\n assert ix.accounts == new_accounts\n\n new_accounts_as_list = list(b\"foo\")\n\n ix.accounts = cast(bytes, new_accounts_as_list)\n", "file_path": "tests/test_instruction.py", "rank": 34, "score": 72840.54924014515 }, { "content": "pub fn create_system_program_mod(py: Python<'_>) -> PyResult<&PyModule> {\n\n let system_program_mod = PyModule::new(py, \"_system_program\")?;\n\n system_program_mod.add(\"ID\", Pubkey(system_program::ID))?;\n\n let funcs = [\n\n wrap_pyfunction!(create_account, system_program_mod)?,\n\n wrap_pyfunction!(decode_create_account, system_program_mod)?,\n\n wrap_pyfunction!(create_account_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(decode_create_account_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(assign, system_program_mod)?,\n\n wrap_pyfunction!(decode_assign, system_program_mod)?,\n\n wrap_pyfunction!(assign_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(decode_assign_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(transfer, system_program_mod)?,\n\n wrap_pyfunction!(decode_transfer, system_program_mod)?,\n\n wrap_pyfunction!(transfer_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(decode_transfer_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(allocate, system_program_mod)?,\n\n wrap_pyfunction!(decode_allocate, system_program_mod)?,\n\n wrap_pyfunction!(allocate_with_seed, system_program_mod)?,\n\n wrap_pyfunction!(decode_allocate_with_seed, system_program_mod)?,\n", "file_path": "src/system_program.rs", "rank": 35, "score": 72744.056281861 }, { "content": "def test_tx_uses_ro_nonce_account() -> None:\n\n from_keypair = Keypair()\n\n from_pubkey = from_keypair.pubkey()\n\n nonce_keypair = Keypair()\n\n nonce_pubkey = nonce_keypair.pubkey()\n\n account_metas = [\n\n AccountMeta(nonce_pubkey, False, False),\n\n AccountMeta(RECENT_BLOCKHASHES, False, False),\n\n AccountMeta(nonce_pubkey, True, False),\n\n ]\n\n advance_nonce_account_idx = b\"\\x04\\x00\\x00\\x00\"\n\n nonce_instruction = Instruction(\n\n system_program.ID,\n\n advance_nonce_account_idx,\n\n account_metas,\n\n )\n\n tx = Transaction.new_signed_with_payer(\n\n [nonce_instruction],\n\n from_pubkey,\n\n [from_keypair, nonce_keypair],\n\n Hash.default(),\n\n )\n", "file_path": "tests/test_transaction.py", "rank": 36, "score": 71254.40318343783 }, { "content": "def test_get_nonce_pub_from_ix_no_accounts_fail() -> None:\n\n (_, _, tx) = nonced_transfer_tx()\n\n nonce_ix = tx.uses_durable_nonce()\n\n assert nonce_ix is not None\n\n nonce_ix = tx.uses_durable_nonce()\n\n assert nonce_ix is not None\n\n nonce_ix.accounts = b\"\"\n", "file_path": "tests/test_transaction.py", "rank": 37, "score": 68732.21040824789 }, { "content": "#[pymodule]\n\nfn solders(py: 
Python, m: &PyModule) -> PyResult<()> {\n\n let hash_mod = PyModule::new(py, \"hash\")?;\n\n hash_mod.add_class::<SolderHash>()?;\n\n hash_mod.add(\"ParseHashError\", py.get_type::<ParseHashError>())?;\n\n let instruction_mod = PyModule::new(py, \"instruction\")?;\n\n instruction_mod.add_class::<AccountMeta>()?;\n\n instruction_mod.add_class::<Instruction>()?;\n\n instruction_mod.add_class::<CompiledInstruction>()?;\n\n let pubkey_mod = PyModule::new(py, \"pubkey\")?;\n\n pubkey_mod.add_class::<Pubkey>()?;\n\n let keypair_mod = PyModule::new(py, \"keypair\")?;\n\n keypair_mod.add_class::<Keypair>()?;\n\n let signature_mod = PyModule::new(py, \"signature\")?;\n\n signature_mod.add_class::<Signature>()?;\n\n let message_mod = PyModule::new(py, \"message\")?;\n\n message_mod.add_class::<Message>()?;\n\n message_mod.add_class::<MessageHeader>()?;\n\n let transaction_mod = PyModule::new(py, \"transaction\")?;\n\n transaction_mod.add_class::<Transaction>()?;\n\n transaction_mod.add(\"SanitizeError\", py.get_type::<SanitizeError>())?;\n", "file_path": "src/lib.rs", "rank": 38, "score": 65092.75098759734 }, { "content": "def create_account(params: CreateAccountParams) -> Instruction:\n", "file_path": "solders/system_program.py", "rank": 39, "score": 60435.064448131925 }, { "content": "fn handle_py_err<T: Into<P>, E: ToString + Into<PyErrWrapper>, P>(\n\n res: Result<T, E>,\n\n) -> PyResult<P> {\n\n res.map_or_else(|e| Err(to_py_err(e)), |v| Ok(v.into()))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 59407.78512416841 }, { "content": "class CreateAccountParams(TypedDict):\n\n from_pubkey: Pubkey\n\n to_pubkey: Pubkey\n\n lamports: int\n\n space: int\n", "file_path": "solders/system_program.py", "rank": 41, "score": 58809.3348771575 }, { "content": "def advance_nonce_account(params: AdvanceNonceAccountParams) -> Instruction:\n", "file_path": "solders/system_program.py", "rank": 42, "score": 58809.3348771575 }, { "content": "def decode_create_account(instruction: Instruction) -> CreateAccountParams:\n", "file_path": "solders/system_program.py", "rank": 43, "score": 58809.3348771575 }, { "content": "def withdraw_nonce_account(params: WithdrawNonceAccountParams) -> Instruction:\n", "file_path": "solders/system_program.py", "rank": 44, "score": 58809.3348771575 }, { "content": "def initialize_nonce_account(params: InitializeNonceAccountParams) -> Instruction:\n", "file_path": "solders/system_program.py", "rank": 45, "score": 58809.3348771575 }, { "content": "def create_account_with_seed(params: CreateAccountWithSeedParams) -> Instruction:\n", "file_path": "solders/system_program.py", "rank": 46, "score": 58809.3348771575 }, { "content": "def decode_withdraw_nonce_account(\n\n instruction: Instruction,\n\n) -> WithdrawNonceAccountParams:\n", "file_path": "solders/system_program.py", "rank": 47, "score": 57272.00975235208 }, { "content": "def decode_initialize_nonce_account(\n\n instruction: Instruction,\n\n) -> InitializeNonceAccountParams:\n\n return cast(\n\n InitializeNonceAccountParams, _decode_initialize_nonce_account(instruction)\n", "file_path": "solders/system_program.py", "rank": 48, "score": 57272.00975235208 }, { "content": "class CreateAccountWithSeedParams(TypedDict):\n\n from_pubkey: Pubkey\n\n to_pubkey: Pubkey\n\n base: Pubkey\n\n seed: str\n\n lamports: int\n\n space: int\n", "file_path": "solders/system_program.py", "rank": 49, "score": 57272.00975235208 }, { "content": "class WithdrawNonceAccountParams(TypedDict):\n\n nonce_pubkey: Pubkey\n\n authorized_pubkey: 
Pubkey\n\n to_pubkey: Pubkey\n", "file_path": "solders/system_program.py", "rank": 50, "score": 57272.00975235208 }, { "content": "class AdvanceNonceAccountParams(TypedDict):\n\n nonce_pubkey: Pubkey\n", "file_path": "solders/system_program.py", "rank": 51, "score": 57272.00975235208 }, { "content": "def decode_create_account_with_seed(\n\n instruction: Instruction,\n\n) -> CreateAccountWithSeedParams:\n\n return cast(\n\n CreateAccountWithSeedParams, _decode_create_account_with_seed(instruction)\n", "file_path": "solders/system_program.py", "rank": 52, "score": 57272.00975235208 }, { "content": "class InitializeNonceAccountParams(TypedDict):\n\n nonce_pubkey: Pubkey\n", "file_path": "solders/system_program.py", "rank": 53, "score": 57272.00975235208 }, { "content": "def decode_advance_nonce_account(instruction: Instruction) -> AdvanceNonceAccountParams:\n", "file_path": "solders/system_program.py", "rank": 54, "score": 57272.00975235208 }, { "content": "from pytest import raises, mark\n\nfrom solders.pubkey import Pubkey\n\n\n\non_curve_data = [\n\n (\n\n b\"\\xc1M\\xce\\x1e\\xa4\\x86<\\xf1\\xbc\\xfc\\x12\\xf4\\xf2\\xe2Y\"\n\n b\"\\xf4\\x8d\\xe4V\\xb7\\xf9\\xd4\\\\!{\\x04\\x89j\\x1f\\xfeA\\xdc\",\n\n True,\n\n ),\n\n (\n\n b\"6\\x8d-\\x96\\xcf\\xe7\\x93G~\\xe0\\x17r\\\\\\x9c%\\x9a\\xab\\xa6\"\n\n b\"\\xa9\\xede\\x02\\xbf\\x83=\\x10,P\\xfbh\\x8ev\",\n\n True,\n\n ),\n\n (\n\n b\"\\x00y\\xf0\\x82\\xa6\\x1c\\xc7N\\xa5\\xe2\\xab\\xedd\\xbb\\xf7_2\"\n\n b\"\\xfb\\xddSz\\xff\\xf7RW\\xedg\\x16\\xc9\\xe3r\\x99\",\n\n False,\n\n ),\n\n]\n\n\n\n\n\ndef test_wrong_size():\n\n with raises(ValueError) as excinfo:\n\n Pubkey(bytes([0] * 33))\n\n msg = \"expected a sequence of length 32 (got 33)\"\n\n assert excinfo.value.args[0] == msg\n\n\n\n\n\[email protected](\"test_input,expected\", on_curve_data)\n\ndef test_is_on_curve_method(test_input, expected):\n\n pubkey = Pubkey(test_input)\n\n result = pubkey.is_on_curve()\n\n assert result is expected\n\n\n\n\n\ndef test_length_classattr():\n\n assert Pubkey.LENGTH == 32\n\n\n\n\n\ndef test_bytes_representation():\n\n data = (\n\n b\"6\\x8d-\\x96\\xcf\\xe7\\x93G~\\xe0\\x17r\\\\\\x9c%\\x9a\\xab\\xa6\"\n\n b\"\\xa9\\xede\\x02\\xbf\\x83=\\x10,P\\xfbh\\x8ev\"\n\n )\n\n pubkey = Pubkey(data)\n\n assert bytes(pubkey) == data\n\n\n\n\n\ndef test_equality():\n\n assert Pubkey.default() == Pubkey.default()\n\n\n\n\n\ndef test_create_with_seed():\n\n \"\"\"Test create with seed\"\"\"\n\n default_public_key = Pubkey.from_string(\"11111111111111111111111111111111\")\n\n derived_key = Pubkey.create_with_seed(\n\n default_public_key, \"limber chicken: 4/45\", default_public_key\n\n )\n\n expected = Pubkey.from_string(\"9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq\")\n\n assert derived_key == expected\n\n\n\n\n\ndef test_create_program_address():\n\n \"\"\"Test create program address.\"\"\"\n\n program_id = Pubkey.from_string(\"BPFLoader1111111111111111111111111111111111\")\n\n program_address = Pubkey.create_program_address([b\"\", bytes([1])], program_id)\n\n assert program_address == Pubkey.from_string(\n\n \"3gF2KMe9KiC6FNVBmfg9i267aMPvK37FewCip4eGBFcT\"\n\n )\n\n\n\n program_address = Pubkey.create_program_address([bytes(\"☉\", \"utf-8\")], program_id)\n\n assert program_address == Pubkey.from_string(\n\n \"7ytmC1nT1xY4RfxCV2ZgyA7UakC93do5ZdyhdF3EtPj7\"\n\n )\n\n\n\n seeds = [bytes(\"Talking\", \"utf8\"), bytes(\"Squirrels\", \"utf8\")]\n\n program_address = Pubkey.create_program_address(seeds, program_id)\n\n assert program_address == Pubkey.from_string(\n\n 
\"HwRVBufQ4haG5XSgpspwKtNd3PC9GM9m1196uJW36vds\"\n\n )\n\n\n\n program_address = Pubkey.create_program_address(\n\n [bytes(Pubkey.from_string(\"SeedPubey1111111111111111111111111111111111\"))],\n\n program_id,\n\n )\n\n assert program_address == Pubkey.from_string(\n\n \"GUs5qLUfsEHkcMB9T38vjr18ypEhRuNWiePW2LoK4E3K\"\n\n )\n\n\n\n program_address_2 = Pubkey.create_program_address(\n\n [bytes(\"Talking\", \"utf8\")], program_id\n\n )\n\n assert program_address_2 != program_address\n\n\n\n # https://github.com/solana-labs/solana/issues/11950\n\n seeds = [\n\n bytes(Pubkey.from_string(\"H4snTKK9adiU15gP22ErfZYtro3aqR9BTMXiH3AwiUTQ\")),\n\n bytes.fromhex(\"0200000000000000\"),\n\n ]\n\n program_address = Pubkey.create_program_address(\n\n seeds, Pubkey.from_string(\"4ckmDgGdxQoPDLUkDT3vHgSAkzA3QRdNq5ywwY4sUSJn\")\n\n )\n\n assert program_address == Pubkey.from_string(\n\n \"12rqwuEgBYiGhBrDJStCiqEtzQpTTiZbh7teNVLuYcFA\"\n\n )\n\n\n\n\n\ndef to_uint8_bytes(val: int) -> bytes:\n\n \"\"\"Convert an integer to uint8.\"\"\"\n\n return val.to_bytes(1, byteorder=\"little\")\n\n\n\n\n\ndef test_find_program_address():\n\n \"\"\"Test create associated_token_address.\"\"\"\n\n program_id = Pubkey.from_string(\"BPFLoader1111111111111111111111111111111111\")\n\n program_address, nonce = Pubkey.find_program_address([b\"\"], program_id)\n\n assert program_address == Pubkey.create_program_address(\n\n [b\"\", to_uint8_bytes(nonce)], program_id\n\n )\n\n\n\n\n\ndef test_set_operations() -> None:\n\n \"\"\"Tests that a publickey is now hashable with the appropriate set operations.\"\"\"\n\n public_key_primary = Pubkey(bytes([0] * 32))\n\n public_key_secondary = Pubkey(bytes([1] * 32))\n\n public_key_duplicate = Pubkey(bytes(public_key_secondary))\n\n public_key_set = {public_key_primary, public_key_secondary, public_key_duplicate}\n\n assert hash(public_key_primary) != hash(public_key_secondary)\n\n assert hash(public_key_secondary) == hash(public_key_duplicate)\n\n assert len(public_key_set) == 2\n", "file_path": "tests/test_pubkey.py", "rank": 55, "score": 56643.206750824625 }, { "content": "from typing import cast, Union\n\nfrom pytest import mark, raises\n\nfrom solders.instruction import Instruction, CompiledInstruction, AccountMeta\n\nfrom solders.pubkey import Pubkey\n\nfrom solders.errors import BincodeError\n\n\n\n\n\ndef test_accounts_setter() -> None:\n\n ix = Instruction(\n\n Pubkey.default(), b\"1\", [AccountMeta(Pubkey.new_unique(), True, True)]\n\n )\n\n new_pubkey = Pubkey.new_unique()\n\n new_accounts = [AccountMeta(Pubkey.new_unique(), True, True)]\n\n ix.accounts = new_accounts\n\n assert ix.accounts == new_accounts\n\n\n\n\n\ndef test_accounts_setter_compiled_ix() -> None:\n\n ix = CompiledInstruction(0, b\"1\", b\"123\")\n\n new_accounts = b\"456\"\n\n ix.accounts = new_accounts\n\n assert ix.accounts == new_accounts\n\n new_accounts_as_list = list(b\"foo\")\n\n ix.accounts = cast(bytes, new_accounts_as_list)\n\n assert ix.accounts == bytes(new_accounts_as_list)\n\n\n\n\n\[email protected](\"to_deserialize\", [Instruction, CompiledInstruction])\n\ndef test_bincode_error(to_deserialize: Union[Instruction, CompiledInstruction]) -> None:\n\n with raises(BincodeError) as excinfo:\n\n Instruction.deserialize(b\"foo\")\n\n assert excinfo.value.args[0] == \"io error: unexpected end of file\"\n", "file_path": "tests/test_instruction.py", "rank": 56, "score": 56286.46041317626 }, { "content": "from solders.pubkey import Pubkey\n\nfrom solders.instruction import AccountMeta\n\n\n\nPUBKEY = 
Pubkey.default()\n\n\n\n\n\ndef test_eq() -> None:\n\n am1 = AccountMeta(PUBKEY, True, True)\n\n am2 = AccountMeta(PUBKEY, True, True)\n\n assert am1 == am2\n\n\n\n\n\ndef test_attributes() -> None:\n\n am = AccountMeta(PUBKEY, True, True)\n\n assert am.pubkey == PUBKEY\n\n assert am.is_signer\n\n assert am.is_writable\n", "file_path": "tests/test_account_meta.py", "rank": 57, "score": 54247.4735060826 }, { "content": "def test_from_string(signature: Signature, signature_base58_str: str):\n", "file_path": "tests/test_signature.py", "rank": 58, "score": 53083.43997750902 }, { "content": "def test_from_string() -> None:\n", "file_path": "tests/test_hash.py", "rank": 59, "score": 53083.43997750902 }, { "content": "struct PyErrWrapper(PyErr);\n\n\n\nimpl From<PyErrWrapper> for PyErr {\n\n fn from(e: PyErrWrapper) -> Self {\n\n e.0\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 52875.11502292667 }, { "content": "def to_uint8_bytes(val: int) -> bytes:\n\n \"\"\"Convert an integer to uint8.\"\"\"\n", "file_path": "tests/test_pubkey.py", "rank": 61, "score": 52808.79249790036 }, { "content": "def test_equality():\n", "file_path": "tests/test_pubkey.py", "rank": 62, "score": 52808.79249790036 }, { "content": "def test_new_unique():\n", "file_path": "tests/test_hash.py", "rank": 63, "score": 51495.8477264442 }, { "content": "def test_sdk_serialize() -> None:\n\n assert create_sample_transaction().serialize() == bytes(\n\n [\n\n 1,\n\n 71,\n\n 59,\n\n 9,\n\n 187,\n\n 190,\n\n 129,\n\n 150,\n\n 165,\n\n 21,\n\n 33,\n\n 158,\n\n 72,\n\n 87,\n\n 110,\n\n 144,\n\n 120,\n\n 79,\n\n 238,\n\n 132,\n\n 134,\n\n 105,\n\n 39,\n\n 102,\n\n 116,\n\n 209,\n\n 29,\n\n 229,\n\n 154,\n\n 36,\n\n 105,\n\n 44,\n\n 172,\n\n 118,\n\n 131,\n\n 22,\n\n 124,\n\n 131,\n\n 179,\n\n 142,\n\n 176,\n\n 27,\n\n 117,\n\n 160,\n\n 89,\n\n 102,\n\n 224,\n\n 204,\n\n 1,\n\n 252,\n\n 141,\n\n 2,\n\n 136,\n\n 0,\n\n 37,\n\n 218,\n\n 225,\n\n 129,\n\n 92,\n\n 154,\n\n 250,\n\n 59,\n\n 97,\n\n 178,\n\n 10,\n\n 1,\n\n 0,\n\n 1,\n\n 3,\n\n 156,\n\n 227,\n\n 116,\n\n 193,\n\n 215,\n\n 38,\n\n 142,\n\n 22,\n\n 8,\n\n 14,\n\n 229,\n\n 239,\n\n 119,\n\n 93,\n\n 5,\n\n 218,\n\n 161,\n\n 35,\n\n 3,\n\n 33,\n\n 0,\n\n 36,\n\n 100,\n\n 158,\n\n 252,\n\n 33,\n\n 161,\n\n 97,\n\n 185,\n\n 62,\n\n 89,\n\n 99,\n\n 1,\n\n 1,\n\n 1,\n\n 4,\n\n 5,\n\n 6,\n\n 7,\n\n 8,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 9,\n\n 8,\n\n 7,\n\n 6,\n\n 5,\n\n 4,\n\n 1,\n\n 1,\n\n 1,\n\n 2,\n\n 2,\n\n 2,\n\n 4,\n\n 5,\n\n 6,\n\n 7,\n\n 8,\n\n 9,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 1,\n\n 9,\n\n 8,\n\n 7,\n\n 6,\n\n 5,\n\n 4,\n\n 2,\n\n 2,\n\n 2,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 1,\n\n 2,\n\n 2,\n\n 0,\n\n 1,\n\n 3,\n\n 1,\n\n 2,\n\n 3,\n\n ]\n", "file_path": "tests/test_transaction.py", "rank": 64, "score": 51437.90488207453 }, { "content": "def test_transaction_serialize() -> None:\n\n tx = create_sample_transaction()\n\n ser = tx.serialize()\n\n deser = Transaction.deserialize(ser)\n", "file_path": "tests/test_transaction.py", "rank": 65, "score": 51437.90488207453 }, { "content": "def test_from_string_too_short(signature_base58_str: str):\n\n with raises(ValueError) as excinfo:\n\n Signature.from_string(signature_base58_str[:4])\n", 
"file_path": "tests/test_signature.py", "rank": 66, "score": 51345.54328785672 }, { "content": "def test_from_string_error(test_input: str, expected_err: str) -> None:\n\n with raises(ParseHashError) as excinfo:\n\n Hash.from_string(test_input)\n", "file_path": "tests/test_hash.py", "rank": 67, "score": 51345.54328785672 }, { "content": "def test_from_string_too_long(signature_base58_str: str):\n\n signature_base58_str_doubled = signature_base58_str * 2\n\n with raises(ValueError) as excinfo:\n\n Signature.from_string(signature_base58_str_doubled)\n", "file_path": "tests/test_signature.py", "rank": 68, "score": 51345.54328785672 }, { "content": "def test_set_operations() -> None:\n\n \"\"\"Tests that a publickey is now hashable with the appropriate set operations.\"\"\"\n\n public_key_primary = Pubkey(bytes([0] * 32))\n\n public_key_secondary = Pubkey(bytes([1] * 32))\n\n public_key_duplicate = Pubkey(bytes(public_key_secondary))\n\n public_key_set = {public_key_primary, public_key_secondary, public_key_duplicate}\n\n assert hash(public_key_primary) != hash(public_key_secondary)\n\n assert hash(public_key_secondary) == hash(public_key_duplicate)\n", "file_path": "tests/test_pubkey.py", "rank": 69, "score": 51079.887481467325 }, { "content": "def test_length_classattr():\n", "file_path": "tests/test_pubkey.py", "rank": 70, "score": 51079.887481467325 }, { "content": "def test_wrong_size():\n\n with raises(ValueError) as excinfo:\n\n Pubkey(bytes([0] * 33))\n\n msg = \"expected a sequence of length 32 (got 33)\"\n", "file_path": "tests/test_pubkey.py", "rank": 71, "score": 51079.887481467325 }, { "content": "def test_bytes_representation():\n\n data = (\n\n b\"6\\x8d-\\x96\\xcf\\xe7\\x93G~\\xe0\\x17r\\\\\\x9c%\\x9a\\xab\\xa6\"\n\n b\"\\xa9\\xede\\x02\\xbf\\x83=\\x10,P\\xfbh\\x8ev\"\n\n )\n\n pubkey = Pubkey(data)\n", "file_path": "tests/test_pubkey.py", "rank": 72, "score": 51079.887481467325 }, { "content": "def test_is_on_curve_method(test_input, expected):\n\n pubkey = Pubkey(test_input)\n\n result = pubkey.is_on_curve()\n", "file_path": "tests/test_pubkey.py", "rank": 73, "score": 51079.887481467325 }, { "content": "def test_create_with_seed():\n\n \"\"\"Test create with seed\"\"\"\n\n default_public_key = Pubkey.from_string(\"11111111111111111111111111111111\")\n\n derived_key = Pubkey.create_with_seed(\n\n default_public_key, \"limber chicken: 4/45\", default_public_key\n\n )\n\n expected = Pubkey.from_string(\"9h1HyLCW5dZnBVap8C5egQ9Z6pHyjsh5MNy83iPqqRuq\")\n", "file_path": "tests/test_pubkey.py", "rank": 74, "score": 51079.887481467325 }, { "content": "fn handle_py_value_err<T: Into<P>, E: ToString, P>(res: Result<T, E>) -> PyResult<P> {\n\n res.map_or_else(|e| Err(to_py_value_err(&e)), |v| Ok(v.into()))\n\n}\n\n\n\ncreate_exception!(solders, BincodeError, PyException);\n\n\n\nimpl From<Box<ErrorKind>> for PyErrWrapper {\n\n fn from(e: Box<ErrorKind>) -> Self {\n\n Self(BincodeError::new_err(e.to_string()))\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 75, "score": 50834.35373162639 }, { "content": "def test_bincode_error(to_deserialize: Union[Instruction, CompiledInstruction]) -> None:\n\n with raises(BincodeError) as excinfo:\n\n Instruction.deserialize(b\"foo\")\n", "file_path": "tests/test_instruction.py", "rank": 76, "score": 50758.17965749709 }, { "content": "def test_attributes() -> None:\n\n am = AccountMeta(PUBKEY, True, True)\n\n assert am.pubkey == PUBKEY\n\n assert am.is_signer\n", "file_path": "tests/test_account_meta.py", "rank": 77, "score": 50695.461629378064 
}, { "content": "def test_eq() -> None:\n\n am1 = AccountMeta(PUBKEY, True, True)\n\n am2 = AccountMeta(PUBKEY, True, True)\n", "file_path": "tests/test_account_meta.py", "rank": 78, "score": 50695.461629378064 }, { "content": "def test_serialize_unsigned_transaction() -> None:\n\n \"\"\"Test to serialize an unsigned transaction.\"\"\"\n\n transfer = system_program.transfer(\n\n dict(\n\n from_pubkey=SENDER.pubkey(),\n\n to_pubkey=RECIPIENT,\n\n lamports=49,\n\n )\n\n )\n\n message = Message([transfer])\n\n txn = Transaction.new_unsigned(message)\n\n assert (\n\n txn.signatures == [Signature.default()] * message.header.num_required_signatures\n\n )\n\n assert Transaction.deserialize(txn.serialize()) == txn\n\n\n\n message_with_payer = Message([transfer], SENDER.pubkey())\n\n txn_with_payer = Transaction.new_signed_with_payer(\n\n [transfer], SENDER.pubkey(), [SENDER], BLOCKHASH\n\n )\n\n # Properly signed transaction succeeds\n\n assert len(txn_with_payer.message.instructions) == 1\n\n expected_serialization = b64decode(\n\n b\"AVuErQHaXv0SG0/PchunfxHKt8wMRfMZzqV0tkC5qO6owYxWU2v871AoWywGoFQr4z+q/7mE8lIufNl/kxj+nQ0BAAEDE5j2\"\n\n b\"LG0aRXxRumpLXz29L2n8qTIWIY3ImX5Ba9F9k8r9Q5/Mtmcn8onFxt47xKj+XdXXd3C8j/FcPu7csUrz/AAAAAAAAAAAAAAA\"\n\n b\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAxJrndgN4IFTxep3s6kO0ROug7bEsbx0xxuDkqEvwUusBAgIAAQwCAAAAMQAAAAAAAAA=\"\n\n )\n\n assert txn_with_payer.serialize() == expected_serialization\n", "file_path": "tests/test_transaction.py", "rank": 79, "score": 49810.75304217862 }, { "content": "def test_from_string_non_base58(signature_base58_str: str):\n\n bad_str = \"I\" + signature_base58_str[1:]\n\n with raises(ValueError) as excinfo:\n\n Signature.from_string(bad_str)\n", "file_path": "tests/test_signature.py", "rank": 80, "score": 49717.83307257813 }, { "content": "def test_create_program_address():\n\n \"\"\"Test create program address.\"\"\"\n\n program_id = Pubkey.from_string(\"BPFLoader1111111111111111111111111111111111\")\n\n program_address = Pubkey.create_program_address([b\"\", bytes([1])], program_id)\n\n assert program_address == Pubkey.from_string(\n\n \"3gF2KMe9KiC6FNVBmfg9i267aMPvK37FewCip4eGBFcT\"\n\n )\n\n\n\n program_address = Pubkey.create_program_address([bytes(\"☉\", \"utf-8\")], program_id)\n\n assert program_address == Pubkey.from_string(\n\n \"7ytmC1nT1xY4RfxCV2ZgyA7UakC93do5ZdyhdF3EtPj7\"\n\n )\n\n\n\n seeds = [bytes(\"Talking\", \"utf8\"), bytes(\"Squirrels\", \"utf8\")]\n\n program_address = Pubkey.create_program_address(seeds, program_id)\n\n assert program_address == Pubkey.from_string(\n\n \"HwRVBufQ4haG5XSgpspwKtNd3PC9GM9m1196uJW36vds\"\n\n )\n\n\n\n program_address = Pubkey.create_program_address(\n\n [bytes(Pubkey.from_string(\"SeedPubey1111111111111111111111111111111111\"))],\n\n program_id,\n\n )\n\n assert program_address == Pubkey.from_string(\n\n \"GUs5qLUfsEHkcMB9T38vjr18ypEhRuNWiePW2LoK4E3K\"\n\n )\n\n\n\n program_address_2 = Pubkey.create_program_address(\n\n [bytes(\"Talking\", \"utf8\")], program_id\n\n )\n\n assert program_address_2 != program_address\n\n\n\n # https://github.com/solana-labs/solana/issues/11950\n\n seeds = [\n\n bytes(Pubkey.from_string(\"H4snTKK9adiU15gP22ErfZYtro3aqR9BTMXiH3AwiUTQ\")),\n\n bytes.fromhex(\"0200000000000000\"),\n\n ]\n\n program_address = Pubkey.create_program_address(\n\n seeds, Pubkey.from_string(\"4ckmDgGdxQoPDLUkDT3vHgSAkzA3QRdNq5ywwY4sUSJn\")\n\n )\n\n assert program_address == Pubkey.from_string(\n\n \"12rqwuEgBYiGhBrDJStCiqEtzQpTTiZbh7teNVLuYcFA\"\n", "file_path": "tests/test_pubkey.py", "rank": 
81, "score": 49460.5988475397 }, { "content": "def test_find_program_address():\n\n \"\"\"Test create associated_token_address.\"\"\"\n\n program_id = Pubkey.from_string(\"BPFLoader1111111111111111111111111111111111\")\n\n program_address, nonce = Pubkey.find_program_address([b\"\"], program_id)\n\n assert program_address == Pubkey.create_program_address(\n\n [b\"\", to_uint8_bytes(nonce)], program_id\n", "file_path": "tests/test_pubkey.py", "rank": 82, "score": 49460.5988475397 }, { "content": "def test_sort_account_metas() -> None:\n\n \"\"\"Test AccountMeta sorting.\"\"\"\n\n\n\n # S6EA7XsNyxg4yx4DJRMm7fP21jgZb1fuzBAUGhgVtkP\n\n signer_one = Keypair.from_seed(\n\n bytes(\n\n [\n\n 216,\n\n 214,\n\n 184,\n\n 213,\n\n 199,\n\n 75,\n\n 129,\n\n 160,\n\n 237,\n\n 96,\n\n 96,\n\n 228,\n\n 46,\n\n 251,\n\n 146,\n\n 3,\n\n 71,\n\n 162,\n\n 37,\n\n 117,\n\n 121,\n\n 70,\n\n 143,\n\n 16,\n\n 128,\n\n 78,\n\n 53,\n\n 189,\n\n 222,\n\n 230,\n\n 165,\n\n 249,\n\n ]\n\n )\n\n )\n\n\n\n # BKdt9U6V922P17ui81dzLoqgSY2B5ds1UD13rpwFB2zi\n\n receiver_one = Keypair.from_seed(\n\n bytes(\n\n [\n\n 3,\n\n 140,\n\n 94,\n\n 243,\n\n 0,\n\n 38,\n\n 92,\n\n 138,\n\n 52,\n\n 79,\n\n 153,\n\n 83,\n\n 42,\n\n 236,\n\n 220,\n\n 82,\n\n 227,\n\n 187,\n\n 101,\n\n 104,\n\n 126,\n\n 159,\n\n 103,\n\n 100,\n\n 29,\n\n 183,\n\n 242,\n\n 68,\n\n 144,\n\n 184,\n\n 114,\n\n 211,\n\n ]\n\n )\n\n )\n\n\n\n # DtDZCnXEN69n5W6rN5SdJFgedrWdK8NV9bsMiJekNRyu\n\n signer_two = Keypair.from_seed(\n\n bytes(\n\n [\n\n 177,\n\n 182,\n\n 154,\n\n 154,\n\n 5,\n\n 145,\n\n 253,\n\n 138,\n\n 211,\n\n 126,\n\n 222,\n\n 195,\n\n 21,\n\n 64,\n\n 117,\n\n 211,\n\n 225,\n\n 47,\n\n 115,\n\n 31,\n\n 247,\n\n 242,\n\n 80,\n\n 195,\n\n 38,\n\n 8,\n\n 236,\n\n 155,\n\n 255,\n\n 27,\n\n 20,\n\n 142,\n\n ]\n\n )\n\n )\n\n\n\n # FXgds3n6SNCoVVV4oELSumv8nKzAfqSgmeu7cNPikKFT\n\n receiver_two = Keypair.from_seed(\n\n bytes(\n\n [\n\n 180,\n\n 204,\n\n 139,\n\n 131,\n\n 244,\n\n 6,\n\n 180,\n\n 121,\n\n 191,\n\n 193,\n\n 45,\n\n 109,\n\n 198,\n\n 50,\n\n 163,\n\n 140,\n\n 34,\n\n 4,\n\n 172,\n\n 76,\n\n 129,\n\n 45,\n\n 194,\n\n 83,\n\n 192,\n\n 112,\n\n 76,\n\n 58,\n\n 32,\n\n 174,\n\n 49,\n\n 248,\n\n ]\n\n )\n\n )\n\n\n\n # C2UwQHqJ3BmEJHSMVmrtZDQGS2fGv8fZrWYGi18nHF5k\n\n signer_three = Keypair.from_seed(\n\n bytes(\n\n [\n\n 29,\n\n 79,\n\n 73,\n\n 16,\n\n 137,\n\n 117,\n\n 183,\n\n 2,\n\n 131,\n\n 0,\n\n 209,\n\n 142,\n\n 134,\n\n 100,\n\n 190,\n\n 35,\n\n 95,\n\n 220,\n\n 200,\n\n 163,\n\n 247,\n\n 237,\n\n 161,\n\n 70,\n\n 226,\n\n 223,\n\n 100,\n\n 148,\n\n 49,\n\n 202,\n\n 154,\n\n 180,\n\n ]\n\n )\n\n )\n\n\n\n # 8YPqwYXZtWPd31puVLEUPamS4wTv6F89n8nXDA5Ce2Bg\n\n receiver_three = Keypair.from_seed(\n\n bytes(\n\n [\n\n 167,\n\n 102,\n\n 49,\n\n 166,\n\n 202,\n\n 0,\n\n 132,\n\n 182,\n\n 239,\n\n 182,\n\n 252,\n\n 59,\n\n 25,\n\n 103,\n\n 76,\n\n 217,\n\n 65,\n\n 215,\n\n 210,\n\n 159,\n\n 168,\n\n 50,\n\n 10,\n\n 229,\n\n 144,\n\n 231,\n\n 221,\n\n 74,\n\n 182,\n\n 161,\n\n 52,\n\n 193,\n\n ]\n\n )\n\n )\n\n instructions = [\n\n system_program.transfer(\n\n dict(\n\n from_pubkey=signer_one.pubkey(),\n\n to_pubkey=receiver_one.pubkey(),\n\n lamports=2_000_000,\n\n )\n\n ),\n\n system_program.transfer(\n\n dict(\n\n from_pubkey=signer_two.pubkey(),\n\n to_pubkey=receiver_two.pubkey(),\n\n lamports=2_000_000,\n\n )\n\n ),\n\n system_program.transfer(\n\n dict(\n\n from_pubkey=signer_three.pubkey(),\n\n to_pubkey=receiver_three.pubkey(),\n\n lamports=2_000_000,\n\n )\n\n ),\n\n ]\n\n fee_payer = signer_one\n\n message = 
Message.new_with_blockhash(instructions, fee_payer.pubkey(), BLOCKHASH)\n\n sorted_signers = sorted(\n\n [x.pubkey() for x in [signer_one, signer_two, signer_three]],\n\n key=lambda x: str(x),\n\n )\n\n sorted_signers_excluding_fee_payer = [\n\n x for x in sorted_signers if str(x) != str(fee_payer.pubkey())\n\n ]\n\n sorted_receivers = sorted(\n\n [x.pubkey() for x in [receiver_one, receiver_two, receiver_three]],\n\n key=lambda x: str(x),\n\n )\n\n txn = Transaction.new_unsigned(message)\n\n tx_msg = txn.message\n\n\n\n js_msg_b64_check = b\"AwABBwZtbiRMvgQjcE2kVx9yon8XqPSO5hwc2ApflnOZMu0Qo9G5/xbhB0sp8/03Rv9x4MKSkQ+k4LB6lNLvCgKZ/ju/aw+EyQpTObVa3Xm+NA1gSTzutgFCTfkDto/0KtuIHHAMpKRb92NImxKeWQJ2/291j6nTzFj1D6nW25p7TofHmVsGt8uFnTv7+8vsWZ0uN7azdxa+jCIIm4WzKK+4uKfX39t5UA7S1soBQaJkTGOQkSbBo39gIjDkbW0TrevslgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxJrndgN4IFTxep3s6kO0ROug7bEsbx0xxuDkqEvwUusDBgIABAwCAAAAgIQeAAAAAAAGAgIFDAIAAACAhB4AAAAAAAYCAQMMAgAAAICEHgAAAAAA\" # noqa: E501 pylint: disable=line-too-long\n\n\n\n assert b64encode(tx_msg.serialize()) == js_msg_b64_check\n\n\n\n # Transaction should organize AccountMetas by PublicKey\n\n assert tx_msg.account_keys[0] == fee_payer.pubkey()\n\n assert tx_msg.account_keys[1] == sorted_signers_excluding_fee_payer[0]\n\n assert tx_msg.account_keys[2] == sorted_signers_excluding_fee_payer[1]\n\n assert tx_msg.account_keys[3] == sorted_receivers[0]\n\n assert tx_msg.account_keys[4] == sorted_receivers[1]\n", "file_path": "tests/test_transaction.py", "rank": 83, "score": 49091.804310228494 }, { "content": "def test_create_account():\n\n \"\"\"Test creating a transaction for create account.\"\"\"\n\n params = sp.CreateAccountParams(\n\n from_pubkey=Keypair().pubkey(),\n\n to_pubkey=Keypair().pubkey(),\n\n lamports=123,\n\n space=1,\n\n owner=Pubkey.default(),\n\n )\n", "file_path": "tests/test_system_program.py", "rank": 84, "score": 49091.720092447744 }, { "content": "def test_refs_invalid_account() -> None:\n\n key = Keypair()\n\n instructions = [CompiledInstruction(1, b\"\", bytes([2]))]\n\n tx = Transaction.new_with_compiled_instructions(\n\n [key],\n\n [],\n\n Hash.default(),\n\n [Pubkey.default()],\n\n instructions,\n\n )\n\n assert get_program_id(tx, 0) == Pubkey.default()\n\n with raises(SanitizeError) as excinfo:\n\n tx.sanitize()\n", "file_path": "tests/test_transaction.py", "rank": 85, "score": 49088.359717927095 }, { "content": "pub trait RichcmpEqualityOnly: PartialEq {\n\n fn richcmp(&self, other: &Self, op: CompareOp) -> PyResult<bool> {\n\n match op {\n\n CompareOp::Eq => Ok(self == other),\n\n CompareOp::Ne => Ok(self != other),\n\n CompareOp::Lt => Err(richcmp_type_error(\"<\")),\n\n CompareOp::Gt => Err(richcmp_type_error(\">\")),\n\n CompareOp::Le => Err(richcmp_type_error(\"<=\")),\n\n CompareOp::Ge => Err(richcmp_type_error(\">=\")),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 86, "score": 48684.759020156605 }, { "content": "def test_transaction_minimum_serialized_size() -> None:\n\n alice_keypair = Keypair()\n\n alice_pubkey = alice_keypair.pubkey()\n\n bob_pubkey = Pubkey.new_unique()\n\n params = system_program.TransferParams(\n\n from_pubkey=alice_pubkey, to_pubkey=bob_pubkey, lamports=42\n\n )\n\n ix = system_program.transfer(params)\n\n u32_size = 4\n\n u64_size = 8\n\n expected_data_size = u32_size + u64_size\n\n assert expected_data_size == 12\n\n assert len(ix.data) == expected_data_size, \"unexpected system instruction size\"\n\n\n\n expected_instruction_size = 1 + 1 + len(ix.accounts) + 1 + 
expected_data_size\n\n assert expected_instruction_size == 17\n\n\n\n message = Message([ix], alice_pubkey)\n\n assert (\n\n len(message.instructions[0].serialize()) == expected_instruction_size\n\n ), \"unexpected Instruction.serialized_size\"\n\n\n\n tx = Transaction([alice_keypair], message, Hash.default())\n\n\n\n len_size = 1\n\n num_required_sigs_size = 1\n\n num_readonly_accounts_size = 2\n\n blockhash_size = 32\n\n signature_size = Signature.LENGTH\n\n pubkey_size = 32\n\n tx_sigs_len = len(tx.signatures)\n\n assert tx_sigs_len == 1\n\n account_keys_len = len(tx.message.account_keys)\n\n assert account_keys_len == 3\n\n expected_transaction_size = (\n\n len_size\n\n + (tx_sigs_len * signature_size)\n\n + num_required_sigs_size\n\n + num_readonly_accounts_size\n\n + len_size\n\n + (account_keys_len * pubkey_size)\n\n + blockhash_size\n\n + len_size\n\n + expected_instruction_size\n\n )\n\n assert expected_transaction_size == 215\n\n assert (\n\n len(tx.serialize()) == expected_transaction_size\n", "file_path": "tests/test_transaction.py", "rank": 87, "score": 48276.83782682464 }, { "content": "def test_tx_uses_nonce_ok() -> None:\n\n (_, _, tx) = nonced_transfer_tx()\n", "file_path": "tests/test_transaction.py", "rank": 88, "score": 48242.17387211119 }, { "content": "def test_tx_keypair_pubkey_mismatch() -> None:\n\n from_keypair = Keypair()\n\n from_pubkey = from_keypair.pubkey()\n\n to_pubkey = Pubkey.new_unique()\n\n instructions = [\n\n system_program.transfer(\n\n {\"from_pubkey\": from_pubkey, \"to_pubkey\": to_pubkey, \"lamports\": 42}\n\n )\n\n ]\n\n tx = Transaction.new_with_payer(instructions, from_pubkey)\n\n unused_keypair = Keypair()\n\n with raises(SignerError) as excinfo:\n\n tx.partial_sign([from_keypair, unused_keypair], Hash.default())\n", "file_path": "tests/test_transaction.py", "rank": 89, "score": 47940.82204181317 }, { "content": "def test_off_curve_pubkey_verify_fails():\n\n # Golden point off the ed25519 curve\n\n off_curve_bytes = b58decode(b\"9z5nJyQar1FUxVJxpBXzon6kHehbomeYiDaLi9WAMhCq\")\n\n pubkey = Pubkey(off_curve_bytes)\n\n signature = Signature.default()\n", "file_path": "tests/test_signature.py", "rank": 90, "score": 47940.82204181317 }, { "content": "pub trait RichcmpEqOnlyPrecalculated: PartialEq {\n\n fn richcmp(&self, eq_val: bool, op: CompareOp) -> PyResult<bool> {\n\n match op {\n\n CompareOp::Eq => Ok(eq_val),\n\n CompareOp::Ne => Ok(!eq_val),\n\n CompareOp::Lt => Err(richcmp_type_error(\"<\")),\n\n CompareOp::Gt => Err(richcmp_type_error(\">\")),\n\n CompareOp::Le => Err(richcmp_type_error(\"<=\")),\n\n CompareOp::Ge => Err(richcmp_type_error(\">=\")),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 91, "score": 47705.493837599584 }, { "content": "def test_transaction_instruction_with_duplicate_keys() -> None:\n\n program_id = Pubkey.default()\n\n keypair0 = Keypair()\n\n id0 = keypair0.pubkey()\n\n id1 = Pubkey.new_unique()\n\n ix = Instruction(\n\n program_id,\n\n ZERO_BYTES,\n\n [\n\n AccountMeta(id0, True, True),\n\n AccountMeta(id1, False, True),\n\n AccountMeta(id0, False, True),\n\n AccountMeta(id1, False, True),\n\n ],\n\n )\n\n message = Message([ix], id0)\n\n tx = Transaction.new_unsigned(message)\n\n tx.sign([keypair0], Hash.default())\n\n assert tx.message.instructions[0] == CompiledInstruction(\n\n 2, ZERO_BYTES, bytes([0, 1, 0, 1])\n\n )\n", "file_path": "tests/test_transaction.py", "rank": 92, "score": 47638.88446326992 }, { "content": "def test_create_account_with_seed():\n\n \"\"\"Test creating a an account with 
seed.\"\"\"\n\n params = sp.CreateAccountWithSeedParams(\n\n from_pubkey=Keypair().pubkey(),\n\n to_pubkey=Pubkey(bytes([3]).rjust(Pubkey.LENGTH, b\"\\0\")),\n\n base=Pubkey(bytes([1]).rjust(Pubkey.LENGTH, b\"\\0\")),\n\n seed=\"gqln\",\n\n lamports=123,\n\n space=4,\n\n owner=Pubkey(bytes([2]).rjust(Pubkey.LENGTH, b\"\\0\")),\n\n )\n\n assert (\n\n sp.decode_create_account_with_seed(sp.create_account_with_seed(params))\n\n == params\n", "file_path": "tests/test_system_program.py", "rank": 93, "score": 47583.4653036502 }, { "content": "def test_initialize_nonce_account():\n\n params = sp.InitializeNonceAccountParams(\n\n nonce_pubkey=Keypair().pubkey(), authority=Keypair().pubkey()\n\n )\n\n assert (\n\n sp.decode_initialize_nonce_account(sp.initialize_nonce_account(params))\n\n == params\n", "file_path": "tests/test_system_program.py", "rank": 94, "score": 47580.0207113488 }, { "content": "def test_withdraw_nonce_account():\n\n params = sp.WithdrawNonceAccountParams(\n\n nonce_pubkey=Keypair().pubkey(),\n\n authorized_pubkey=Keypair().pubkey(),\n\n to_pubkey=Keypair().pubkey(),\n\n lamports=42,\n\n )\n", "file_path": "tests/test_system_program.py", "rank": 95, "score": 47580.0207113488 }, { "content": "def test_advance_nonce_account():\n\n params = sp.AdvanceNonceAccountParams(\n\n nonce_pubkey=Keypair().pubkey(), authorized_pubkey=Keypair().pubkey()\n\n )\n", "file_path": "tests/test_system_program.py", "rank": 96, "score": 47580.0207113488 }, { "content": "def test_create_nonce_account() -> None:\n\n from_pubkey = Pubkey.new_unique()\n\n nonce_pubkey = Pubkey.new_unique()\n\n authorized = nonce_pubkey\n\n ixs = sp.create_nonce_account(from_pubkey, nonce_pubkey, authorized, 42)\n\n assert len(ixs) == 2\n\n ix = ixs[0]\n\n assert ix.program_id == sp.ID\n\n pubkeys = [am.pubkey for am in ix.accounts]\n\n assert from_pubkey in pubkeys\n", "file_path": "tests/test_system_program.py", "rank": 97, "score": 47580.0207113488 }, { "content": "def test_get_nonce_pub_from_ix_ok() -> None:\n\n (_, nonce_pubkey, tx) = nonced_transfer_tx()\n\n nonce_ix = tx.uses_durable_nonce()\n\n assert nonce_ix is not None\n", "file_path": "tests/test_transaction.py", "rank": 98, "score": 46330.809908538955 }, { "content": "def test_tx_uses_nonce_empty_ix_fail() -> None:\n", "file_path": "tests/test_transaction.py", "rank": 99, "score": 45449.14057698207 } ]
Rust
alvr/settings-schema-derive/src/lib.rs
SonicZY/ALVR
4beff45eec4af6f0683439948d0e1091ce7130c0
mod higher_order;
mod ty;

use darling::{ast::Fields, util::Flag, FromDeriveInput, FromField, FromMeta, FromVariant};
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens};
use std::string::ToString;
use syn::{DeriveInput, Error, Ident, Lit, Type, Visibility};

type TResult<T = TokenStream2> = Result<T, TokenStream>;

fn error<T, TT: ToTokens>(message: &str, tokens: TT) -> TResult<T> {
    Err(
        Error::new_spanned(tokens, format!("[SettingsSchema] {}", message))
            .to_compile_error()
            .into(),
    )
}

fn suffix_ident(ty_ident: &Ident, suffix: &str) -> Ident {
    Ident::new(
        &format!("{}{}", ty_ident.to_string(), suffix),
        ty_ident.span(),
    )
}

#[derive(FromField)]
#[darling(attributes(schema))]
struct FieldMeta {
    vis: Visibility,
    ident: Option<Ident>,
    ty: Type,
    #[darling(multiple)]
    #[darling(rename = "placeholder")]
    placeholders: Vec<String>,
    #[darling(multiple)]
    higher_order: Vec<higher_order::HigherOrderSetting>,
    #[darling(default)]
    advanced: Flag,
    #[darling(default)]
    switch_advanced: Flag,
    #[darling(default)]
    min: Option<Lit>,
    #[darling(default)]
    max: Option<Lit>,
    #[darling(default)]
    step: Option<Lit>,
    #[darling(default)]
    gui: Option<ty::NumericGuiType>,
}

#[derive(FromMeta)]
enum ChoiceControlType {
    Dropdown,
    ButtonGroup,
}

#[derive(FromVariant)]
#[darling(attributes(schema), supports(unit, newtype, named))]
struct VariantMeta {
    ident: Ident,
    fields: darling::ast::Fields<FieldMeta>,
}

#[derive(FromDeriveInput)]
#[darling(attributes(schema), supports(struct_named, enum_any))]
struct DeriveInputMeta {
    data: darling::ast::Data<VariantMeta, FieldMeta>,
    #[darling(default)]
    gui: Option<ChoiceControlType>,
}

struct SchemaData {
    default_fields_ts: TokenStream2,
    schema_code_ts: TokenStream2,
    aux_objects_ts: Option<TokenStream2>,
}

fn named_fields_schema(meta: Vec<FieldMeta>) -> TResult<SchemaData> {
    let mut vis = vec![];
    let mut idents = vec![];
    let mut tys_ts = vec![];
    let mut keys = vec![];
    let mut entry_types_ts = vec![];

    for meta in meta {
        for ph in &meta.placeholders {
            keys.push(ph.clone());
            entry_types_ts.push(quote!(settings_schema::EntryType::Placeholder))
        }

        for setting in &meta.higher_order {
            let higher_order::Entry { key, entry_type_ts } = higher_order::schema(setting)?;
            keys.push(key);
            entry_types_ts.push(entry_type_ts);
        }

        let ident = meta.ident.as_ref().unwrap().clone();
        let advanced = meta.advanced.is_some();

        let ty::SchemaData {
            default_ty_ts,
            schema_code_ts,
        } = ty::schema(&meta.ty, &meta)?;

        vis.push(meta.vis);
        idents.push(ident.clone());
        tys_ts.push(default_ty_ts);
        keys.push(ident.to_string());
        entry_types_ts.push(quote!(
            EntryType::Data(EntryData {
                advanced: #advanced,
                content: {
                    let default = default.#ident;
                    #schema_code_ts
                }
            })
        ));
    }

    Ok(SchemaData {
        default_fields_ts: quote!(#(#vis #idents: #tys_ts,)*),
        schema_code_ts: quote!(SchemaNode::Section(
            vec![#((#keys.into(), #entry_types_ts)),*]
        )),
        aux_objects_ts: None,
    })
}

fn variants_schema(
    gui_type: Option<ChoiceControlType>,
    vis: &Visibility,
    ident: &Ident,
    meta: Vec<VariantMeta>,
) -> TResult<SchemaData> {
    let mut variants = vec![];
    let mut data_variants = vec![];
    let mut data_tys_ts = vec![];
    let mut keys = vec![];
    let mut entry_data_ts = vec![];
    let mut aux_variants_structs_ts = vec![];

    let gui_ts = match gui_type {
        None => quote!(None),
        Some(ChoiceControlType::Dropdown) => {
            quote!(Some(ChoiceControlType::Dropdown))
        }
        Some(ChoiceControlType::ButtonGroup) => {
            quote!(Some(ChoiceControlType::ButtonGroup))
        }
    };

    for meta in meta {
        let variant_ident = meta.ident;
        let snake_case_variant_ident =
            Ident::new(&variant_ident.to_string(), variant_ident.span());

        variants.push(variant_ident.clone());
        keys.push(variant_ident.to_string());

        match meta.fields.style {
            darling::ast::Style::Tuple => {
                let field_meta = &meta.fields.fields[0];

                if !field_meta.higher_order.is_empty() {
                    error(
                        "'higher_order' attributes not supported in this position",
                        &variant_ident,
                    )?;
                }
                if !field_meta.placeholders.is_empty() {
                    error(
                        "'placeholder' attributes not supported in this position",
                        &variant_ident,
                    )?;
                }

                let advanced = field_meta.advanced.is_some();

                let ty::SchemaData {
                    default_ty_ts,
                    schema_code_ts,
                } = ty::schema(&field_meta.ty, &field_meta)?;

                data_variants.push(snake_case_variant_ident.clone());
                data_tys_ts.push(default_ty_ts);
                entry_data_ts.push(quote!(Some(settings_schema::EntryData {
                    advanced: #advanced,
                    content: {
                        let default = default.#snake_case_variant_ident;
                        #schema_code_ts
                    }
                })));
            }
            darling::ast::Style::Struct => {
                let default_ty_ts =
                    suffix_ident(&suffix_ident(ident, &variant_ident.to_string()), "Default")
                        .to_token_stream();

                let SchemaData {
                    default_fields_ts,
                    schema_code_ts,
                    ..
                } = named_fields_schema(meta.fields.fields)?;

                data_variants.push(snake_case_variant_ident.clone());
                data_tys_ts.push(default_ty_ts.clone());
                entry_data_ts.push(quote!(Some(settings_schema::EntryData {
                    advanced: false,
                    content: {
                        let default = default.#snake_case_variant_ident;
                        #schema_code_ts
                    }
                })));
                aux_variants_structs_ts.push(quote! {
                    #[derive(
                        settings_schema::Serialize,
                        settings_schema::Deserialize,
                        Clone,
                        PartialEq
                    )]
                    #vis struct #default_ty_ts {
                        #default_fields_ts
                    }
                });
            }
            darling::ast::Style::Unit => {
                entry_data_ts.push(quote!(None));
            }
        }
    }

    let default_variant_ty = suffix_ident(&ident, "DefaultVariant");

    Ok(SchemaData {
        default_fields_ts: quote! {
            #(#vis #data_variants: #data_tys_ts,)*
            #vis variant: #default_variant_ty,
        },
        schema_code_ts: quote!(SchemaNode::Choice(SchemaChoice {
            default: settings_schema::to_json_value(default.variant)
                .unwrap()
                .as_str()
                .unwrap()
                .into(),
            variants: vec![#((#keys.into(), #entry_data_ts)),*],
            gui: #gui_ts
        })),
        aux_objects_ts: Some(quote! {
            #(#aux_variants_structs_ts)*

            #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)]
            #vis enum #default_variant_ty {
                #(#variants,)*
            }
        }),
    })
}

fn schema(derive_input: DeriveInput) -> TResult {
    if !derive_input.generics.params.is_empty() {
        return error("Generics not supported", &derive_input.generics);
    }

    let meta: DeriveInputMeta =
        FromDeriveInput::from_derive_input(&derive_input).map_err(|e| e.write_errors())?;
    let gui_type = meta.gui;

    let vis = derive_input.vis;
    let derive_input_ident = derive_input.ident;
    let default_ty_ident = suffix_ident(&derive_input_ident, "Default");

    let SchemaData {
        default_fields_ts,
        schema_code_ts,
        aux_objects_ts,
    } = match meta.data {
        darling::ast::Data::Enum(variants) => {
            variants_schema(gui_type, &vis, &derive_input_ident, variants)?
        }
        darling::ast::Data::Struct(Fields { fields, .. }) => named_fields_schema(fields)?,
    };

    Ok(quote! {
        #aux_objects_ts

        #[allow(non_snake_case)]
        #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)]
        #vis struct #default_ty_ident {
            #default_fields_ts
        }

        impl #derive_input_ident {
            #vis fn schema(default: #default_ty_ident) -> settings_schema::SchemaNode {
                use settings_schema::*;

                #schema_code_ts
            }
        }
    })
}

#[proc_macro_derive(SettingsSchema, attributes(schema))]
pub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream {
    let input = syn::parse_macro_input!(input as DeriveInput);

    match schema(input) {
        Ok(tokens) => tokens.into(),
        Err(e) => e,
    }
}
mod higher_order;
mod ty;

use darling::{ast::Fields, util::Flag, FromDeriveInput, FromField, FromMeta, FromVariant};
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens};
use std::string::ToString;
use syn::{DeriveInput, Error, Ident, Lit, Type, Visibility};

type TResult<T = TokenStream2> = Result<T, TokenStream>;

fn error<T, TT: ToTokens>(message: &str, tokens: TT) -> TResult<T> {
    Err(
        Error::new_spanned(tokens, format!("[SettingsSchema] {}", message))
            .to_compile_error()
            .into(),
    )
}

fn suffix_ident(ty_ident: &Ident, suffix: &str) -> Ident {
    Ident::new(
        &format!("{}{}", ty_ident.to_string(), suffix),
        ty_ident.span(),
    )
}

#[derive(FromField)]
#[darling(attributes(schema))]
struct FieldMeta {
    vis: Visibility,
    ident: Option<Ident>,
    ty: Type,
    #[darling(multiple)]
    #[darling(rename = "placeholder")]
    placeholders: Vec<String>,
    #[darling(multiple)]
    higher_order: Vec<higher_order::HigherOrderSetting>,
    #[darling(default)]
    advanced: Flag,
    #[darling(default)]
    switch_advanced: Flag,
    #[darling(default)]
    min: Option<Lit>,
    #[darling(default)]
    max: Option<Lit>,
    #[darling(default)]
    step: Option<Lit>,
    #[darling(default)]
    gui: Option<ty::NumericGuiType>,
}

#[derive(FromMeta)]
enum ChoiceControlType {
    Dropdown,
    ButtonGroup,
}

#[derive(FromVariant)]
#[darling(attributes(schema), supports(unit, newtype, named))]
struct VariantMeta {
    ident: Ident,
    fields: darling::ast::Fields<FieldMeta>,
}

#[derive(FromDeriveInput)]
#[darling(attributes(schema), supports(struct_named, enum_any))]
struct DeriveInputMeta {
    data: darling::ast::Data<VariantMeta, FieldMeta>,
    #[darling(default)]
    gui: Option<ChoiceControlType>,
}

struct SchemaData {
    default_fields_ts: TokenStream2,
    schema_code_ts: TokenStream2,
    aux_objects_ts: Option<TokenStream2>,
}

fn named_fields_schema(meta: Vec<FieldMeta>) -> TResult<SchemaData> {
    let mut vis = vec![];
    let mut idents = vec![];
    let mut tys_ts = vec![];
    let mut keys = vec![];
    let mut entry_types_ts = vec![];

    for meta in meta {
        for ph in &meta.placeholders {
            keys.push(ph.clone());
            entry_types_ts.push(quote!(settings_schema::EntryType::Placeholder))
        }

        for setting in &meta.higher_order {
            let higher_order::Entry { key, entry_type_ts } = higher_order::schema(setting)?;
            keys.push(key);
            entry_types_ts.push(entry_type_ts);
        }

        let ident = meta.ident.as_ref().unwrap().clone();
        let advanced = meta.advanced.is_some();

        let ty::SchemaData {
            default_ty_ts,
            schema_code_ts,
        } = ty::schema(&meta.ty, &meta)?;

        vis.push(meta.vis);
        idents.push(ident.clone());
        tys_ts.push(default_ty_ts);
        keys.push(ident.to_string());
        entry_types_ts.push(quote!(
            EntryType::Data(EntryData {
                advanced: #advanced,
                content: {
                    let default = default.#ident;
                    #schema_code_ts
                }
            })
        ));
    }

    Ok(SchemaData {
        default_fields_ts: quote!(#(#vis #idents: #tys_ts,)*),
        schema_code_ts: quote!(SchemaNode::Section(
            vec![#((#keys.into(), #entry_types_ts)),*]
        )),
        aux_objects_ts: None,
    })
}

fn variants_schema(
    gui_type: Option<ChoiceControlType>,
    vis: &Visibility,
    ident: &Ident,
    meta: Vec<VariantMeta>,
) -> TResult<SchemaData> {
    let mut variants = vec![];
    let mut data_variants = vec![];
    let mut data_tys_ts = vec![];
    let mut keys = vec![];
    let mut entry_data_ts = vec![];
    let mut aux_variants_structs_ts = vec![];

    let gui_ts = match gui_type {
        None => quote!(None),
        Some(ChoiceControlType::Dropdown) => {
            quote!(Some(ChoiceControlType::Dropdown))
        }
        Some(ChoiceControlType::ButtonGroup) => {
            quote!(Some(ChoiceControlType::ButtonGroup))
        }
    };

    for meta in meta {
        let variant_ident = meta.ident;
        let snake_case_variant_ident =
            Ident::new(&variant_ident.to_string(), variant_ident.span());

        variants.push(variant_ident.clone());
        keys.push(variant_ident.to_string());

        match meta.fields.style {
            darling::ast::Style::Tuple => {
                let field_meta = &meta.fields.fields[0];

                if !field_meta.higher_order.is_empty() {
                    error(
                        "'higher_order' attributes not supported in this position",
                        &variant_ident,
                    )?;
                }
                if !field_meta.placeholders.is_empty() {
                    error(
                        "'placeholder' attributes not supported in this position",
                        &variant_ident,
                    )?;
                }

                let advanced = field_meta.advanced.is_some();

                let ty::SchemaData {
                    default_ty_ts,
                    schema_code_ts,
                } = ty::schema(&field_meta.ty, &field_meta)?;

                data_variants.push(snake_case_variant_ident.clone());
                data_tys_ts.push(default_ty_ts);
                entry_data_ts.push(quote!(Some(settings_schema::EntryData {
                    advanced: #advanced,
                    content: {
                        let default = default.#snake_case_variant_ident;
                        #schema_code_ts
                    }
                })));
            }
fn schema(derive_input: DeriveInput) -> TResult {
    if !derive_input.generics.params.is_empty() {
        return error("Generics not supported", &derive_input.generics);
    }

    let meta: DeriveInputMeta =
        FromDeriveInput::from_derive_input(&derive_input).map_err(|e| e.write_errors())?;
    let gui_type = meta.gui;

    let vis = derive_input.vis;
    let derive_input_ident = derive_input.ident;
    let default_ty_ident = suffix_ident(&derive_input_ident, "Default");

    let SchemaData {
        default_fields_ts,
        schema_code_ts,
        aux_objects_ts,
    } = match meta.data {
        darling::ast::Data::Enum(variants) => {
            variants_schema(gui_type, &vis, &derive_input_ident, variants)?
        }
        darling::ast::Data::Struct(Fields { fields, .. }) => named_fields_schema(fields)?,
    };

    Ok(quote! {
        #aux_objects_ts

        #[allow(non_snake_case)]
        #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)]
        #vis struct #default_ty_ident {
            #default_fields_ts
        }

        impl #derive_input_ident {
            #vis fn schema(default: #default_ty_ident) -> settings_schema::SchemaNode {
                use settings_schema::*;

                #schema_code_ts
            }
        }
    })
}

#[proc_macro_derive(SettingsSchema, attributes(schema))]
pub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream {
    let input = syn::parse_macro_input!(input as DeriveInput);

    match schema(input) {
        Ok(tokens) => tokens.into(),
        Err(e) => e,
    }
}
            darling::ast::Style::Struct => {
                let default_ty_ts =
                    suffix_ident(&suffix_ident(ident, &variant_ident.to_string()), "Default")
                        .to_token_stream();

                let SchemaData {
                    default_fields_ts,
                    schema_code_ts,
                    ..
                } = named_fields_schema(meta.fields.fields)?;

                data_variants.push(snake_case_variant_ident.clone());
                data_tys_ts.push(default_ty_ts.clone());
                entry_data_ts.push(quote!(Some(settings_schema::EntryData {
                    advanced: false,
                    content: {
                        let default = default.#snake_case_variant_ident;
                        #schema_code_ts
                    }
                })));
                aux_variants_structs_ts.push(quote! {
                    #[derive(
                        settings_schema::Serialize,
                        settings_schema::Deserialize,
                        Clone,
                        PartialEq
                    )]
                    #vis struct #default_ty_ts {
                        #default_fields_ts
                    }
                });
            }
            darling::ast::Style::Unit => {
                entry_data_ts.push(quote!(None));
            }
        }
    }

    let default_variant_ty = suffix_ident(&ident, "DefaultVariant");

    Ok(SchemaData {
        default_fields_ts: quote! {
            #(#vis #data_variants: #data_tys_ts,)*
            #vis variant: #default_variant_ty,
        },
        schema_code_ts: quote!(SchemaNode::Choice(SchemaChoice {
            default: settings_schema::to_json_value(default.variant)
                .unwrap()
                .as_str()
                .unwrap()
                .into(),
            variants: vec![#((#keys.into(), #entry_data_ts)),*],
            gui: #gui_ts
        })),
        aux_objects_ts: Some(quote! {
            #(#aux_variants_structs_ts)*

            #[derive(settings_schema::Serialize, settings_schema::Deserialize, Clone, PartialEq)]
            #vis enum #default_variant_ty {
                #(#variants,)*
            }
        }),
    })
}
function_block-function_prefix_line
[ { "content": "fn custom_leaf_type_schema(ty_ident: &Ident, field: &FieldMeta) -> TResult {\n\n forbid_numeric_attrs(field, \"custom\")?;\n\n\n\n Ok(quote!(#ty_ident::schema(default)))\n\n}\n\n\n\n// Generate a default representation type and corresponding schema instantiation code.\n\n// This function calls itself recursively to parse the whole compound type. The recursion degree is\n\n// only 1: only types that have only one type argument can be parsed. Still custom types cannot have\n\n// type arguments, so they are always the leaf type.\n\n// The meta parameter contains the attributes associated to the curent field: they are forwarded\n\n// as-is in every recursion step. Most of the attributes are used for numerical leaf types, but\n\n// there is also the `switch_default` flag that is used by each Switch type inside the type chain.\n\npub(crate) fn schema(ty: &Type, meta: &FieldMeta) -> Result<SchemaData, TokenStream> {\n\n match &ty {\n\n Type::Array(TypeArray { len, elem, .. }) => {\n\n let SchemaData {\n\n default_ty_ts,\n\n schema_code_ts,\n\n } = schema(elem, meta)?;\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 1, "score": 401125.7839920767 }, { "content": "fn forbid_numeric_attrs(field: &FieldMeta, type_str: &str) -> TResult<()> {\n\n let maybe_invalid_arg = field\n\n .min\n\n .as_ref()\n\n .or_else(|| field.max.as_ref())\n\n .or_else(|| field.step.as_ref());\n\n\n\n let tokens = if let Some(arg) = maybe_invalid_arg {\n\n arg.to_token_stream()\n\n } else if field.gui.is_some() {\n\n quote!()\n\n } else {\n\n return Ok(());\n\n };\n\n\n\n error(\n\n &format!(\"Unexpected argument for {} type\", type_str),\n\n tokens,\n\n )\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 2, "score": 390919.0428299131 }, { "content": "fn integer_type_schema(field: &FieldMeta) -> TResult {\n\n let min_ts = maybe_integer_literal(field.min.as_ref())?;\n\n let max_ts = maybe_integer_literal(field.max.as_ref())?;\n\n let step_ts = maybe_integer_literal(field.step.as_ref())?;\n\n let gui_ts = maybe_numeric_gui(field.gui.as_ref());\n\n\n\n Ok(quote!(SchemaNode::Integer(SchemaNumeric {\n\n default: default as _,\n\n min: #min_ts,\n\n max: #max_ts,\n\n step: #step_ts,\n\n gui: #gui_ts,\n\n })))\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 5, "score": 347774.8477994959 }, { "content": "fn string_type_schema(field: &FieldMeta) -> TResult {\n\n forbid_numeric_attrs(field, \"String\")?;\n\n\n\n Ok(quote!(SchemaNode::Text(default)))\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 6, "score": 347774.8477994959 }, { "content": "fn bool_type_schema(field: &FieldMeta) -> TResult {\n\n forbid_numeric_attrs(field, \"bool\")?;\n\n\n\n Ok(quote!(SchemaNode::Boolean(default)))\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 7, "score": 347774.8477994959 }, { "content": "fn float_type_schema(field: &FieldMeta) -> TResult {\n\n let min_ts = maybe_float_literal(field.min.as_ref())?;\n\n let max_ts = maybe_float_literal(field.max.as_ref())?;\n\n let step_ts = maybe_float_literal(field.step.as_ref())?;\n\n let gui_ts = maybe_numeric_gui(field.gui.as_ref());\n\n\n\n Ok(quote!(SchemaNode::Float(SchemaNumeric {\n\n default: default as _,\n\n min: #min_ts,\n\n max: #max_ts,\n\n step: #step_ts,\n\n gui: #gui_ts,\n\n })))\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 8, "score": 347774.8477994959 }, { "content": "fn maybe_numeric_gui(gui: Option<&NumericGuiType>) -> 
proc_macro2::TokenStream {\n\n if let Some(gui) = gui {\n\n match gui {\n\n NumericGuiType::TextBox => quote!(Some(NumericGuiType::TextBox)),\n\n NumericGuiType::UpDown => quote!(Some(NumericGuiType::UpDown)),\n\n NumericGuiType::Slider => quote!(Some(NumericGuiType::Slider)),\n\n }\n\n } else {\n\n quote!(None)\n\n }\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 9, "score": 320723.87837106513 }, { "content": "pub fn use_setting_name_trans(subkey: &str) -> String {\n\n let manager = use_translation();\n\n\n\n let mut route_segments = (*use_context::<SettingsTransContext>().unwrap()).0.clone();\n\n route_segments.push(subkey.to_owned());\n\n\n\n let route = route_segments.join(\"-\");\n\n\n\n if let Ok(name) = manager.get_fallible(&route) {\n\n name.into()\n\n } else {\n\n subkey.into()\n\n }\n\n}\n\n\n\npub struct SettingsTrans {\n\n pub name: String,\n\n pub help: Option<String>,\n\n pub notice: Option<String>,\n\n}\n\n\n", "file_path": "alvr/dashboard/src/translation.rs", "rank": 13, "score": 274721.6592435231 }, { "content": "pub fn session_settings_default() -> SettingsDefault {\n\n SettingsDefault {\n\n video: VideoDescDefault {\n\n adapter_index: 0,\n\n preferred_fps: 72.,\n\n render_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n\n recommended_target_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n", "file_path": "alvr/common/src/data/legacy_settings.rs", "rank": 14, "score": 256422.3187625102 }, { "content": "#[cfg(any(windows, target_os = \"linux\", target_os = \"android\"))]\n\npub fn create_identity(hostname: Option<String>) -> StrResult<PrivateIdentity> {\n\n let hostname = hostname.unwrap_or(format!(\"{}.client.alvr\", rand::random::<u16>()));\n\n\n\n let certificate = trace_err!(rcgen::generate_simple_self_signed([hostname.clone()]))?;\n\n\n\n Ok(PrivateIdentity {\n\n hostname,\n\n certificate_pem: trace_err!(certificate.serialize_pem())?,\n\n key_pem: certificate.serialize_private_key_pem(),\n\n })\n\n}\n", "file_path": "alvr/common/src/data/mod.rs", "rank": 15, "score": 256135.63980281007 }, { "content": "pub fn use_trans(key: &str) -> String {\n\n let manager = use_translation();\n\n manager.get(key).as_ref().to_owned()\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq)]\n\npub struct SettingsTransPathProviderProps {\n\n pub children: Children,\n\n}\n\n\n", "file_path": "alvr/dashboard/src/translation.rs", "rank": 16, "score": 251137.24028860749 }, { "content": "// SessionSettings is similar to Settings but it contains every branch, even unused ones. 
This is\n\n// the settings representation that the UI uses.\n\ntype SessionSettings = settings::SettingsDefault;\n\n\n", "file_path": "alvr/common/src/data/legacy_session.rs", "rank": 17, "score": 244489.0008728743 }, { "content": "pub fn use_setting_trans(subkey: &str) -> SettingsTrans {\n\n let manager = use_translation();\n\n\n\n let mut route_segments = (*use_context::<Vec<String>>().expect(\"Trans context\")).clone();\n\n route_segments.push(subkey.to_owned());\n\n\n\n let route = route_segments.join(\"-\");\n\n\n\n if let Ok(name) = manager.get_fallible(&route) {\n\n SettingsTrans {\n\n name: name.as_ref().to_owned(),\n\n help: manager.get_attribute_fallible(&route, \"help\").ok(),\n\n notice: manager.get_attribute_fallible(&route, \"notice\").ok(),\n\n }\n\n } else {\n\n SettingsTrans {\n\n name: subkey.to_owned(),\n\n help: None,\n\n notice: None,\n\n }\n\n }\n\n}\n", "file_path": "alvr/dashboard/src/translation.rs", "rank": 18, "score": 241939.91310921722 }, { "content": "fn get_only_type_argument(arguments: &PathArguments) -> &Type {\n\n if let PathArguments::AngleBracketed(args_block) = &arguments {\n\n if let GenericArgument::Type(ty) = args_block.args.first().unwrap() {\n\n return ty;\n\n }\n\n }\n\n // Fail cases are already handled by the compiler\n\n unreachable!()\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 19, "score": 241335.23297597046 }, { "content": "pub fn use_advanced() -> bool {\n\n use_context::<AdvancedContext>().unwrap().0\n\n}\n\n\n", "file_path": "alvr/dashboard/src/components/settings.rs", "rank": 20, "score": 231778.31737488313 }, { "content": " enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT\n\n {\n\n eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,\n\n eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,\n\n ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )\n\n {\n\n switch ( value )\n\n {\n\n case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return \"General\";\n\n case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return \"Validation\";\n\n case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return \"Performance\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 24, "score": 217741.61914054165 }, { "content": "fn maybe_float_literal(literal: Option<&Lit>) -> TResult {\n\n if let Some(literal) = literal {\n\n if let Lit::Float(lit_float) = literal {\n\n Ok(quote!(Some(#lit_float as _)))\n\n } else {\n\n error(\"Expected float literal\", literal)\n\n }\n\n } else {\n\n Ok(quote!(None))\n\n }\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 25, "score": 216576.5322905856 }, { "content": "fn maybe_integer_literal(literal: Option<&Lit>) -> TResult {\n\n if let Some(literal) = literal {\n\n if let Lit::Int(lit_int) = literal {\n\n Ok(quote!(Some(#lit_int)))\n\n } else {\n\n error(\"Expected integer literal\", literal)\n\n }\n\n } else {\n\n Ok(quote!(None))\n\n }\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/ty.rs", "rank": 26, "score": 216576.5322905856 }, { "content": "#[cfg(windows)]\n\nfn dynlib_fname(name: &str) -> String {\n\n format!(\"{}.dll\", name)\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 27, "score": 210600.68050634407 }, { "content": 
"// session_settings does not get validated here, it must be already valid\n\nfn json_session_settings_to_settings(\n\n session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section(entries) => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n if let EntryType::Data(data_schema) = maybe_data {\n\n Some((field_name, data_schema))\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|(field_name, data_schema)| {\n\n (\n\n field_name.clone(),\n\n json_session_settings_to_settings(\n\n &session_settings[field_name],\n", "file_path": "alvr/common/src/data/session.rs", "rank": 28, "score": 205805.8437894272 }, { "content": "#[function_component(SettingsContent)]\n\npub fn settings_content(\n\n props: &SettingProps<Vec<(String, SchemaNode)>, HashMap<String, json::Value>>,\n\n) -> Html {\n\n struct TabData {\n\n name: String,\n\n schema: SchemaNode,\n\n session: json::Value,\n\n }\n\n\n\n let (selected_tab_data, set_selected_tab_data) = {\n\n let (name, schema) = props.schema[0].clone();\n\n let session = props.session.get(&name).unwrap().clone();\n\n use_state(|| TabData {\n\n name,\n\n schema,\n\n session,\n\n })\n\n };\n\n\n\n let (advanced, set_advanced) = use_state(|| false);\n", "file_path": "alvr/dashboard/src/components/settings.rs", "rank": 29, "score": 203092.24055317865 }, { "content": "// Current data extrapolation strategy: match both field name and value type exactly.\n\n// Integer bounds are not validated, if they do not match the schema, deserialization will fail and\n\n// all data is lost.\n\n// Future strategies: check if value respects schema constraints, fuzzy field name matching, accept\n\n// integer to float and float to integer, tree traversal.\n\nfn extrapolate_session_settings_from_session_settings(\n\n old_session_settings: &json::Value,\n\n new_session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section(entries) => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n if let EntryType::Data(data_schema) = maybe_data {\n\n Some((field_name, data_schema))\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|(field_name, data_schema)| {\n\n let value_json =\n\n if let Some(new_value_json) = new_session_settings.get(field_name) {\n\n extrapolate_session_settings_from_session_settings(\n", "file_path": "alvr/common/src/data/session.rs", "rank": 30, "score": 201919.95498116626 }, { "content": "// session_settings does not get validated here, it must be already valid\n\nfn json_session_settings_to_settings(\n\n session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section { entries } => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n maybe_data.as_ref().map(|data_schema| {\n\n (\n\n field_name.clone(),\n\n json_session_settings_to_settings(\n\n &session_settings[field_name],\n\n &data_schema.content,\n\n ),\n\n )\n\n })\n\n })\n\n .collect(),\n", "file_path": "alvr/common/src/data/legacy_session.rs", "rank": 31, "score": 201894.20671717988 }, { "content": "#[cfg(windows)]\n\npub fn exec_fname(name: &str) -> String {\n\n format!(\"{}.exe\", name)\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 32, "score": 199620.88694793754 }, { "content": "#[cfg(windows)]\n\npub fn exec_fname(name: &str) -> String {\n\n format!(\"{}.exe\", name)\n\n}\n\n\n", "file_path": 
"alvr/xtask/src/main.rs", "rank": 33, "score": 199620.88694793754 }, { "content": "fn invoke_launcher(alvr_dir: &Path, flag: &str) -> StrResult {\n\n trace_err!(Command::new(alvr_dir.join(exec_fname(\"ALVR launcher\")))\n\n .arg(flag)\n\n .status())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 34, "score": 199426.29482348717 }, { "content": "#[derive(Clone, PartialEq)]\n\nstruct SettingsTransContext(Vec<String>);\n\n\n", "file_path": "alvr/dashboard/src/translation.rs", "rank": 35, "score": 199249.45178996376 }, { "content": "fn set_loading_message(\n\n java_vm: &JavaVM,\n\n activity_ref: &GlobalRef,\n\n hostname: &str,\n\n message: &str,\n\n) -> StrResult {\n\n let message = format!(\n\n \"ALVR v{}\\nhostname: {}\\n \\n{}\",\n\n ALVR_VERSION.to_string(),\n\n hostname,\n\n message\n\n );\n\n\n\n // Note: env = java_vm.attach_current_thread() cannot be saved into a variable because it is\n\n // not Send (compile error). This makes sense since tokio could move the execution of this\n\n // task to another thread at any time, and env is valid only within a specific thread. For\n\n // the same reason, other jni objects cannot be made into variables and the arguments must\n\n // be created inline within the call_method() call\n\n trace_err!(trace_err!(java_vm.attach_current_thread())?.call_method(\n\n activity_ref,\n", "file_path": "alvr/client/src/connection.rs", "rank": 36, "score": 199208.47854075563 }, { "content": "// Current data extrapolation strategy: match both field name and value type exactly.\n\n// Integer bounds are not validated, if they do not match the schema, deserialization will fail and\n\n// all data is lost.\n\n// Future strategies: check if value respects schema constraints, fuzzy field name matching, accept\n\n// integer to float and float to integer, tree traversal.\n\nfn extrapolate_session_settings_from_session_settings(\n\n old_session_settings: &json::Value,\n\n new_session_settings: &json::Value,\n\n schema: &SchemaNode,\n\n) -> json::Value {\n\n match schema {\n\n SchemaNode::Section { entries } => json::Value::Object(\n\n entries\n\n .iter()\n\n .filter_map(|(field_name, maybe_data)| {\n\n maybe_data.as_ref().map(|data_schema| {\n\n let value_json =\n\n if let Some(new_value_json) = new_session_settings.get(field_name) {\n\n extrapolate_session_settings_from_session_settings(\n\n &old_session_settings[field_name],\n\n new_value_json,\n\n &data_schema.content,\n\n )\n\n } else {\n\n old_session_settings[field_name].clone()\n", "file_path": "alvr/common/src/data/legacy_session.rs", "rank": 37, "score": 198186.6277430929 }, { "content": "fn download_and_extract(url: &str, target_name: &str) -> PathBuf {\n\n let random_dir_name = iter::repeat(())\n\n .map(|_| rand::thread_rng().sample(rand::distributions::Alphanumeric))\n\n .map(char::from)\n\n .take(10)\n\n .collect::<String>();\n\n let download_dir = std::env::temp_dir().join(random_dir_name);\n\n\n\n // Note: downloaded in-memory instead of on disk\n\n // todo: display progress\n\n println!(\"Downloading {}...\", target_name);\n\n let mut zip_data = vec![];\n\n ureq::get(url)\n\n .call()\n\n .unwrap()\n\n .into_reader()\n\n .read_to_end(&mut zip_data)\n\n .unwrap();\n\n\n\n println!(\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 38, "score": 196165.7845630661 }, { "content": "pub fn setting_control(\n\n schema: SchemaNode,\n\n session: json::Value,\n\n set_session: Callback<json::Value>,\n\n) -> Option<Html> {\n\n logging::show_err((|| {\n\n StrResult::Ok(match 
schema {\n\n SchemaNode::Choice(schema) => Some(html! {\n\n <ChoiceControl\n\n schema=schema\n\n session=trace_err!(json::from_value::<HashMap<_, _>>(session))?\n\n set_session=bubble_up(set_session)\n\n />\n\n }),\n\n SchemaNode::Optional(schema) => Some(html! {\n\n <OptionalControl\n\n schema=schema\n\n session=trace_err!(json::from_value::<OptionalDefault<json::Value>>(session))?\n\n set_session=bubble_up(set_session)\n\n />\n", "file_path": "alvr/dashboard/src/components/settings_controls/mod.rs", "rank": 39, "score": 195048.42740151408 }, { "content": "pub fn setting_container(\n\n schema: SchemaNode,\n\n session: json::Value,\n\n set_session: Callback<json::Value>,\n\n advanced: bool,\n\n) -> Option<Html> {\n\n logging::show_err((|| {\n\n StrResult::Ok(match schema {\n\n SchemaNode::Section(schema) => Some(html! {\n\n <Section\n\n schema=schema\n\n session=trace_err!(json::from_value::<HashMap<_, _>>(session))?\n\n set_session=bubble_up(set_session)\n\n />\n\n }),\n\n SchemaNode::Choice(schema) => choice_container(\n\n schema,\n\n trace_err!(json::from_value::<HashMap<_, _>>(session))?,\n\n bubble_up(set_session),\n\n advanced,\n", "file_path": "alvr/dashboard/src/components/settings_controls/mod.rs", "rank": 40, "score": 195048.42740151408 }, { "content": "fn choice_test_default() -> ChoiceTestDefault {\n\n ChoiceTestDefault {\n\n variant: ChoiceTestDefaultVariant::B,\n\n B: 10,\n\n C: ChoiceTestCDefault {\n\n text_c: \"Hello World\".into(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "alvr/settings-schema/examples/example0.rs", "rank": 42, "score": 191345.86983204912 }, { "content": "#[cfg(not(any(windows, target_os = \"linux\")))]\n\npub fn get_gpu_names() -> Vec<String> {\n\n vec![]\n\n}\n\n\n", "file_path": "alvr/common/src/graphics.rs", "rank": 44, "score": 189089.3456696105 }, { "content": "#[derive(Clone, PartialEq)]\n\nstruct AdvancedContext(bool);\n\n\n", "file_path": "alvr/dashboard/src/components/settings.rs", "rank": 45, "score": 187610.70007118647 }, { "content": "enum StreamReceiverType {\n\n Queue(mpsc::UnboundedReceiver<BytesMut>),\n\n // QuicReliable(...)\n\n}\n\n\n\npub struct ReceivedPacket<T> {\n\n pub header: T,\n\n pub buffer: BytesMut,\n\n pub had_packet_loss: bool,\n\n}\n\n\n\npub struct StreamReceiver<T, const ID: StreamId> {\n\n receiver: StreamReceiverType,\n\n next_packet_index: u32,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T: DeserializeOwned, const ID: StreamId> StreamReceiver<T, ID> {\n\n pub async fn recv(&mut self) -> StrResult<ReceivedPacket<T>> {\n\n let mut bytes = match &mut self.receiver {\n", "file_path": "alvr/common/src/sockets/stream_socket/mod.rs", "rank": 46, "score": 185624.9745335635 }, { "content": "#[derive(FromMeta)]\n\nenum HigherOrderType {\n\n Choice {\n\n default: String,\n\n\n\n #[darling(multiple)]\n\n #[darling(rename = \"variant\")]\n\n variants: Vec<String>,\n\n\n\n #[darling(default)]\n\n gui: Option<ChoiceControlType>,\n\n },\n\n Boolean {\n\n default: bool,\n\n },\n\n Action,\n\n}\n\n\n\n#[derive(FromMeta)]\n\npub struct HigherOrderSetting {\n\n name: String,\n", "file_path": "alvr/settings-schema-derive/src/higher_order.rs", "rank": 47, "score": 185543.6528593625 }, { "content": "#[function_component(Array)]\n\npub fn array(props: &SettingProps<Vec<SchemaNode>, Vec<json::Value>>) -> Html {\n\n html!(\"array\")\n\n}\n", "file_path": "alvr/dashboard/src/components/settings_controls/array.rs", "rank": 48, "score": 184006.618422539 }, { "content": "pub fn run_as_shell_in(workdir: &Path, shell: &str, shell_flag: &str, cmd: 
&str) -> BResult {\n\n println!(\"\\n> {}\", cmd);\n\n\n\n let output = Command::new(shell)\n\n .args(&[shell_flag, cmd])\n\n .stdout(Stdio::inherit())\n\n .current_dir(workdir)\n\n .spawn()?\n\n .wait_with_output()?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n Err(format!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n )\n\n .into())\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 49, "score": 183100.5125872053 }, { "content": "fn get_single_openvr_path(path_type: &str) -> StrResult<PathBuf> {\n\n let openvr_paths_json = load_openvr_paths_json()?;\n\n let paths_json = trace_none!(openvr_paths_json.get(path_type))?;\n\n trace_none!(from_openvr_paths(paths_json).get(0).cloned())\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 50, "score": 182839.00182381345 }, { "content": " enum class FormatFeatureFlagBits : VkFormatFeatureFlags\n\n {\n\n eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,\n\n eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,\n\n eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,\n\n eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,\n\n eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,\n\n eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,\n\n eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,\n\n eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,\n\n eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,\n\n eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,\n\n eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,\n\n eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,\n\n eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,\n\n eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,\n\n eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,\n\n eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,\n\n eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,\n\n eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 51, "score": 178743.81255998943 }, { "content": " /// token types for the parser\n\n enum class token_type\n\n {\n\n uninitialized, ///< indicating the scanner is uninitialized\n\n literal_true, ///< the `true` literal\n\n literal_false, ///< the `false` literal\n\n literal_null, ///< the `null` literal\n\n value_string, ///< a string -- use get_string() for actual value\n\n value_unsigned, ///< an unsigned integer -- use get_number_unsigned() for actual value\n\n value_integer, ///< a signed integer -- use get_number_integer() for actual value\n\n value_float, ///< an floating point number -- use get_number_float() for actual value\n\n begin_array, ///< the character for array begin `[`\n\n begin_object, ///< the character for object begin `{`\n\n end_array, ///< the character for array end `]`\n\n end_object, ///< the character for object end `}`\n\n name_separator, ///< the name separator `:`\n\n value_separator, ///< the value separator `,`\n\n parse_error, ///< indicating a parse error\n\n end_of_input, ///< indicating the end of the input buffer\n\n literal_or_value ///< a literal or the begin of a value (only for diagnostics)\n\n };\n", "file_path": 
"alvr/client/android/app/include/tinygltf/json.hpp", "rank": 52, "score": 178738.25105723305 }, { "content": "fn netsh_delete_rule_command_string(rule_name: &str) -> String {\n\n format!(\n\n \"netsh advfirewall firewall delete rule name=\\\"{}\\\"\",\n\n rule_name,\n\n )\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 53, "score": 177930.11696419382 }, { "content": " enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags\n\n {\n\n eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,\n\n eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,\n\n eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case SparseImageFormatFlagBits::eSingleMiptail : return \"SingleMiptail\";\n\n case SparseImageFormatFlagBits::eAlignedMipSize : return \"AlignedMipSize\";\n\n case SparseImageFormatFlagBits::eNonstandardBlockSize : return \"NonstandardBlockSize\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 54, "score": 173713.46460209042 }, { "content": "pub fn get_registered_drivers() -> StrResult<Vec<PathBuf>> {\n\n Ok(from_openvr_paths(trace_none!(\n\n load_openvr_paths_json()?.get_mut(\"external_drivers\")\n\n )?))\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 55, "score": 173336.2343811227 }, { "content": "fn bump_cargo_version(crate_dir_name: &str, new_version: &Version) {\n\n let manifest_path = crate::workspace_dir()\n\n .join(\"alvr\")\n\n .join(crate_dir_name)\n\n .join(\"Cargo.toml\");\n\n\n\n let mut manifest: toml_edit::Document =\n\n fs::read_to_string(&manifest_path).unwrap().parse().unwrap();\n\n\n\n manifest[\"package\"][\"version\"] = toml_edit::value(new_version.to_string());\n\n\n\n fs::write(manifest_path, manifest.to_string_in_original_order()).unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 56, "score": 169986.27742290753 }, { "content": " enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags\n\n {\n\n eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,\n\n ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,\n\n eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,\n\n eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return \"UpdateAfterBindPool\";\n\n case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return \"PushDescriptorKHR\";\n\n case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE : return \"HostOnlyPoolVALVE\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 57, "score": 168919.2295186201 }, { "content": " enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags\n\n {\n\n eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,\n\n eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,\n\n eOpaqueWin32Kmt = 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,\n\n eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,\n\n eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,\n\n eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,\n\n eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,\n\n eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,\n\n#ifdef VK_USE_PLATFORM_ANDROID_KHR\n\n eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,\n\n#endif /*VK_USE_PLATFORM_ANDROID_KHR*/\n\n eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,\n\n eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT};\n\n using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )\n\n {\n\n switch ( value )\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 58, "score": 168488.67479845032 }, { "content": " enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags\n\n {\n\n eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,\n\n eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,\n\n eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,\n\n eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT};\n\n using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return \"OpaqueFd\";\n\n case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return \"OpaqueWin32\";\n\n case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return \"OpaqueWin32Kmt\";\n\n case ExternalFenceHandleTypeFlagBits::eSyncFd : return \"SyncFd\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 59, "score": 168488.67479845032 }, { "content": " enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags\n\n {\n\n eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,\n\n eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,\n\n eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,\n\n eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,\n\n eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,\n\n eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT};\n\n using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return \"OpaqueFd\";\n\n case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return \"OpaqueWin32\";\n\n case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return \"OpaqueWin32Kmt\";\n\n case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return \"D3D12Fence\";\n\n case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return \"SyncFd\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": 
"alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 60, "score": 168488.67479845032 }, { "content": " enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT\n\n {\n\n eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,\n\n eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,\n\n eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,\n\n eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )\n\n {\n\n switch ( value )\n\n {\n\n case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return \"Verbose\";\n\n case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return \"Info\";\n\n case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return \"Warning\";\n\n case DebugUtilsMessageSeverityFlagBitsEXT::eError : return \"Error\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 61, "score": 164594.00765032347 }, { "content": " enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT\n\n {};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )\n\n {\n\n return \"(void)\";\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 62, "score": 164504.16095828536 }, { "content": " enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV\n\n {\n\n eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,\n\n eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,\n\n eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,\n\n eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return \"OpaqueWin32\";\n\n case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return \"OpaqueWin32Kmt\";\n\n case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return \"D3D11Image\";\n\n case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return \"D3D11ImageKmt\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 63, "score": 164077.311300691 }, { "content": "struct has_non_default_from_json : std::false_type {};\n\n\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 64, "score": 163150.93628483472 }, { "content": "#ifdef VK_USE_PLATFORM_VI_NN\n\n enum class ViSurfaceCreateFlagBitsNN : VkFlags\n\n {};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )\n\n {\n\n return \"(void)\";\n\n }\n\n\n\n using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )\n\n {\n\n\n\n return \"{}\";\n\n }\n\n#endif /*VK_USE_PLATFORM_VI_NN*/\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 65, "score": 162094.5078127737 }, { "content": "fn netsh_add_rule_command_string(rule_name: &str, program_path: &Path) -> String {\n\n format!(\n\n \"netsh 
advfirewall firewall add rule name=\\\"{}\\\" dir=in program=\\\"{}\\\" action=allow\",\n\n rule_name,\n\n program_path.to_string_lossy()\n\n )\n\n}\n\n\n", "file_path": "alvr/common/src/commands.rs", "rank": 66, "score": 160185.7216982675 }, { "content": " enum class IndirectCommandsTokenTypeNV\n\n {\n\n eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,\n\n eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,\n\n eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,\n\n eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,\n\n ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,\n\n eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,\n\n eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,\n\n eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )\n\n {\n\n switch ( value )\n\n {\n\n case IndirectCommandsTokenTypeNV::eShaderGroup : return \"ShaderGroup\";\n\n case IndirectCommandsTokenTypeNV::eStateFlags : return \"StateFlags\";\n\n case IndirectCommandsTokenTypeNV::eIndexBuffer : return \"IndexBuffer\";\n\n case IndirectCommandsTokenTypeNV::eVertexBuffer : return \"VertexBuffer\";\n\n case IndirectCommandsTokenTypeNV::ePushConstant : return \"PushConstant\";\n\n case IndirectCommandsTokenTypeNV::eDrawIndexed : return \"DrawIndexed\";\n\n case IndirectCommandsTokenTypeNV::eDraw : return \"Draw\";\n\n case IndirectCommandsTokenTypeNV::eDrawTasks : return \"DrawTasks\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 67, "score": 157943.64924896715 }, { "content": "fn bubble_up<T: Serialize>(set_session: Callback<json::Value>) -> Callback<T> {\n\n Callback::from(move |session| {\n\n if let Some(json) = logging::show_err(json::to_value(session)) {\n\n set_session.emit(json)\n\n }\n\n })\n\n}\n\n\n", "file_path": "alvr/dashboard/src/components/settings_controls/mod.rs", "rank": 68, "score": 157014.1682871473 }, { "content": " enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags\n\n {};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )\n\n {\n\n return \"(void)\";\n\n }\n\n\n\n using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )\n\n {\n\n\n\n return \"{}\";\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 69, "score": 156965.09231082044 }, { "content": "pub fn load_session(path: &Path) -> StrResult<SessionDesc> {\n\n trace_err!(json::from_str(&trace_err!(fs::read_to_string(path))?))\n\n}\n\n\n", "file_path": "alvr/common/src/data/session.rs", "rank": 70, "score": 153087.68313333436 }, { "content": "#[function_component(Vector)]\n\npub fn vector(props: &SettingProps<SchemaVector, VectorDefault<json::Value>>) -> Html {\n\n html!(\"vector\")\n\n}\n", "file_path": "alvr/dashboard/src/components/settings_controls/vector.rs", "rank": 71, "score": 151572.78542275738 }, { "content": "#[function_component(Dictionary)]\n\npub fn dictionary(props: &SettingProps<SchemaDictionary, DictionaryDefault<json::Value>>) -> Html {\n\n html!(\"dictionary\")\n\n}\n", "file_path": "alvr/dashboard/src/components/settings_controls/dictionary.rs", 
"rank": 72, "score": 151572.78542275738 }, { "content": "struct GUIInput {\n\n glm::vec3 headPosition = {};\n\n glm::vec3 controllersPosition[2] = {};\n\n glm::quat controllersRotation[2] = {};\n\n bool actionButtonsDown[2] = {}; // trigger, A or X; left (0) right (1) controller\n\n};\n\n\n", "file_path": "alvr/client/android/app/src/main/cpp/vr_gui.h", "rank": 73, "score": 150503.44321241323 }, { "content": " class FormatNotSupportedError : public SystemError\n\n {\n\n public:\n\n FormatNotSupportedError( std::string const& message )\n\n : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}\n\n FormatNotSupportedError( char const * message )\n\n : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}\n\n };\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 74, "score": 150174.38653314993 }, { "content": "pub fn load_session(path: &Path) -> StrResult<SessionDesc> {\n\n trace_err!(json::from_str(&trace_err!(fs::read_to_string(path))?))\n\n}\n\n\n", "file_path": "alvr/common/src/data/legacy_session.rs", "rank": 75, "score": 149869.18152108885 }, { "content": "#[cfg(windows)]\n\nfn set_mute_windows_device(device: &AudioDevice, mute: bool) -> StrResult {\n\n unsafe {\n\n let mm_device = get_windows_device(device)?;\n\n\n\n let mut endpoint_volume_ptr: *mut IAudioEndpointVolume = ptr::null_mut();\n\n let hr = mm_device.Activate(\n\n &IAudioEndpointVolume::uuidof(),\n\n CLSCTX_ALL,\n\n ptr::null_mut(),\n\n &mut endpoint_volume_ptr as *mut _ as _,\n\n );\n\n if FAILED(hr) {\n\n return fmt_e!(\n\n \"IMMDevice::Activate() for IAudioEndpointVolume failed: hr = 0x{:08x}\",\n\n hr,\n\n );\n\n }\n\n let endpoint_volume = ComPtr::from_raw(endpoint_volume_ptr);\n\n\n\n let hr = endpoint_volume.SetMute(mute as _, ptr::null_mut());\n\n if FAILED(hr) {\n\n return fmt_e!(\"Failed to mute audio device: hr = 0x{:08x}\", hr,);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/common/src/audio.rs", "rank": 76, "score": 149862.41472401336 }, { "content": "#[function_component(SwitchControl)]\n\npub fn switch_control(props: &SettingProps<SchemaSwitch, SwitchDefault<json::Value>>) -> Html {\n\n html!(\"switch control\")\n\n}\n\n\n", "file_path": "alvr/dashboard/src/components/settings_controls/switch.rs", "rank": 77, "score": 149002.76142439147 }, { "content": " enum class Format\n\n {\n\n eUndefined = VK_FORMAT_UNDEFINED,\n\n eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,\n\n eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,\n\n eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,\n\n eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,\n\n eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,\n\n eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,\n\n eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,\n\n eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,\n\n eR8Unorm = VK_FORMAT_R8_UNORM,\n\n eR8Snorm = VK_FORMAT_R8_SNORM,\n\n eR8Uscaled = VK_FORMAT_R8_USCALED,\n\n eR8Sscaled = VK_FORMAT_R8_SSCALED,\n\n eR8Uint = VK_FORMAT_R8_UINT,\n\n eR8Sint = VK_FORMAT_R8_SINT,\n\n eR8Srgb = VK_FORMAT_R8_SRGB,\n\n eR8G8Unorm = VK_FORMAT_R8G8_UNORM,\n\n eR8G8Snorm = VK_FORMAT_R8G8_SNORM,\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 78, "score": 147535.97084010957 }, { "content": "fn main() {\n\n println!(\n\n \"{}\",\n\n serde_json::to_string_pretty(&ChoiceTest::schema(choice_test_default())).unwrap()\n\n );\n\n}\n", "file_path": 
"alvr/settings-schema/examples/example0.rs", "rank": 79, "score": 146587.70003224714 }, { "content": "fn set_property(\n\n device_index: vr::TrackedDeviceIndex_t,\n\n key: vr::ETrackedDeviceProperty,\n\n value: OpenvrPropValue,\n\n) -> vr::ETrackedPropertyError {\n\n unsafe {\n\n let container_handle = vr::vrTrackedDeviceToPropertyContainer(device_index);\n\n\n\n let res = match value {\n\n OpenvrPropValue::Bool(value) => vr::vrSetBoolProperty(container_handle, key, value),\n\n OpenvrPropValue::Float(value) => vr::vrSetFloatProperty(container_handle, key, value),\n\n OpenvrPropValue::Int32(value) => vr::vrSetInt32Property(container_handle, key, value),\n\n OpenvrPropValue::Uint64(value) => vr::vrSetUint64Property(container_handle, key, value),\n\n OpenvrPropValue::Vector3(value) => {\n\n vr::vrSetVec3Property(container_handle, key, &vr::HmdVector3_t { v: value })\n\n }\n\n OpenvrPropValue::Double(value) => vr::vrSetDoubleProperty(container_handle, key, value),\n\n OpenvrPropValue::String(value) => {\n\n // unwrap never fails\n\n let c_string = CString::new(value).unwrap();\n", "file_path": "alvr/server/src/openvr.rs", "rank": 80, "score": 146587.70003224714 }, { "content": "fn main() {\n\n let default = Test2Default {\n\n variant: Test2DefaultVariant::Hello3,\n\n Hello1: 3,\n\n Hello3: Test2Hello3Default {\n\n hello3_test: true,\n\n test1: Test1Default {\n\n test: false,\n\n float: 3.,\n\n },\n\n dict: DictionaryDefault {\n\n key: \"hello\".into(),\n\n value: Test3Default { hello: 0 },\n\n content: vec![(\"hello\".into(), Test3Default { hello: 1 })],\n\n },\n\n },\n\n };\n\n\n\n println!(\n\n \"default: {}\\n\",\n\n serde_json::to_string_pretty(&default).unwrap()\n\n );\n\n\n\n let schema = Test2::schema(default);\n\n\n\n println!(\"schema: {}\", serde_json::to_string_pretty(&schema).unwrap());\n\n}\n", "file_path": "alvr/settings-schema/examples/example1.rs", "rank": 81, "score": 146587.70003224714 }, { "content": "fn build_installer(wix_path: &str) {\n\n let wix_path = PathBuf::from(wix_path).join(\"bin\");\n\n let heat_cmd = wix_path.join(\"heat.exe\");\n\n let candle_cmd = wix_path.join(\"candle.exe\");\n\n let light_cmd = wix_path.join(\"light.exe\");\n\n\n\n let mut version = Version::parse(&version::version()).unwrap();\n\n // Clear away build and prerelease version specifiers, MSI can have only dot-separated numbers.\n\n version.pre.clear();\n\n version.build.clear();\n\n\n\n command::run_without_shell(\n\n &heat_cmd.to_string_lossy(),\n\n &[\n\n \"dir\",\n\n \"build\\\\alvr_server_windows\",\n\n \"-ag\",\n\n \"-sreg\",\n\n \"-srd\",\n\n \"-dr\",\n", "file_path": "alvr/xtask/src/main.rs", "rank": 82, "score": 146569.9904729687 }, { "content": "fn build_ffmpeg(target_os: &str) {\n\n if target_os == \"windows\" {\n\n bash(&format!(\n\n \"sudo apt update && sudo apt remove --auto-remove -y gcc && sudo apt install -y {}\",\n\n \"make mingw-w64 mingw-w64-tools binutils-mingw-w64 nasm\"\n\n ))\n\n .unwrap();\n\n };\n\n\n\n let mut temp_paths = vec![];\n\n\n\n let ffmpeg_path = download_and_extract(\n\n \"https://github.com/FFmpeg/FFmpeg/archive/n4.3.2.zip\",\n\n \"FFmpeg\",\n\n );\n\n temp_paths.push(ffmpeg_path.clone());\n\n let ffmpeg_path = ffmpeg_path.join(\"FFmpeg-n4.3.2\");\n\n\n\n // todo: add more video encoders: libkvazaar, OpenH264, libvpx, libx265\n\n // AV1 encoders are excluded because of lack of hardware accelerated decoding support\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 83, "score": 146569.9904729687 }, { "content": "#[derive(SettingsSchema)]\n\nstruct 
Test1 {\n\n #[schema(higher_order(\n\n name = \"hello\",\n\n data(boolean(default = false)),\n\n modifier = r#\"{hello1.0.hello2} = {input} * {hello3}\"#,\n\n ))]\n\n #[schema(advanced)]\n\n test: bool,\n\n\n\n #[schema(min = 10., gui = \"up_down\")]\n\n float: f32,\n\n}\n\n\n", "file_path": "alvr/settings-schema/examples/example1.rs", "rank": 84, "score": 146495.39386311086 }, { "content": "#[derive(SettingsSchema)]\n\nstruct Test3 {\n\n hello: i32,\n\n}\n\n\n", "file_path": "alvr/settings-schema/examples/example1.rs", "rank": 85, "score": 146495.39386311086 }, { "content": "#[derive(SettingsSchema)]\n\n#[schema(gui = \"button_group\")]\n\nenum Test2 {\n\n Hello1(#[schema(advanced)] i32),\n\n Hello2,\n\n Hello3 {\n\n hello3_test: bool,\n\n test1: Test1,\n\n dict: Vec<(String, Test3)>,\n\n },\n\n}\n\n\n", "file_path": "alvr/settings-schema/examples/example1.rs", "rank": 86, "score": 146486.6161586669 }, { "content": "pub fn save_session(session_desc: &SessionDesc, path: &Path) -> StrResult {\n\n trace_err!(fs::write(\n\n path,\n\n trace_err!(json::to_string_pretty(session_desc))?\n\n ))\n\n}\n\n\n\n// This structure is used to store the minimum configuration data that ALVR driver needs to\n\n// initialize OpenVR before having the chance to communicate with a client. When a client is\n\n// connected, a new OpenvrConfig instance is generated, then the connection is accepted only if that\n\n// instance is equivalent to the one stored in the session, otherwise SteamVR is restarted.\n\n// Other components (like the encoder, audio recorder) don't need this treatment and are initialized\n\n// dynamically.\n\n// todo: properties that can be set after the OpenVR initialization should be removed and set with\n\n// UpdateForStream.\n\n#[derive(Serialize, Deserialize, Clone, PartialEq, Default)]\n\npub struct OpenvrConfig {\n\n pub universe_id: u64,\n\n pub headset_serial_number: String,\n\n pub headset_tracking_system_name: String,\n", "file_path": "alvr/common/src/data/session.rs", "rank": 87, "score": 145409.49632166812 }, { "content": "fn get_variant(entries: &HashMap<String, json::Value>) -> String {\n\n entries[\"variant\"].as_str().unwrap().to_owned()\n\n}\n\n\n", "file_path": "alvr/dashboard/src/components/settings_controls/choice.rs", "rank": 88, "score": 145391.72810406837 }, { "content": "fn set_custom_properties(\n\n device_index: vr::TrackedDeviceIndex_t,\n\n properties: Vec<(String, OpenvrPropValue)>,\n\n) -> StrResult {\n\n for (name, value) in properties {\n\n let key = vr::tracked_device_property_name_to_key(&name)?;\n\n\n\n let res = set_property(device_index, key, value);\n\n if res != vr::TrackedProp_Success {\n\n return fmt_e!(\"Failed to set OpenVR property {} with code={}\", name, res);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/server/src/openvr.rs", "rank": 89, "score": 143348.00896759442 }, { "content": "struct FfmpegEncoder {\n\n encoder: *mut ff::Encoder,\n\n}\n\n\n\nunsafe impl Send for FfmpegEncoder {}\n\n\n\nimpl FfmpegEncoder {\n\n pub fn new(\n\n resolution_width: u32,\n\n resolution_height: u32,\n\n fps: f32,\n\n encoder_desc: FfmpegVideoEncoderDesc,\n\n ) -> StrResult<Self> {\n\n todo!()\n\n }\n\n}\n\n\n\nimpl Drop for FfmpegEncoder {\n\n fn drop(&mut self) {}\n\n}\n", "file_path": "alvr/common/src/ffmpeg/mod.rs", "rank": 90, "score": 143346.2314371371 }, { "content": "enum EHDCPError\n\n{\n\n\tHDCPError_None = 0,\n\n\tHDCPError_LinkLost = 1,\n\n\tHDCPError_Tampered = 2,\n\n\tHDCPError_DeviceRevoked = 3,\n\n\tHDCPError_Unknown = 4\n\n};\n\n\n", 
"file_path": "alvr/server/cpp/openvr/headers/openvr.h", "rank": 91, "score": 143317.84269172975 }, { "content": "fn gui() -> impl Widget<View> {\n\n ViewSwitcher::new(\n\n |view: &View, _| view.clone(),\n\n |view, _, _| match view {\n\n View::RequirementsCheck { steamvr } => Box::new(\n\n Flex::row()\n\n .with_default_spacer()\n\n .with_flex_child(\n\n Flex::column()\n\n .cross_axis_alignment(CrossAxisAlignment::Start)\n\n .with_flex_spacer(1.0)\n\n .with_child(\n\n Label::new(steamvr.clone())\n\n .with_line_break_mode(LineBreaking::WordWrap),\n\n )\n\n .with_default_spacer()\n\n .with_flex_spacer(1.5),\n\n FlexParams::new(1.0, None),\n\n )\n\n .with_default_spacer(),\n", "file_path": "alvr/launcher/src/main.rs", "rank": 92, "score": 143289.3679271854 }, { "content": "#[allow(dead_code)]\n\n#[derive(SettingsSchema)]\n\nenum ChoiceTest {\n\n A,\n\n\n\n B(#[schema(min = -10, max = 10, step = 2, gui = \"slider\")] i32),\n\n\n\n C {\n\n #[schema(advanced)]\n\n text_c: String,\n\n },\n\n}\n\n\n", "file_path": "alvr/settings-schema/examples/example0.rs", "rank": 93, "score": 143241.87971718475 }, { "content": "pub fn run_without_shell(cmd: &str, args: &[&str]) -> BResult {\n\n println!(\n\n \"\\n> {}\",\n\n args.iter().fold(String::from(cmd), |s, arg| s + \" \" + arg)\n\n );\n\n let output = Command::new(cmd)\n\n .args(args)\n\n .stdout(Stdio::inherit())\n\n .spawn()?\n\n .wait_with_output()?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n Err(format!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n )\n\n .into())\n\n }\n\n}\n", "file_path": "alvr/xtask/src/command.rs", "rank": 94, "score": 142716.44809643133 }, { "content": "pub fn save_session(session_desc: &SessionDesc, path: &Path) -> StrResult {\n\n trace_err!(fs::write(\n\n path,\n\n trace_err!(json::to_string_pretty(session_desc))?\n\n ))\n\n}\n\n\n\n// This structure is used to store the minimum configuration data that ALVR driver needs to\n\n// initialize OpenVR before having the chance to communicate with a client. When a client is\n\n// connected, a new OpenvrConfig instance is generated, then the connection is accepted only if that\n\n// instance is equivalent to the one stored in the session, otherwise SteamVR is restarted.\n\n// Other components (like the encoder, audio recorder) don't need this treatment and are initialized\n\n// dynamically.\n\n// todo: properties that can be set after the OpenVR initialization should be removed and set with\n\n// UpdateForStream.\n\n#[derive(Serialize, Deserialize, PartialEq, Default)]\n\npub struct OpenvrConfig {\n\n pub universe_id: u64,\n\n pub headset_serial_number: String,\n\n pub headset_tracking_system_name: String,\n", "file_path": "alvr/common/src/data/legacy_session.rs", "rank": 95, "score": 142471.1694525132 } ]
Rust
src/lib.rs
wngr/libp2p-maybe-transport
395f519d8b9040c050c1441c8f97b36856a49fe1
#![allow(clippy::type_complexity)] use std::{fmt, marker::PhantomData, sync::Arc}; use futures::{ channel::mpsc, future::{self, BoxFuture}, stream::{self, BoxStream}, FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; use libp2p::{ core::{either::EitherOutput, transport::ListenerEvent}, Multiaddr, Transport, TransportError, }; use parking_lot::Mutex; pub struct CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { base: TBase, outer: TOuter, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, proxy: ProxyTransport<TBase>, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, } impl<TBase, TOuter> CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { pub fn new( base: TBase, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, ) -> Self { let proxy = ProxyTransport::<TBase>::new(base.clone()); let mut proxy_clone = proxy.clone(); proxy_clone.pending = proxy.pending.clone(); let outer = construct_outer(proxy_clone); Self { base, proxy, outer, construct_outer, try_upgrade, map_base_addr_to_outer, } } } impl<TBase, TOuter> Clone for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { fn clone(&self) -> Self { Self::new( self.base.clone(), self.construct_outer, self.try_upgrade, self.map_base_addr_to_outer, ) } } type MaybeUpgrade<TBase> = fn( <TBase as Transport>::Output, ) -> BoxFuture<'static, Result<<TBase as Transport>::Output, <TBase as Transport>::Output>>; #[derive(Debug, Copy, Clone)] pub enum CombinedError<Base, Outer> { UpgradedToOuterTransport, Base(Base), Outer(Outer), } impl<A, B> fmt::Display for CombinedError<A, B> where A: fmt::Display, B: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CombinedError::Base(a) => a.fmt(f), CombinedError::Outer(b) => b.fmt(f), CombinedError::UpgradedToOuterTransport => write!(f, "Upgraded to outer transport"), } } } impl<A, B> std::error::Error for CombinedError<A, B> where A: std::error::Error, B: std::error::Error, { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { CombinedError::Base(a) => a.source(), CombinedError::Outer(b) => b.source(), CombinedError::UpgradedToOuterTransport => None, } } } impl<TBase, TOuter> Transport for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Listener: Send + 'static, TBase::ListenerUpgrade: Send + 'static, TBase::Error: Send + 'static, TBase::Output: Send + 'static, TBase::Dial: Send + 'static, TOuter: Transport, TOuter::Listener: Send + 'static, TOuter::ListenerUpgrade: Send + 'static, TOuter::Error: 'static, TOuter::Output: 'static, TOuter::Dial: Send + 'static, { type Output = EitherOutput<TBase::Output, TOuter::Output>; type Error = CombinedError<TBase::Error, TOuter::Error>; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = BoxFuture<'static, Result<Self::Output, Self::Error>>; fn listen_on( self, addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let base_listener = self .base .listen_on(addr.clone()) .map_err(|e| e.map(CombinedError::Base))?; let (mut tx, rx) = mpsc::channel(256); let x = 
self.proxy.pending.lock().replace(rx); debug_assert!(x.is_none()); let outer_listener = self .outer .listen_on((self.map_base_addr_to_outer)(addr)) .map_err(|e| e.map(CombinedError::Outer))?; debug_assert!(self.proxy.pending.lock().is_none()); let upgrader = self.try_upgrade; let combined_listener = stream::select( base_listener .map_ok(move |ev| { let cloned = match &ev { ListenerEvent::NewAddress(a) => Some(ListenerEvent::NewAddress(a.clone())), ListenerEvent::AddressExpired(a) => { Some(ListenerEvent::AddressExpired(a.clone())) } ListenerEvent::Error(_) => None, ListenerEvent::Upgrade { .. } => None, }; if let Some(ev) = cloned { tx.start_send(ev).unwrap(); } let ev = match ev { ListenerEvent::Upgrade { upgrade, local_addr, remote_addr, } => { let local_addr_c = local_addr.clone(); let remote_addr_c = remote_addr.clone(); let mut tx_c = tx.clone(); let upgrade = async move { match upgrade.await { Ok(u) => { match upgrader(u).await { Ok(u) => { tx_c.start_send(ListenerEvent::Upgrade { upgrade: future::ok(u).boxed(), local_addr: local_addr_c, remote_addr: remote_addr_c, }) .expect("Out of sync with proxy"); Err(CombinedError::UpgradedToOuterTransport) } Err(u) => { Ok(EitherOutput::First(u)) } } } Err(e) => Err(CombinedError::Base(e)), } } .boxed(); ListenerEvent::Upgrade { local_addr, remote_addr, upgrade, } } ListenerEvent::NewAddress(a) => ListenerEvent::NewAddress(a), ListenerEvent::AddressExpired(a) => ListenerEvent::AddressExpired(a), ListenerEvent::Error(e) => ListenerEvent::Error(e), }; ev.map_err(CombinedError::Base) }) .map_err(CombinedError::Base) .boxed(), outer_listener .map_ok(|ev| { ev.map(|upgrade_fut| { upgrade_fut .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed() }) .map_err(CombinedError::Outer) }) .map_err(CombinedError::Outer) .boxed(), ) .boxed(); Ok(combined_listener) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { let addr = match self.outer.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Outer(err))) } }; let addr = match self.base.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::First) .map_err(CombinedError::Base) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Base(err))) } }; Err(TransportError::MultiaddrNotSupported(addr)) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.outer .address_translation(listen, observed) .or_else(|| self.base.address_translation(listen, observed)) } } pub struct ProxyTransport<TBase> where Self: Transport, { _marker: PhantomData<TBase>, pub(crate) pending: Arc< Mutex< Option< mpsc::Receiver< ListenerEvent<<Self as Transport>::ListenerUpgrade, <Self as Transport>::Error>, >, >, >, >, base: TBase, } impl<TBase> Clone for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn clone(&self) -> Self { Self { _marker: Default::default(), pending: Default::default(), base: self.base.clone(), } } } impl<TBase> ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn new(base: TBase) -> Self { Self { pending: 
Default::default(), _marker: Default::default(), base, } } } impl<TBase> Transport for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { type Output = TBase::Output; type Error = TBase::Error; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = TBase::Dial; fn listen_on( self, _addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let listener = self .pending .lock() .take() .expect("Only called after successful base listen"); Ok(listener.map(Ok).boxed()) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { self.base.dial(addr) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.base.address_translation(listen, observed) } }
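Before the prefix/suffix/middle split of this file, a usage sketch helps make the four `CombinedTransport::new` arguments concrete. It condenses the repository's `examples/tcp-websocket.rs`, which also appears among the retrieved context items further down: TCP as the base transport, WebSocket as the outer one, and routing decided by peeking at the first bytes of an accepted stream. The import paths and the `libp2p_combined_transport` crate name are taken from that example and the README and are assumptions; they may differ on other libp2p versions.

```rust
use futures::{future::BoxFuture, FutureExt};
use libp2p::{
    core::multiaddr::Protocol,
    tcp::{tokio::TcpStream, TokioTcpConfig},
    websocket::WsConfig,
    Transport,
};
use libp2p_combined_transport::CombinedTransport;

// try_upgrade: peek the first bytes of an accepted TCP stream. "GET" looks like the
// start of an HTTP request, so the stream is handed to the outer WebSocket transport
// (Ok); anything else stays on the plain TCP path (Err).
fn maybe_upgrade(stream: TcpStream) -> BoxFuture<'static, Result<TcpStream, TcpStream>> {
    async move {
        let mut buf = [0u8; 3];
        if stream.0.peek(&mut buf).await.is_ok() && buf == *b"GET" {
            Ok(stream)
        } else {
            Err(stream)
        }
    }
    .boxed()
}

fn build_transport() -> impl Transport {
    CombinedTransport::new(
        TokioTcpConfig::new().nodelay(true),
        // construct_outer: the WebSocket transport is built over the ProxyTransport,
        // so it sees the cloned address events of the shared TCP listener.
        WsConfig::new,
        maybe_upgrade,
        // map_base_addr_to_outer: advertise /ip4/../tcp/.. also under its /ws form.
        |mut addr| {
            addr.push(Protocol::Ws("/".into()));
            addr
        },
    )
}
```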
#![allow(clippy::type_complexity)] use std::{fmt, marker::PhantomData, sync::Arc}; use futures::{ channel::mpsc, future::{self, BoxFuture}, stream::{self, BoxStream}, FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; use libp2p::{ core::{either::EitherOutput, transport::ListenerEvent}, Multiaddr, Transport, TransportError, }; use parking_lot::Mutex; pub struct CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { base: TBase, outer: TOuter, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, proxy: ProxyTransport<TBase>, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, } impl<TBase, TOuter> CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { pub fn new( base: TBase, construct_outer: fn(ProxyTransport<TBase>) -> TOuter, try_upgrade: MaybeUpgrade<TBase>, map_base_addr_to_outer: fn(Multiaddr) -> Multiaddr, ) -> Self { let proxy = ProxyTransport::<TBase>::new(base.clone()); let mut proxy_clone = proxy.clone(); proxy_clone.pending = proxy.pending.clone(); let outer = construct_outer(proxy_clone); Self { base, proxy, outer, construct_outer, try_upgrade, map_base_addr_to_outer, } } } impl<TBase, TOuter> Clone for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Error: Send + 'static, TBase::Output: 'static, { fn clone(&self) -> Self { Self::new( self.base.clone(), self.construct_outer, self.try_upgrade, self.map_base_addr_to_outer, ) } } type MaybeUpgrade<TBase> = fn( <TBase as Transport>::Output, ) -> BoxFuture<'static, Result<<TBase as Transport>::Output, <TBase as Transport>::Output>>; #[derive(Debug, Copy, Clone)] pub enum CombinedError<Base, Outer> { UpgradedToOuterTransport, Base(Base), Outer(Outer), } impl<A, B> fmt::Display for CombinedError<A, B> where A: fmt::Display, B: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CombinedError::Base(a) => a.fmt(f), CombinedError::Outer(b) => b.fmt(f), CombinedError::UpgradedToOuterTransport => write!(f, "Upgraded to outer transport"), } } } impl<A, B> std::error::Error for CombinedError<A, B> where A: std::error::Error, B: std::error::Error, { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { CombinedError::Base(a) => a.source(), CombinedError::Outer(b) => b.source(), CombinedError::UpgradedToOuterTransport => None, } } } impl<TBase, TOuter> Transport for CombinedTransport<TBase, TOuter> where TBase: Transport + Clone, TBase::Listener: Send + 'static, TBase::ListenerUpgrade: Send + 'static, TBase::Error: Send + 'static, TBase::Output: Send + 'static, TBase::Dial: Send + 'static, TOuter: Transport, TOuter::Listener: Send + 'static, TOuter::ListenerUpgrade: Send + 'static, TOuter::Error: 'static, TOuter::Output: 'static, TOuter::Dial: Send + 'static, { type Output = EitherOutput<TBase::Output, TOuter::Output>; type Error = CombinedError<TBase::Error, TOuter::Error>; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = BoxFuture<'static, Result<Self::Output, Self::Error>>; fn listen_on( self, addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let base_listener = self .base .listen_on(addr.clone()) .map_err(|e| e.map(CombinedError::Base))?; let (mut tx, rx) = mpsc::channel(256); let x = 
self.proxy.pending.lock().replace(rx); debug_assert!(x.is_none()); let outer_listener = self .outer .listen_on((self.map_base_addr_to_outer)(addr)) .map_err(|e| e.map(CombinedError::Outer))?; debug_assert!(self.proxy.pending.lock().is_none()); let upgrader = self.try_upgrade; let combined_listener = stream::select( base_listener .map_ok(move |ev| { let cloned =
; if let Some(ev) = cloned { tx.start_send(ev).unwrap(); } let ev = match ev { ListenerEvent::Upgrade { upgrade, local_addr, remote_addr, } => { let local_addr_c = local_addr.clone(); let remote_addr_c = remote_addr.clone(); let mut tx_c = tx.clone(); let upgrade = async move { match upgrade.await { Ok(u) => { match upgrader(u).await { Ok(u) => { tx_c.start_send(ListenerEvent::Upgrade { upgrade: future::ok(u).boxed(), local_addr: local_addr_c, remote_addr: remote_addr_c, }) .expect("Out of sync with proxy"); Err(CombinedError::UpgradedToOuterTransport) } Err(u) => { Ok(EitherOutput::First(u)) } } } Err(e) => Err(CombinedError::Base(e)), } } .boxed(); ListenerEvent::Upgrade { local_addr, remote_addr, upgrade, } } ListenerEvent::NewAddress(a) => ListenerEvent::NewAddress(a), ListenerEvent::AddressExpired(a) => ListenerEvent::AddressExpired(a), ListenerEvent::Error(e) => ListenerEvent::Error(e), }; ev.map_err(CombinedError::Base) }) .map_err(CombinedError::Base) .boxed(), outer_listener .map_ok(|ev| { ev.map(|upgrade_fut| { upgrade_fut .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed() }) .map_err(CombinedError::Outer) }) .map_err(CombinedError::Outer) .boxed(), ) .boxed(); Ok(combined_listener) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { let addr = match self.outer.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::Second) .map_err(CombinedError::Outer) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Outer(err))) } }; let addr = match self.base.dial(addr) { Ok(connec) => { return Ok(connec .map_ok(EitherOutput::First) .map_err(CombinedError::Base) .boxed()) } Err(TransportError::MultiaddrNotSupported(addr)) => addr, Err(TransportError::Other(err)) => { return Err(TransportError::Other(CombinedError::Base(err))) } }; Err(TransportError::MultiaddrNotSupported(addr)) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.outer .address_translation(listen, observed) .or_else(|| self.base.address_translation(listen, observed)) } } pub struct ProxyTransport<TBase> where Self: Transport, { _marker: PhantomData<TBase>, pub(crate) pending: Arc< Mutex< Option< mpsc::Receiver< ListenerEvent<<Self as Transport>::ListenerUpgrade, <Self as Transport>::Error>, >, >, >, >, base: TBase, } impl<TBase> Clone for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn clone(&self) -> Self { Self { _marker: Default::default(), pending: Default::default(), base: self.base.clone(), } } } impl<TBase> ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { fn new(base: TBase) -> Self { Self { pending: Default::default(), _marker: Default::default(), base, } } } impl<TBase> Transport for ProxyTransport<TBase> where TBase: Transport + Clone, TBase::Output: 'static, TBase::Error: Send + 'static, { type Output = TBase::Output; type Error = TBase::Error; type Listener = BoxStream<'static, Result<ListenerEvent<Self::ListenerUpgrade, Self::Error>, Self::Error>>; type ListenerUpgrade = BoxFuture<'static, Result<Self::Output, Self::Error>>; type Dial = TBase::Dial; fn listen_on( self, _addr: libp2p::Multiaddr, ) -> Result<Self::Listener, libp2p::TransportError<Self::Error>> where Self: Sized, { let listener = self .pending 
.lock() .take() .expect("Only called after successful base listen"); Ok(listener.map(Ok).boxed()) } fn dial( self, addr: libp2p::Multiaddr, ) -> Result<Self::Dial, libp2p::TransportError<Self::Error>> where Self: Sized, { self.base.dial(addr) } fn address_translation( &self, listen: &libp2p::Multiaddr, observed: &libp2p::Multiaddr, ) -> Option<libp2p::Multiaddr> { self.base.address_translation(listen, observed) } }
match &ev {
    ListenerEvent::NewAddress(a) => Some(ListenerEvent::NewAddress(a.clone())),
    ListenerEvent::AddressExpired(a) => {
        Some(ListenerEvent::AddressExpired(a.clone()))
    }
    ListenerEvent::Error(_) => None,
    ListenerEvent::Upgrade { .. } => None,
}
if_condition
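For orientation: the `if_condition` strategy masks the right-hand side of the `let cloned = …` binding inside `listen_on` — the prefix above ends at `let cloned =` and the suffix begins with the `; if let Some(ev) = cloned` that consumes it. Reassembled from this row's own prefix, middle, and suffix (comments added here only for readability):

```rust
// Clone only the address-related listener events; upgrades and errors are not
// duplicated here (successful upgrades are forwarded separately, after try_upgrade).
let cloned = match &ev {
    ListenerEvent::NewAddress(a) => Some(ListenerEvent::NewAddress(a.clone())),
    ListenerEvent::AddressExpired(a) => {
        Some(ListenerEvent::AddressExpired(a.clone()))
    }
    ListenerEvent::Error(_) => None,
    ListenerEvent::Upgrade { .. } => None,
};
if let Some(ev) = cloned {
    // Feed the cloned address event into the channel backing the ProxyTransport
    // listener, so the outer (wrapped) transport also observes it.
    tx.start_send(ev).unwrap();
}
```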
[ { "content": "fn maybe_upgrade(r: TcpStream) -> BoxFuture<'static, Result<TcpStream, TcpStream>> {\n\n async move {\n\n let mut buffer = [0; 3];\n\n if r.0.peek(&mut buffer).await.is_ok() && buffer == *b\"GET\" {\n\n println!(\"It's probably HTTP\");\n\n Ok(r)\n\n } else {\n\n println!(\"It's probably not HTTP\");\n\n Err(r)\n\n }\n\n }\n\n .boxed()\n\n}\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 1, "score": 56007.04531683649 }, { "content": "enum TransportKind {\n\n Tcp,\n\n Websocket,\n\n Combined,\n\n}\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 2, "score": 45054.154528948784 }, { "content": "fn mk_transport(kind: TransportKind) -> (PeerId, Boxed<(PeerId, StreamMuxerBox)>) {\n\n let tcp = TokioTcpConfig::new().nodelay(true);\n\n let base_transport = match kind {\n\n TransportKind::Tcp => EitherTransport::Left(EitherTransport::Left(tcp)),\n\n TransportKind::Websocket => {\n\n EitherTransport::Left(EitherTransport::Right(WsConfig::new(tcp)))\n\n }\n\n TransportKind::Combined => EitherTransport::Right(CombinedTransport::new(\n\n tcp,\n\n WsConfig::new,\n\n maybe_upgrade,\n\n |mut addr| {\n\n addr.push(Protocol::Ws(\"/\".into()));\n\n addr\n\n },\n\n )),\n\n };\n\n let id_keys = identity::Keypair::generate_ed25519();\n\n let local_peer_id = PeerId::from(id_keys.public());\n\n let noise_keys = noise::Keypair::<noise::X25519Spec>::new()\n", "file_path": "examples/tcp-websocket.rs", "rank": 3, "score": 31560.376026680897 }, { "content": "fn mk_swarm(kind: TransportKind) -> Swarm<ping::Ping> {\n\n let (peer_id, transport) = mk_transport(kind);\n\n let behaviour = ping::Ping::new(ping::PingConfig::new().with_keep_alive(true));\n\n let b = SwarmBuilder::new(transport, behaviour, peer_id).executor(Box::new(|f| {\n\n tokio::spawn(f);\n\n }));\n\n b.build()\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> anyhow::Result<()> {\n\n let mut combined_swarm = mk_swarm(TransportKind::Combined);\n\n let mut ws_swarm = mk_swarm(TransportKind::Websocket);\n\n let mut tcp_swarm = mk_swarm(TransportKind::Tcp);\n\n\n\n let (tx, mut rx) = mpsc::channel(2);\n\n\n\n tokio::spawn(async move {\n\n combined_swarm.listen_on(\"/ip4/127.0.0.1/tcp/0\".parse()?)?;\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 4, "score": 30807.557740790766 }, { "content": "# libp2p-combined-transport\n\n\n\n[![License](https://img.shields.io/badge/license-MIT%2FApache--2.0-blue.svg)](https://github.com/wngr/libp2p-combined-transport)\n\n[![Cargo](https://img.shields.io/crates/v/libp2p-combined-transport.svg)](https://crates.io/crates/libp2p-combined-transport)\n\n[![Documentation](https://docs.rs/libp2p-combined-transport/badge.svg)](https://docs.rs/libp2p-combined-transport)\n\n\n\nLibp2p Transport combining two other transports. One of which is the\n\nbase transport (like TCP), and another one is a higher-level transport\n\n(like WebSocket). Similar to [`OrTransport`], this tries to dial first\n\nwith the outer connection, and if that fails, with the base one. The\n\nmain difference is that incoming connections can be accepted on either\n\none of them. For this to work, a switch must be provided when handling\n\nincoming connections. 
For example for TCP, this can be achieved with\n\nthe [`peek`] method on the underlying [`TcpStream`].\n\n[`ListenerEvent`]s from the base transport are cloned and routed to\n\nthe outer transport via the [`ProxyTransport`], with the exception of\n\nupgrades.\n\n\n\n[`peek`]: https://doc.rust-lang.org/std/net/struct.TcpStream.html#method.peek\n\n\n\n\n\nFor a usage example, have a look at the [TCP-Websocket example](https://github.com/wngr/libp2p-combined-transport/tree/master/examples/tcp-websocket.rs).\n", "file_path": "README.md", "rank": 5, "score": 22033.573337585207 }, { "content": "MIT License\n\n\n\nCopyright (c) 2019 Rüdiger Klaehn\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE-MIT.md", "rank": 6, "score": 21248.69718445138 }, { "content": " stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n\n the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. 
You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n", "file_path": "LICENSE-APACHE2.md", "rank": 7, "score": 21246.973824882814 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE-APACHE2.md", "rank": 8, "score": 21246.920849178307 }, { "content": " APPENDIX: How to apply the Apache License to your work.\n\n\n\n To apply the Apache License to your work, attach the following\n\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n\n replaced with your own identifying information. (Don't include\n\n the brackets!) The text should be enclosed in the appropriate\n\n comment syntax for the file format. 
We also recommend that a\n\n file or class name and description of purpose be included on the\n\n same \"printed page\" as the copyright notice for easier\n\n identification within third-party archives.\n\n\n\n Copyright [yyyy] [name of copyright owner]\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n", "file_path": "LICENSE-APACHE2.md", "rank": 9, "score": 21246.908780756534 }, { "content": " subsequently incorporated within the Work.\n\n\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n\n Work and such Derivative Works in Source or Object form.\n\n\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n 4. Redistribution. You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n", "file_path": "LICENSE-APACHE2.md", "rank": 10, "score": 21246.846036137347 }, { "content": " the conditions stated in this License.\n\n\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n\n with Licensor regarding such Contributions.\n\n\n\n 6. Trademarks. 
This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n", "file_path": "LICENSE-APACHE2.md", "rank": 11, "score": 21246.65350669249 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE-APACHE2.md", "rank": 12, "score": 21246.265154482702 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n", "file_path": "LICENSE-APACHE2.md", "rank": 13, "score": 21245.53222434103 }, { "content": " while let Some(ev) = combined_swarm.next().await {\n\n println!(\"Combined: {:?}\", ev);\n\n match ev {\n\n libp2p::swarm::SwarmEvent::Behaviour(e) => println!(\"Combined: {:?}\", e),\n\n libp2p::swarm::SwarmEvent::NewListenAddr { address, .. } => {\n\n println!(\"Combined: Bound to {}\", address);\n\n tx.send(address).await?;\n\n }\n\n libp2p::swarm::SwarmEvent::ConnectionClosed { .. } => panic_any(ev),\n\n _ => {}\n\n }\n\n }\n\n\n\n anyhow::Result::<_, anyhow::Error>::Ok(())\n\n });\n\n\n\n let mut zero_addr = loop {\n\n let addr = rx.recv().await.context(\":-(\")?;\n\n if matches!(addr.iter().last(), Some(Protocol::Tcp(_))) {\n\n break addr;\n", "file_path": "examples/tcp-websocket.rs", "rank": 28, "score": 17.14087179364896 }, { "content": "use std::panic::panic_any;\n\n\n\nuse anyhow::Context;\n\nuse futures::{future::BoxFuture, FutureExt, StreamExt};\n\nuse libp2p::{\n\n core::{\n\n either::EitherTransport,\n\n muxing::StreamMuxerBox,\n\n transport::{upgrade, Boxed},\n\n },\n\n identity, mplex,\n\n multiaddr::Protocol,\n\n noise, ping,\n\n swarm::SwarmBuilder,\n\n tcp::{tokio::TcpStream, TokioTcpConfig},\n\n websocket::WsConfig,\n\n PeerId, Swarm, Transport,\n\n};\n\nuse libp2p_combined_transport::CombinedTransport;\n\nuse tokio::sync::mpsc;\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 36, "score": 11.332353364583092 }, { "content": " }\n\n };\n\n\n\n tcp_swarm.dial_addr(zero_addr.clone())?;\n\n\n\n tokio::spawn(async move {\n\n while let Some(ev) = tcp_swarm.next().await {\n\n match ev {\n\n libp2p::swarm::SwarmEvent::Behaviour(e) => println!(\"Tcp: {:?}\", e),\n\n libp2p::swarm::SwarmEvent::ConnectionEstablished {\n\n peer_id, endpoint, ..\n\n } => println!(\"Tcp: Connected to {} at {:?}\", peer_id, endpoint),\n\n libp2p::swarm::SwarmEvent::ConnectionClosed { .. 
} => panic_any(ev),\n\n _ => {}\n\n }\n\n }\n\n });\n\n zero_addr.push(Protocol::Ws(\"/\".into()));\n\n ws_swarm.dial_addr(zero_addr)?;\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 37, "score": 11.00602127121776 }, { "content": " .into_authentic(&id_keys)\n\n .unwrap();\n\n\n\n let transport = base_transport\n\n .upgrade(upgrade::Version::V1)\n\n .authenticate(noise::NoiseConfig::xx(noise_keys).into_authenticated())\n\n .multiplex(mplex::MplexConfig::new())\n\n .boxed();\n\n (local_peer_id, transport)\n\n}\n\n\n", "file_path": "examples/tcp-websocket.rs", "rank": 38, "score": 7.309131605639605 }, { "content": " while let Some(ev) = ws_swarm.next().await {\n\n match ev {\n\n libp2p::swarm::SwarmEvent::Behaviour(e) => println!(\"Ws: {:?}\", e),\n\n libp2p::swarm::SwarmEvent::ConnectionEstablished {\n\n peer_id, endpoint, ..\n\n } => println!(\"Ws: Connected to {} at {:?}\", peer_id, endpoint),\n\n libp2p::swarm::SwarmEvent::ConnectionClosed { .. } => panic_any(ev),\n\n _ => {}\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/tcp-websocket.rs", "rank": 39, "score": 6.374542812412564 } ]
Rust
src/gfx.rs
1HPorange/maboy
e8dc60b776edc95d71c234d641aceb3ab0ce1bab
use super::hresult_error::*; use super::window::Window; use maboy::MemPixel; use std::marker::PhantomData; use std::mem::MaybeUninit; use std::pin::Pin; use std::ptr; use winapi::shared::dxgi::*; use winapi::shared::dxgiformat::*; use winapi::shared::minwindef::*; use winapi::shared::winerror::*; use winapi::shared::{dxgi1_2::*, dxgitype::*}; use winapi::um::d3d11::*; use winapi::um::d3dcommon::*; use winapi::um::unknwnbase::IUnknown; use winapi::Interface; use wio::com::ComPtr; pub struct GfxDevice { d: ComPtr<ID3D11Device>, dc: ComPtr<ID3D11DeviceContext>, dxgi_factory: ComPtr<IDXGIFactory2>, } impl GfxDevice { pub fn new() -> Result<GfxDevice, HResultError> { unsafe { let mut flags = D3D11_CREATE_DEVICE_SINGLETHREADED; if cfg!(debug_assertions) { flags |= D3D11_CREATE_DEVICE_DEBUG; } let mut d = ptr::null_mut(); let mut dc = ptr::null_mut(); D3D11CreateDevice( ptr::null_mut(), D3D_DRIVER_TYPE_HARDWARE, ptr::null_mut(), flags, ptr::null(), 0, D3D11_SDK_VERSION, &mut d, ptr::null_mut(), &mut dc, ) .into_result()?; let d = ComPtr::from_raw(d); let dc = ComPtr::from_raw(dc); let mut dxgi_device = ptr::null_mut(); d.QueryInterface(&IDXGIDevice2::uuidof(), &mut dxgi_device) .into_result()?; let dxgi_device = ComPtr::from_raw(dxgi_device as *mut IDXGIDevice2); let mut dxgi_adapter = ptr::null_mut(); dxgi_device.GetAdapter(&mut dxgi_adapter).into_result()?; let dxgi_adapter = ComPtr::from_raw(dxgi_adapter as *mut IDXGIAdapter2); let mut dxgi_factory = ptr::null_mut(); dxgi_adapter .GetParent(&IDXGIFactory2::uuidof(), &mut dxgi_factory) .into_result()?; let dxgi_factory = ComPtr::from_raw(dxgi_factory as *mut IDXGIFactory2); Ok(GfxDevice { d, dc, dxgi_factory, }) } } pub fn create_gfx_window<I: Into<Option<u32>>>( &self, window: &Pin<Box<Window>>, width: I, height: I, ) -> Result<GfxWindow, HResultError> { unsafe { let scd = DXGI_SWAP_CHAIN_DESC1 { Width: width.into().unwrap_or(0), Height: height.into().unwrap_or(0), Format: DXGI_FORMAT_R8G8B8A8_UNORM, Stereo: FALSE, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0, }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: 2, Scaling: DXGI_SCALING_STRETCH, SwapEffect: DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL, AlphaMode: DXGI_ALPHA_MODE_UNSPECIFIED, Flags: DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING, }; let mut swap_chain = ptr::null_mut(); self.dxgi_factory .CreateSwapChainForHwnd( self.d.as_raw() as *mut IUnknown, window.hwnd(), &scd, ptr::null(), ptr::null_mut(), &mut swap_chain, ) .into_result()?; let swap_chain = ComPtr::from_raw(swap_chain); let mut backbuffer = ptr::null_mut(); swap_chain .GetBuffer(0, &ID3D11Texture2D::uuidof(), &mut backbuffer) .into_result()?; let backbuffer = ComPtr::from_raw(backbuffer as *mut ID3D11Texture2D); let mut backbuffer_desc: D3D11_TEXTURE2D_DESC = MaybeUninit::zeroed().assume_init(); backbuffer.GetDesc(&mut backbuffer_desc); let mut viewport: D3D11_VIEWPORT = MaybeUninit::zeroed().assume_init(); viewport.Height = backbuffer_desc.Height as f32; viewport.Width = backbuffer_desc.Width as f32; viewport.MinDepth = 0.0; viewport.MaxDepth = 1.0; let mut backbuffer_rtv = ptr::null_mut(); self.d .CreateRenderTargetView( backbuffer.as_raw() as *mut ID3D11Resource, ptr::null(), &mut backbuffer_rtv, ) .into_result()?; let backbuffer_rtv = ComPtr::from_raw(backbuffer_rtv); Ok(GfxWindow { device_context: self.dc.clone(), swap_chain, backbuffer, backbuffer_rtv, viewport, _window: PhantomData, }) } } } pub struct GfxWindow<'w> { device_context: ComPtr<ID3D11DeviceContext>, swap_chain: ComPtr<IDXGISwapChain1>, backbuffer: 
ComPtr<ID3D11Texture2D>, backbuffer_rtv: ComPtr<ID3D11RenderTargetView>, viewport: D3D11_VIEWPORT, _window: PhantomData<&'w ()>, } impl<'w> GfxWindow<'w> { pub fn next_frame(&mut self) -> GfxFrame<'_, 'w> { GfxFrame(self) } } pub struct GfxFrame<'a, 'w>(&'a mut GfxWindow<'w>); impl GfxFrame<'_, '_> { pub fn clear(&mut self, color: &[f32; 4]) { unsafe { self.0 .device_context .ClearRenderTargetView(self.0.backbuffer_rtv.as_raw(), color); } } pub fn copy_from_slice(&mut self, data: &[MemPixel]) { unsafe { assert_eq!( data.len(), self.0.viewport.Width as usize * self.0.viewport.Height as usize, "Slice does not have the exact number of pixels that the window backbuffer requires" ); self.0.device_context.UpdateSubresource( self.0.backbuffer.as_raw() as *mut ID3D11Resource, 0, ptr::null(), data as *const _ as *const std::ffi::c_void, self.0.viewport.Width as u32 * 4, 0, ); } } pub fn present(self, blocking: bool) -> Result<(), HResultError> { unsafe { let (sync_interval, flags) = if blocking { (1, 0) } else { (0, DXGI_PRESENT_ALLOW_TEARING) }; let result = self .0 .swap_chain .Present(sync_interval, flags) .into_result(); if matches!(result, Err(HResultError(DXGI_ERROR_WAS_STILL_DRAWING))) { return Ok(()); } else { result } } } }
use super::hresult_error::*; use super::window::Window; use maboy::MemPixel; use std::marker::PhantomData; use std::mem::MaybeUninit; use std::pin::Pin; use std::ptr; use winapi::shared::dxgi::*; use winapi::shared::dxgiformat::*; use winapi::shared::minwindef::*; use winapi::shared::winerror::*; use winapi::shared::{dxgi1_2::*, dxgitype::*}; use winapi::um::d3d11::*; use winapi::um::d3dcommon::*; use winapi::um::unknwnbase::IUnknown; use winapi::Interface; use wio::com::ComPtr; pub struct GfxDevice { d: ComPtr<ID3D11Device>, dc: ComPtr<ID3D11DeviceContext>, dxgi_factory: ComPtr<IDXGIFactory2>, } impl GfxDevice { pub fn new() -> Result<GfxDevice, HResultError> { unsafe { let mut flags = D3D11_CREATE_DEVICE_SINGLETHREADED; if cfg!(debug_assertions) { flags |= D3D11_CREATE_DEVICE_DEBUG; } let mut d = ptr::null_mut(); let mut dc = ptr::null_mut(); D3D11CreateDevice( ptr::null_mut(), D3D_DRIVER_TYPE_HARDWARE, ptr::null_mut(), flags, ptr::null(), 0, D3D11_SDK_VERSION, &mut d, ptr::null_mut(), &mut dc, ) .into_result()?; let d = ComPtr::from_raw(d); let d
ckbuffer_rtv, ) .into_result()?; let backbuffer_rtv = ComPtr::from_raw(backbuffer_rtv); Ok(GfxWindow { device_context: self.dc.clone(), swap_chain, backbuffer, backbuffer_rtv, viewport, _window: PhantomData, }) } } } pub struct GfxWindow<'w> { device_context: ComPtr<ID3D11DeviceContext>, swap_chain: ComPtr<IDXGISwapChain1>, backbuffer: ComPtr<ID3D11Texture2D>, backbuffer_rtv: ComPtr<ID3D11RenderTargetView>, viewport: D3D11_VIEWPORT, _window: PhantomData<&'w ()>, } impl<'w> GfxWindow<'w> { pub fn next_frame(&mut self) -> GfxFrame<'_, 'w> { GfxFrame(self) } } pub struct GfxFrame<'a, 'w>(&'a mut GfxWindow<'w>); impl GfxFrame<'_, '_> { pub fn clear(&mut self, color: &[f32; 4]) { unsafe { self.0 .device_context .ClearRenderTargetView(self.0.backbuffer_rtv.as_raw(), color); } } pub fn copy_from_slice(&mut self, data: &[MemPixel]) { unsafe { assert_eq!( data.len(), self.0.viewport.Width as usize * self.0.viewport.Height as usize, "Slice does not have the exact number of pixels that the window backbuffer requires" ); self.0.device_context.UpdateSubresource( self.0.backbuffer.as_raw() as *mut ID3D11Resource, 0, ptr::null(), data as *const _ as *const std::ffi::c_void, self.0.viewport.Width as u32 * 4, 0, ); } } pub fn present(self, blocking: bool) -> Result<(), HResultError> { unsafe { let (sync_interval, flags) = if blocking { (1, 0) } else { (0, DXGI_PRESENT_ALLOW_TEARING) }; let result = self .0 .swap_chain .Present(sync_interval, flags) .into_result(); if matches!(result, Err(HResultError(DXGI_ERROR_WAS_STILL_DRAWING))) { return Ok(()); } else { result } } } }
c = ComPtr::from_raw(dc); let mut dxgi_device = ptr::null_mut(); d.QueryInterface(&IDXGIDevice2::uuidof(), &mut dxgi_device) .into_result()?; let dxgi_device = ComPtr::from_raw(dxgi_device as *mut IDXGIDevice2); let mut dxgi_adapter = ptr::null_mut(); dxgi_device.GetAdapter(&mut dxgi_adapter).into_result()?; let dxgi_adapter = ComPtr::from_raw(dxgi_adapter as *mut IDXGIAdapter2); let mut dxgi_factory = ptr::null_mut(); dxgi_adapter .GetParent(&IDXGIFactory2::uuidof(), &mut dxgi_factory) .into_result()?; let dxgi_factory = ComPtr::from_raw(dxgi_factory as *mut IDXGIFactory2); Ok(GfxDevice { d, dc, dxgi_factory, }) } } pub fn create_gfx_window<I: Into<Option<u32>>>( &self, window: &Pin<Box<Window>>, width: I, height: I, ) -> Result<GfxWindow, HResultError> { unsafe { let scd = DXGI_SWAP_CHAIN_DESC1 { Width: width.into().unwrap_or(0), Height: height.into().unwrap_or(0), Format: DXGI_FORMAT_R8G8B8A8_UNORM, Stereo: FALSE, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, Quality: 0, }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: 2, Scaling: DXGI_SCALING_STRETCH, SwapEffect: DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL, AlphaMode: DXGI_ALPHA_MODE_UNSPECIFIED, Flags: DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING, }; let mut swap_chain = ptr::null_mut(); self.dxgi_factory .CreateSwapChainForHwnd( self.d.as_raw() as *mut IUnknown, window.hwnd(), &scd, ptr::null(), ptr::null_mut(), &mut swap_chain, ) .into_result()?; let swap_chain = ComPtr::from_raw(swap_chain); let mut backbuffer = ptr::null_mut(); swap_chain .GetBuffer(0, &ID3D11Texture2D::uuidof(), &mut backbuffer) .into_result()?; let backbuffer = ComPtr::from_raw(backbuffer as *mut ID3D11Texture2D); let mut backbuffer_desc: D3D11_TEXTURE2D_DESC = MaybeUninit::zeroed().assume_init(); backbuffer.GetDesc(&mut backbuffer_desc); let mut viewport: D3D11_VIEWPORT = MaybeUninit::zeroed().assume_init(); viewport.Height = backbuffer_desc.Height as f32; viewport.Width = backbuffer_desc.Width as f32; viewport.MinDepth = 0.0; viewport.MaxDepth = 1.0; let mut backbuffer_rtv = ptr::null_mut(); self.d .CreateRenderTargetView( backbuffer.as_raw() as *mut ID3D11Resource, ptr::null(), &mut ba
random
[ { "content": "pub fn rlca(cpu: &mut CPU) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n cpu.reg.set_r8(R8::A, old.rotate_left(1));\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 0, "score": 156105.69888522007 }, { "content": "pub fn rrca(cpu: &mut CPU) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n\n\n cpu.reg.set_r8(R8::A, old.rotate_right(1));\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 1, "score": 156105.69888522004 }, { "content": "pub fn daa(cpu: &mut CPU) {\n\n // DAA is kind of infamous for having complicated behaviour\n\n // This is why I took the source code from https://forums.nesdev.com/viewtopic.php?t=15944\n\n\n\n let mut new = cpu.reg.get_r8(R8::A);\n\n\n\n // note: assumes a is a uint8_t and wraps from 0xff to 0\n\n if !cpu.reg.flags.contains(Flags::N) {\n\n // after an addition, adjust if (half-)carry occurred or if result is out of bounds\n\n if cpu.reg.flags.contains(Flags::C) || new > 0x99 {\n\n new = new.wrapping_add(0x60);\n\n cpu.reg.flags.insert(Flags::C);\n\n }\n\n if cpu.reg.flags.contains(Flags::H) || (new & 0x0f) > 0x09 {\n\n new = new.wrapping_add(0x6);\n\n }\n\n } else {\n\n // after a subtraction, only adjust if (half-)carry occurred\n\n if cpu.reg.flags.contains(Flags::C) {\n\n new = new.wrapping_sub(0x60);\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 2, "score": 156105.69888522007 }, { "content": "pub fn rra(cpu: &mut CPU) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n let new = (old >> 1)\n\n + if cpu.reg.flags.contains(Flags::C) {\n\n 0b_1000_0000\n\n } else {\n\n 0\n\n };\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 3, "score": 156105.69888522004 }, { "content": "pub fn cpl(cpu: &mut CPU) {\n\n cpu.reg.set_r8(R8::A, !cpu.reg.get_r8(R8::A));\n\n cpu.reg.flags.insert(Flags::N | Flags::H);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 4, "score": 156105.69888522007 }, { "content": "pub fn ccf(cpu: &mut CPU) {\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.toggle(Flags::C);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 5, "score": 156105.69888522007 }, { "content": "pub fn scf(cpu: &mut CPU) {\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.insert(Flags::C);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 6, "score": 156105.69888522004 }, { "content": "pub fn rla(cpu: &mut CPU) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n let new = (old << 1)\n\n + if cpu.reg.flags.contains(Flags::C) {\n\n 1\n\n } else {\n\n 0\n\n };\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 7, "score": 156105.69888522007 }, { "content": "pub fn pop_af<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n cpu.reg.set_r16(R16::AF, board.read16(cpu.reg.sp));\n\n cpu.reg.sp = cpu.reg.sp.wrapping_add(2);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 8, "score": 145212.68164695456 }, { "content": "pub fn jp_hl<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n cpu.reg.pc = cpu.reg.hl;\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(cpu.reg.hl));\n\n}\n\n\n", 
"file_path": "maboy/src/cpu/execute.rs", "rank": 9, "score": 145212.68164695456 }, { "content": "pub fn ld_sp_hl<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n cpu.reg.sp = cpu.reg.hl;\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 10, "score": 143304.63430093668 }, { "content": "pub fn add_sp_r8<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n let offset = unsafe { std::mem::transmute::<u8, i8>(cpu.read8i(board)) } as i32;\n\n let old = cpu.reg.sp as i32;\n\n\n\n cpu.reg.sp = (old + offset) as u16;\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0xF) + (offset & 0xF) > 0xF);\n\n cpu.reg\n\n .flags\n\n .set(Flags::C, (old & 0xFF) + (offset & 0xFF) > 0xFF);\n\n\n\n board.advance_mcycle();\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 11, "score": 143304.63430093668 }, { "content": "pub fn ld_a16_sp<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n let addr = cpu.read16i(board);\n\n board.write16(addr, cpu.reg.sp);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 12, "score": 143304.63430093668 }, { "content": "pub fn ld_hl_sp_r8<B: Board>(cpu: &mut CPU, board: &mut B) {\n\n let offset = unsafe { std::mem::transmute::<u8, i8>(cpu.read8i(board)) } as i32;\n\n let sp = cpu.reg.sp as i32;\n\n\n\n cpu.reg.hl = (sp + offset) as u16;\n\n\n\n cpu.reg.flags.remove(Flags::Z | Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (sp & 0xF) + (offset & 0xF) > 0xF);\n\n cpu.reg\n\n .flags\n\n .set(Flags::C, (sp & 0xFF) + (offset & 0xFF) > 0xFF);\n\n\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 13, "score": 141475.90969507914 }, { "content": "pub fn push<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n cpu.reg.sp = cpu.reg.sp.wrapping_sub(2);\n\n board.advance_mcycle();\n\n board.write16(cpu.reg.sp, cpu.reg.get_r16(rr));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 14, "score": 139131.30445042718 }, { "content": "pub fn pop<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n cpu.reg.set_r16(rr, board.read16(cpu.reg.sp));\n\n cpu.reg.sp = cpu.reg.sp.wrapping_add(2);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 15, "score": 139131.30445042718 }, { "content": "pub fn rst<B: Board>(cpu: &mut CPU, board: &mut B, target: u16) {\n\n push(cpu, board, R16::PC);\n\n cpu.reg.pc = target;\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(target));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 16, "score": 139131.30445042718 }, { "content": "pub fn ret_cond<B: Board>(cpu: &mut CPU, board: &mut B, cond: bool) {\n\n board.advance_mcycle();\n\n\n\n if cond {\n\n // This call already pushes the debug event, so no need for us to do that\n\n ret(cpu, board, false);\n\n } else {\n\n // It's really important that this is an *instant* read, since it's only a debug thingy\n\n board.push_cpu_evt(CpuEvt::SkipJmpTo(board.read16_instant(cpu.reg.sp)));\n\n }\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 17, "score": 137302.57984456967 }, { "content": "/// Due to timing differences, this function CANNOT be expressed as ret_cond(..., true)!!!\n\npub fn ret<B: Board>(cpu: &mut CPU, board: &mut B, enable_ime: bool) {\n\n pop(cpu, board, R16::PC);\n\n\n\n if enable_ime {\n\n cpu.set_ime(board, true);\n\n }\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(cpu.reg.pc));\n\n\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 18, "score": 
137302.57984456964 }, { "content": "pub fn jr_cond<B: Board>(cpu: &mut CPU, board: &mut B, cond: bool) {\n\n let offset = cpu.read8i(board) as i8;\n\n\n\n if cond {\n\n cpu.reg.pc = cpu.reg.pc.wrapping_add(offset as u16);\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(cpu.reg.pc));\n\n\n\n board.advance_mcycle();\n\n } else {\n\n board.push_cpu_evt(CpuEvt::SkipJmpTo(cpu.reg.pc.wrapping_add(offset as u16)));\n\n }\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 19, "score": 137302.57984456967 }, { "content": "pub fn jp_cond<B: Board>(cpu: &mut CPU, board: &mut B, cond: bool) {\n\n let target = cpu.read16i(board);\n\n\n\n if cond {\n\n cpu.reg.pc = target;\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(target));\n\n\n\n board.advance_mcycle();\n\n } else {\n\n board.push_cpu_evt(CpuEvt::SkipJmpTo(target));\n\n }\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 20, "score": 137302.57984456964 }, { "content": "pub fn inc_rr<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n cpu.reg.set_r16(rr, cpu.reg.get_r16(rr).wrapping_add(1));\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 21, "score": 137302.57984456967 }, { "content": "pub fn dec_rr<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n cpu.reg.set_r16(rr, cpu.reg.get_r16(rr).wrapping_sub(1));\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 22, "score": 137302.57984456964 }, { "content": "pub fn call_cond<B: Board>(cpu: &mut CPU, board: &mut B, cond: bool) {\n\n let target = cpu.read16i(board);\n\n\n\n if cond {\n\n push(cpu, board, R16::PC);\n\n cpu.reg.pc = target;\n\n\n\n board.push_cpu_evt(CpuEvt::TakeJmpTo(target));\n\n } else {\n\n board.push_cpu_evt(CpuEvt::SkipJmpTo(target));\n\n }\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 23, "score": 137302.57984456964 }, { "content": "pub fn ld_rr_d16<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n let d16 = cpu.read16i(board);\n\n cpu.reg.set_r16(rr, d16);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 24, "score": 135548.21795437456 }, { "content": "pub fn add_hl_rr<B: Board>(cpu: &mut CPU, board: &mut B, rr: R16) {\n\n let old = cpu.reg.hl;\n\n let addend = cpu.reg.get_r16(rr);\n\n\n\n let (new, carry) = old.overflowing_add(addend);\n\n\n\n cpu.reg.hl = new;\n\n\n\n cpu.reg.flags.remove(Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0x0FFF) + (addend & 0x0FFF) > 0x0FFF);\n\n cpu.reg.flags.set(Flags::C, carry);\n\n\n\n board.advance_mcycle();\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 25, "score": 135548.21795437456 }, { "content": "pub fn sub8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let a_sub_src = cp8(cpu, board, src);\n\n cpu.reg.set_r8(R8::A, a_sub_src);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 26, "score": 131997.11788932118 }, { "content": "pub fn adc8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let old = cpu.reg.get_r8(R8::A) as u16;\n\n let addend = src.read(cpu, board) as u16;\n\n let carry_val = if cpu.reg.flags.contains(Flags::C) {\n\n 1\n\n } else {\n\n 0\n\n };\n\n let sum = old + addend + carry_val;\n\n let new = (sum & 0xff) as u8;\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0x0f) + (addend & 0x0f) + carry_val > 0x0f);\n\n cpu.reg.flags.set(Flags::C, sum > 0xff);\n\n cpu.reg.flags.remove(Flags::N);\n\n}\n\n\n", "file_path": 
"maboy/src/cpu/execute.rs", "rank": 27, "score": 131997.11788932118 }, { "content": "pub fn add8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let old = cpu.reg.get_r8(R8::A);\n\n let addend = src.read(cpu, board);\n\n let (new, carry) = old.overflowing_add(addend);\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0x0f) + (addend & 0x0f) > 0x0f);\n\n cpu.reg.flags.set(Flags::C, carry);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 28, "score": 131997.11788932118 }, { "content": "pub fn xor8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let new = cpu.reg.get_r8(R8::A) ^ src.read(cpu, board);\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H | Flags::C);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 29, "score": 131997.11788932118 }, { "content": "pub fn sbc8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n // The bit magic gets a bit easier when we convert stuff to i16\n\n let old = cpu.reg.get_r8(R8::A) as i16;\n\n let subtrahend = src.read(cpu, board) as i16;\n\n let carry_val = if cpu.reg.flags.contains(Flags::C) {\n\n 1\n\n } else {\n\n 0\n\n };\n\n\n\n let new = old - subtrahend - carry_val;\n\n\n\n cpu.reg.set_r8(R8::A, new as u8);\n\n\n\n cpu.reg.flags.set(Flags::Z, new & 0xff == 0);\n\n cpu.reg.flags.insert(Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0xf) < (subtrahend & 0xf) + carry_val);\n\n cpu.reg.flags.set(Flags::C, new < 0);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 30, "score": 131997.11788932118 }, { "content": "pub fn and8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let new = cpu.reg.get_r8(R8::A) & src.read(cpu, board);\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::C);\n\n cpu.reg.flags.insert(Flags::H);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 31, "score": 131997.11788932118 }, { "content": "pub fn or8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) {\n\n let new = cpu.reg.get_r8(R8::A) | src.read(cpu, board);\n\n\n\n cpu.reg.set_r8(R8::A, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H | Flags::C);\n\n}\n\n\n\n// CB prefixed Instructions\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 32, "score": 131997.11788932118 }, { "content": "// Returns a - src, useful for implementing sub8\n\npub fn cp8<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, src: S) -> u8 {\n\n let old = cpu.reg.get_r8(R8::A);\n\n let subtrahend = src.read(cpu, board);\n\n\n\n let (new, carry) = old.overflowing_sub(subtrahend);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.insert(Flags::N);\n\n cpu.reg\n\n .flags\n\n .set(Flags::H, (old & 0x0f) < (subtrahend & 0x0f));\n\n cpu.reg.flags.set(Flags::C, carry);\n\n\n\n new\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 33, "score": 128732.0200427331 }, { "content": "pub fn inc8<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = old.wrapping_add(1);\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N);\n\n cpu.reg.flags.set(Flags::H, (old & 0x0f) == 0x0f);\n\n}\n\n\n", "file_path": 
"maboy/src/cpu/execute.rs", "rank": 34, "score": 125633.63601650478 }, { "content": "pub fn rl<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = (old << 1)\n\n + if cpu.reg.flags.contains(Flags::C) {\n\n 1\n\n } else {\n\n 0\n\n };\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 35, "score": 125633.63601650478 }, { "content": "pub fn rlc<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n target.write(cpu, board, old.rotate_left(1));\n\n\n\n cpu.reg.flags.set(Flags::Z, old == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 36, "score": 125633.63601650478 }, { "content": "pub fn rr<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = (old >> 1)\n\n + if cpu.reg.flags.contains(Flags::C) {\n\n 0b_1000_0000\n\n } else {\n\n 0\n\n };\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 37, "score": 125633.63601650475 }, { "content": "pub fn srl<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = old >> 1;\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 38, "score": 125633.63601650475 }, { "content": "pub fn bit<B: Board, S: Src8>(cpu: &mut CPU, board: &mut B, bit: u8, src: S) {\n\n let bit_set = src.read(cpu, board).bit(bit);\n\n cpu.reg.flags.set(Flags::Z, !bit_set);\n\n cpu.reg.flags.remove(Flags::N);\n\n cpu.reg.flags.insert(Flags::H);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 39, "score": 125633.63601650478 }, { "content": "pub fn swap<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n target.write(cpu, board, (old >> 4) + (old << 4));\n\n\n\n cpu.reg.flags.set(Flags::Z, old == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H | Flags::C);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 40, "score": 125633.63601650478 }, { "content": "pub fn dec8<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = old.wrapping_sub(1);\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.insert(Flags::N);\n\n cpu.reg.flags.set(Flags::H, (new & 0x0f) == 0x0f);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 41, "score": 125633.63601650475 }, { "content": "pub fn rrc<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n target.write(cpu, board, old.rotate_right(1));\n\n\n\n cpu.reg.flags.set(Flags::Z, old == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0))\n\n}\n\n\n", "file_path": 
"maboy/src/cpu/execute.rs", "rank": 42, "score": 125633.63601650475 }, { "content": "pub fn sla<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = old << 1;\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(7));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 43, "score": 125633.63601650478 }, { "content": "pub fn sra<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, target: T) {\n\n let old = target.read(cpu, board);\n\n let new = (old >> 1) | (old & 0b_1000_0000);\n\n\n\n target.write(cpu, board, new);\n\n\n\n cpu.reg.flags.set(Flags::Z, new == 0);\n\n cpu.reg.flags.remove(Flags::N | Flags::H);\n\n cpu.reg.flags.set(Flags::C, old.bit(0));\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 44, "score": 125633.63601650475 }, { "content": "pub fn ld8<B: Board, D: Dst8, S: Src8>(cpu: &mut CPU, board: &mut B, dst: D, src: S) {\n\n let val = src.read(cpu, board);\n\n dst.write(cpu, board, val);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 45, "score": 119912.33095191995 }, { "content": "pub fn set<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, bit: u8, target: T) {\n\n let new = target.read(cpu, board).set_bit(bit);\n\n target.write(cpu, board, new);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 46, "score": 119912.33095191995 }, { "content": "pub fn res<B: Board, T: Src8 + Dst8 + Copy>(cpu: &mut CPU, board: &mut B, bit: u8, target: T) {\n\n let new = target.read(cpu, board).reset_bit(bit);\n\n target.write(cpu, board, new);\n\n}\n\n\n", "file_path": "maboy/src/cpu/execute.rs", "rank": 47, "score": 119912.33095191995 }, { "content": "fn load_savegame<C: Savegame>(rom_path: &mut PathBuf, cartridge: &mut C) {\n\n use std::fs::File;\n\n use std::io::Read;\n\n\n\n if let Some(cram) = cartridge.savegame_mut() {\n\n rom_path.set_extension(\"sav\");\n\n\n\n // If it exists, we read it into the cartridge RAM\n\n if let Ok(mut save_file) = File::open(&rom_path) {\n\n save_file\n\n .read_exact(cram)\n\n .expect_msg_box(\"Failed to load savegame\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 105835.50735089608 }, { "content": "fn load_metadata<C: Metadata>(rom_path: &mut PathBuf, cartridge: &mut C) {\n\n if !cartridge.supports_metadata() {\n\n return;\n\n }\n\n\n\n rom_path.set_extension(\"meta\");\n\n\n\n if let Ok(metadata) = fs::read(rom_path) {\n\n cartridge\n\n .deserialize_metadata(metadata)\n\n .expect_msg_box(\"Metadata file was found, but had invalid contents\")\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 49, "score": 105835.50735089608 }, { "content": "fn dispatch_emulator(rom_path: &str, mut cartridge: CartridgeVariant) {\n\n match &mut cartridge {\n\n CartridgeVariant::Rom(c) => run_emu(rom_path, c),\n\n CartridgeVariant::RomRam(c) => run_emu(rom_path, c),\n\n CartridgeVariant::RomRamBanked(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC1(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC1Ram(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC1RamBanked(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC2(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC3(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC3Rtc(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC3Ram(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC3RamBanked(c) => 
run_emu(rom_path, c),\n\n CartridgeVariant::MBC3RamRtc(c) => run_emu(rom_path, c),\n\n CartridgeVariant::MBC3RamBankedRtc(c) => run_emu(rom_path, c),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 50, "score": 98513.33787847334 }, { "content": "fn present_frame(frame: GfxFrame, os_timing: &mut OsTiming) {\n\n os_timing.wait_frame_remaining().unwrap();\n\n\n\n let frame_duration = os_timing.notify_frame_start().unwrap().as_secs_f64();\n\n\n\n log::info!(\"Frame took {:.2} ms\", frame_duration * 1000.0);\n\n\n\n frame\n\n .present(false)\n\n .expect_msg_box(\"Could not present frame\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 96933.13493993419 }, { "content": "fn store_savegame<C: Savegame>(rom_path: &mut PathBuf, cartridge: &C) {\n\n if let Some(cram) = cartridge.savegame() {\n\n // Try to guess savegame path from rom path\n\n rom_path.set_extension(\"sav\");\n\n\n\n // We overwrite / create a sav file with the cram contents\n\n fs::write(rom_path, cram).expect_msg_box(\"Could not write savegame to disk\");\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 52, "score": 91362.16456665922 }, { "content": "fn store_metadata<C: Metadata>(rom_path: &mut PathBuf, cartridge: &C) {\n\n if !cartridge.supports_metadata() {\n\n return;\n\n }\n\n\n\n rom_path.set_extension(\"meta\");\n\n\n\n let metadata = cartridge\n\n .serialize_metadata()\n\n .expect_msg_box(\"Could not serialize cartridge metadata\");\n\n\n\n fs::write(rom_path, metadata).expect_msg_box(\"Could not write cartridge metadata to disk\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 53, "score": 91362.16456665922 }, { "content": "fn run_emu<C: Cartridge + Savegame + Metadata>(rom_path: &str, mut cartridge: C) {\n\n let mut rom_path = PathBuf::from(rom_path);\n\n\n\n load_savegame(&mut rom_path, &mut cartridge);\n\n\n\n load_metadata(&mut rom_path, &mut cartridge);\n\n\n\n let mut emu = Emulator::with_debugger(&mut cartridge, cpu_logger(), NoDbgLogger);\n\n\n\n #[cfg(debug_assertions)]\n\n let mut cpu_debugger = CpuDebugger::new();\n\n\n\n // Initialize input system\n\n let window_input = Rc::new(RefCell::new(WindowInput::from_watched_keys(&[\n\n A_BUTTON_KEY,\n\n B_BUTTON_KEY,\n\n START_BUTTON_KEY,\n\n SELECT_BUTTON_KEY,\n\n UP_BUTTON_KEY,\n\n RIGHT_BUTTON_KEY,\n", "file_path": "src/main.rs", "rank": 54, "score": 86656.65570174348 }, { "content": "// TODO: Think about returning a `Result` from here instead of an `Option`\n\n/// Displays a native open file dialog with the specified window title and file filter.\n\npub fn open_file_dialog(title: &str, filters: Vec<FileFilter>) -> Option<OsString> {\n\n const MAX_FILE_NAME_LEN: usize = 300;\n\n\n\n let mut title = OsString::from(title).encode_wide_nul_term();\n\n\n\n let mut filter_buf = filters\n\n .into_iter()\n\n .flat_map(|f| vec![f.display_name.to_owned(), f.file_types.join(\";\")])\n\n .flat_map(|s| OsString::from(s).encode_wide_nul_term())\n\n .chain(iter::once(0))\n\n .collect::<Vec<u16>>();\n\n\n\n let mut file_name_buffer = vec![0u16; MAX_FILE_NAME_LEN];\n\n\n\n let mut open_dialog_options = OPENFILENAMEW {\n\n lStructSize: std::mem::size_of::<OPENFILENAMEW>() as u32,\n\n hwndOwner: ptr::null_mut(),\n\n hInstance: ptr::null_mut(),\n\n lpstrFilter: filter_buf.as_mut_ptr(),\n\n lpstrCustomFilter: ptr::null_mut(),\n", "file_path": "src/open_file_dialog.rs", "rank": 55, "score": 83749.17510057302 }, { "content": "#[derive(Default)]\n\nstruct RtcReg {\n\n seconds: u8,\n\n minutes: u8,\n\n hours: u8,\n\n days_lower: u8,\n\n flags: 
RtcFlags,\n\n}\n\n\n\nimpl RtcReg {\n\n fn get_mut(&mut self, addr: RtcRegAddr) -> &mut u8 {\n\n match addr {\n\n RtcRegAddr::Seconds => &mut self.seconds,\n\n RtcRegAddr::Minutes => &mut self.minutes,\n\n RtcRegAddr::Hours => &mut self.hours,\n\n RtcRegAddr::DaysLower => &mut self.days_lower,\n\n RtcRegAddr::Flags => &mut self.flags.bits,\n\n }\n\n }\n\n}\n\n\n\nbitflags! {\n\n #[derive(Default)]\n\n pub struct RtcFlags: u8 {\n\n const DAY_MSB = 0b_0000_0001;\n\n const HALTED = 0b_0100_0000;\n\n const DAY_CARRY = 0b_1000_0000;\n\n }\n\n}\n", "file_path": "maboy/src/cartridge/mbc/rtc.rs", "rank": 56, "score": 65525.155430182334 }, { "content": "#[derive(Copy, Clone)]\n\nstruct PixelQuad {\n\n /// Contains the actual *paletted* pixel colors with the leftmost pixel color\n\n /// being at the least significant 2 bits. If color is unknown (for BG sprites),\n\n /// pixel color is undefined. Sprite colors are also contained, even if the\n\n /// color might later get overwritten.\n\n pixel_col: u8,\n\n\n\n /// In the same order as `pixel_types`, each two bits describe a pixel source:\n\n /// 0b00 - Background (needs to be calculated later)\n\n /// 0b01 - Window or blended Sprite [Priority 1] - This pixel is final and paletted\n\n /// 0b10 - Sprite with priority 1 over BG - Paletted, even though it might be overwritten\n\n /// 0b11 - Sprite [Priority 0] (over BG or Window) - This pixel is final and paletted\n\n /// Note that in this representation, the lower bit indicates if a pixel is final.\n\n pixel_src: u8,\n\n}\n\n\n\nimpl PixelQuad {\n\n fn zero() -> PixelQuad {\n\n PixelQuad {\n\n pixel_col: 0,\n", "file_path": "maboy/src/ppu/pixel_queue.rs", "rank": 57, "score": 65525.155430182334 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n // Show file open dialog so user can select a ROM\n\n let rom_path = open_file_dialog(\n\n \"Please select a cartridge rom\",\n\n vec![FileFilter::new(\n\n \"Cartridge ROM (.gb, .rom, .gbc)\",\n\n vec![\"*.GB\", \"*.ROM\", \"*.GBC\"],\n\n )],\n\n )\n\n .map(|s| s.into_string().expect_msg_box(\"Could not read rom path\"))\n\n .expect_msg_box(\"Could not open ROM file\");\n\n\n\n // Parse Cartridge\n\n let cartridge =\n\n CartridgeVariant::from_file(&rom_path).expect_msg_box(\"Could not open rom file\");\n\n\n\n dispatch_emulator(&rom_path, cartridge);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 58, "score": 65307.37626002476 }, { "content": "#[cfg(not(debug_assertions))]\n\nfn cpu_logger() -> NoDbgLogger {\n\n NoDbgLogger\n\n}\n", "file_path": "src/main.rs", "rank": 59, "score": 59611.98801754802 }, { "content": "/// This trait is used to provide access to the internal cartridge RAM. This is\n\n/// necessary for providing savegame support for games.\n\n///\n\n/// For cartridges without RAM, as well as for cartridges with RAM but *without a\n\n/// battery*, all methods of this trait should return `None`.\n\n///\n\n/// This trait could be implemented *only* for eligible cartridges by using some type-level\n\n/// magic, but this would make cartridge handling for the frontend even more annoying\n\n/// than it already is, since they would have to dispatch different types of cartridges.\n\n/// to different methods. 
These trait methods are also not going to be called in a tight loop,\n\n/// so optimizing for performance is not a priority.\n\n///\n\n/// # Examples\n\n///\n\n/// Loading a savegame from disk:\n\n/// ```\n\n/// if let Some(cram) = cartridge.savegame() {\n\n/// fs::write(savegame_path, cram).expect(\"Could not write savegame to disk\");\n\n/// }\n\n/// ```\n\npub trait Savegame {\n\n fn savegame(&self) -> Option<&[u8]> {\n\n None\n\n }\n\n\n\n fn savegame_mut(&mut self) -> Option<&mut [u8]> {\n\n None\n\n }\n\n}\n\n\n\nimpl<MBC: CartridgeMBC> Savegame for CartridgeImpl<MBC> {\n\n fn savegame(&self) -> Option<&[u8]> {\n\n self.mbc.savegame()\n\n }\n\n\n\n fn savegame_mut(&mut self) -> Option<&mut [u8]> {\n\n self.mbc.savegame_mut()\n\n }\n\n}\n\n\n", "file_path": "maboy/src/cartridge/mod.rs", "rank": 60, "score": 59412.53384391568 }, { "content": "/// Some cartridges can use external metadata to provide some functionality. MBC3, for\n\n/// example, can use metadata to persist real-time clock state across multiple emulator\n\n/// runs. This trait provides access to load and store such metadata, if present.\n\n///\n\n/// This trait is similar to ['Savegame'], which contains some useful further information.\n\npub trait Metadata {\n\n fn supports_metadata(&self) -> bool {\n\n false\n\n }\n\n\n\n fn serialize_metadata(&self) -> Result<Vec<u8>, CartridgeParseError> {\n\n Err(CartridgeParseError::MetadataNotSuported)\n\n }\n\n\n\n fn deserialize_metadata(&mut self, _data: Vec<u8>) -> Result<(), CartridgeParseError> {\n\n Err(CartridgeParseError::MetadataNotSuported)\n\n }\n\n}\n\n\n\nimpl<MBC: CartridgeMBC> Metadata for CartridgeImpl<MBC> {\n\n fn supports_metadata(&self) -> bool {\n\n self.mbc.supports_metadata()\n\n }\n\n\n\n fn serialize_metadata(&self) -> Result<Vec<u8>, CartridgeParseError> {\n", "file_path": "maboy/src/cartridge/mod.rs", "rank": 61, "score": 59411.22320034176 }, { "content": "/// Interface between the CPU and the cartridge. 
This trait is mainly used so we don't\n\n/// have to write out the MBC type parameter in a million places, and instead can just\n\n/// accept any type that implements this trait.\n\npub trait Cartridge {\n\n /// The MBC (memory bank controller) used in the cartridge\n\n type MBC: CartridgeMBC;\n\n\n\n fn read_rom(&self, addr: CRomAddr) -> u8;\n\n fn write_rom(&mut self, addr: CRomAddr, val: u8);\n\n\n\n fn read_cram(&self, addr: CRamAddr) -> u8;\n\n fn write_cram(&mut self, addr: CRamAddr, val: u8);\n\n}\n\n\n\nimpl<MBC: CartridgeMBC> Cartridge for CartridgeImpl<MBC> {\n\n type MBC = MBC;\n\n\n\n fn read_rom(&self, addr: CRomAddr) -> u8 {\n\n self.mbc.read_rom(addr)\n\n }\n\n\n\n fn write_rom(&mut self, addr: CRomAddr, val: u8) {\n\n self.mbc.write_rom(addr, val);\n", "file_path": "maboy/src/cartridge/mod.rs", "rank": 62, "score": 59410.28915994033 }, { "content": "/// Writes an 8 bit value (to memory or a CPU register), consuming\n\n/// the correct amount of cycles in the process.\n\npub trait Dst8 {\n\n fn write<B: Board>(self, cpu: &mut CPU, board: &mut B, val: u8);\n\n}\n\n\n\n/// Passing this as source reads an immediate operand from (PC), then increases PC.\n\npub struct Imm8;\n\n\n\nimpl Src8 for Imm8 {\n\n fn read<B: Board>(self, cpu: &mut CPU, board: &mut B) -> u8 {\n\n cpu.read8i(board)\n\n }\n\n}\n\n\n\n/// Some operations save a byte by assuming the upper byte of the src/dst address\n\n/// of the operation to be 0xFF (0xFFxx), with the lower byte provided via this operand.\n\npub enum HighRamOperand {\n\n Imm8,\n\n C,\n\n}\n\n\n", "file_path": "maboy/src/cpu/operands.rs", "rank": 63, "score": 59406.73562144674 }, { "content": "/// See the [module documentation](super::board)\n\npub trait Board {\n\n /// The type of Cartridge that this Game Boy can handle.\n\n type CMem: Cartridge;\n\n\n\n /// Cpu event logger for debugging purposes\n\n type CpuDbgEvtSrc: DbgEvtSrc<CpuEvt>;\n\n\n\n /// Ppu event logger for debugging purposes\n\n type PpuDbgEvtSrc: DbgEvtSrc<PpuEvt>;\n\n\n\n /// Advances all components of the gameboy by 1 machine cycle (4 clock cycles).\n\n ///\n\n /// To be called only from the CPU, whenever a cycle is advanced *without*\n\n /// any memory reads/writes. This method is automatically called when\n\n /// `read8`, `read16`, `write8`, `write16` are called.\n\n fn advance_mcycle(&mut self);\n\n\n\n /// Reads a byte from memory *without* consuming a cycle. Should not be called\n\n /// from the CPU unless for very special cases (like IR handling). 
This method\n\n /// is also necessary to handle OAM DMA.\n", "file_path": "maboy/src/board/mod.rs", "rank": 64, "score": 59406.73562144674 }, { "content": "/// Reads an 8 bit value (from memory or a CPU register), consuming\n\n/// the correct amount of cycles in the process.\n\npub trait Src8 {\n\n fn read<B: Board>(self, cpu: &mut CPU, board: &mut B) -> u8;\n\n}\n\n\n", "file_path": "maboy/src/cpu/operands.rs", "rank": 65, "score": 59406.73562144674 }, { "content": "pub trait FmtNum {\n\n fn fmt_val(self) -> StyledObject<String>;\n\n fn fmt_addr(self) -> StyledObject<String>;\n\n}\n\n\n\nimpl FmtNum for u8 {\n\n fn fmt_val(self) -> StyledObject<String> {\n\n style(format!(\"{} ({:#04X})\", self, self)).blue()\n\n }\n\n\n\n fn fmt_addr(self) -> StyledObject<String> {\n\n style(format!(\"{:#04X}\", self)).yellow()\n\n }\n\n}\n\n\n\nimpl FmtNum for u16 {\n\n fn fmt_val(self) -> StyledObject<String> {\n\n style(format!(\"{} ({:#06X})\", self, self)).blue()\n\n }\n\n\n", "file_path": "maboy/src/debug/fmt.rs", "rank": 66, "score": 58360.508127459456 }, { "content": "pub trait IntoResult: Sized {\n\n fn into_result(self) -> Result<(), HResultError>;\n\n}\n\n\n\nimpl IntoResult for HRESULT {\n\n fn into_result(self) -> Result<(), HResultError> {\n\n if SUCCEEDED(self) {\n\n Ok(())\n\n } else {\n\n Err(HResultError(self))\n\n }\n\n }\n\n}\n", "file_path": "src/hresult_error.rs", "rank": 67, "score": 57606.07671410809 }, { "content": "/// A single row of pixels within a tile. Modifiying instances of this\n\n/// struct does *not* modify the backing tile array\n\npub trait TileRow {\n\n /// Returns the leftmost pixel of the tile and removes it\n\n fn pop_leftmost(&mut self) -> Color;\n\n\n\n /// Removes the n leftmost pixels from the tile row\n\n fn discard_leftmost(&mut self, n: u8);\n\n}\n\n\n\npub enum SpriteTileRow {\n\n InOrder(InOrderTileRow),\n\n Reverse(ReverseTileRow),\n\n}\n\n\n\npub struct InOrderTileRow(u16);\n\n\n\npub struct ReverseTileRow(u16);\n\n\n\nconst TILE_BYTE_WIDTH: usize = 16;\n\n\n\n// TODO: Remove all the unneccesary repr transparents for all files\n", "file_path": "maboy/src/ppu/tile_data.rs", "rank": 68, "score": 57379.14998514483 }, { "content": "/// The interface between the RAM implementation and the MBC. 
The CPU will never\n\n/// directly interact with this trait since the MBC can decide to disable RAM\n\n/// temporarily; Thus, all communication goes through the MBC implementation.\n\npub trait CartridgeRam: Savegame {\n\n fn read(&self, addr: CRamAddr) -> u8;\n\n fn write(&mut self, addr: CRamAddr, val: u8);\n\n fn try_select_bank(&mut self, bank: u8);\n\n}\n\n\n\n/// Cartridges with no internal RAM should use this implementation, where every\n\n/// write is a NOOP and every read yields 0xFF.\n\npub struct NoCRam;\n\n\n\nimpl Savegame for NoCRam {}\n\n\n\nimpl CartridgeRam for NoCRam {\n\n fn read(&self, _addr: CRamAddr) -> u8 {\n\n 0xff\n\n }\n\n\n\n fn write(&mut self, _addr: CRamAddr, _val: u8) {}\n\n\n\n fn try_select_bank(&mut self, _bank: u8) {}\n", "file_path": "maboy/src/cartridge/cram.rs", "rank": 69, "score": 55574.434583436276 }, { "content": "#[cfg(debug_assertions)]\n\nfn cpu_logger() -> DbgEvtLogger<CpuEvt> {\n\n DbgEvtLogger::new()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 70, "score": 54834.51172532445 }, { "content": "/// Some common per-bit operations\n\npub trait BitOps: Copy {\n\n #[must_use]\n\n fn bit(self, bit: u8) -> bool;\n\n\n\n #[must_use]\n\n fn reset_bit(self, bit: u8) -> Self;\n\n\n\n #[must_use]\n\n fn set_bit(self, bit: u8) -> Self;\n\n\n\n #[must_use]\n\n fn with_bit(self, bit: u8, is_set: bool) -> Self;\n\n}\n\n\n\nmacro_rules! impl_bitops {\n\n ($($type:ty),*) => {\n\n $(impl BitOps for $type {\n\n fn bit(self, bit: u8) -> bool {\n\n (self >> bit) & 1 != 0\n\n }\n", "file_path": "maboy/src/util/bit_ops.rs", "rank": 71, "score": 54644.68035130024 }, { "content": "pub trait DbgEvtSrc<T> {\n\n fn push(&mut self, evt: T);\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum CpuEvt {\n\n Exec(u16, ByteInstr),\n\n ExecCB(CBByteInstr),\n\n ReadMem(u16, u8),\n\n WriteMem(u16, u8),\n\n HandleIR(Interrupt),\n\n TakeJmpTo(u16),\n\n SkipJmpTo(u16),\n\n EnterHalt(HaltState),\n\n IrEnable,\n\n IrDisable,\n\n}\n\n\n\npub enum PpuEvt {}\n\n\n", "file_path": "maboy/src/debug/mod.rs", "rank": 72, "score": 54644.68035130024 }, { "content": "/// The default behaviour of `Option::expect` and `Result::expect` is just to panic\n\n/// when no value if contained. This is not very pretty for applications with a GUI,\n\n/// so this trait adds a method similar to `expect` that additionally displays\n\n/// a message box with the error message (and then panics).\n\npub trait ExpectMsgBox<T> {\n\n fn expect_msg_box(self, msg: &str) -> T;\n\n}\n\n\n\nimpl<T, E: Debug> ExpectMsgBox<T> for Result<T, E> {\n\n fn expect_msg_box(self, msg: &str) -> T {\n\n let title = msg_box_title();\n\n\n\n match self {\n\n Ok(val) => val,\n\n Err(err) => unsafe {\n\n let err_str =\n\n OsString::from(&format!(\"{} ({:?})\", msg, err)).encode_wide_nul_term();\n\n MessageBoxW(ptr::null_mut(), err_str.as_ptr(), title.as_ptr(), MB_OK);\n\n panic!(\"{:?}\", err);\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/expect_msg_box.rs", "rank": 73, "score": 54644.68035130024 }, { "content": "/// Very often, we need to encode strings as weird pseudo UTF-16 with a null terminator\n\n/// for Win32 interop. This trait provides an easy extension method for this purpose.\n\npub trait EncodeWideNulTerm: OsStrExt {\n\n fn encode_wide_nul_term(&self) -> Vec<u16> {\n\n self.encode_wide().chain(once(0)).collect()\n\n }\n\n}\n\n\n\n// TODO: Why does this not work???\n\n// impl<T: OsStrExt> EncodeWithNulTerm for T {}\n\n\n\n// ... 
but this does...\n\nimpl EncodeWideNulTerm for OsStr {}\n", "file_path": "src/util.rs", "rank": 74, "score": 53766.00014856126 }, { "content": "fn msg_box_title() -> &'static Vec<u16> {\n\n unsafe {\n\n MSG_BOX_TITLE_INIT.call_once(|| {\n\n MSG_BOX_TITLE = OsString::from(\"MaBoy GameBoy Emulator\").encode_wide_nul_term()\n\n });\n\n &MSG_BOX_TITLE\n\n }\n\n}\n", "file_path": "src/expect_msg_box.rs", "rank": 75, "score": 52390.18820250252 }, { "content": "/// The public interface of all MBCs. The CPU only communicates with cartridge memory\n\n/// via this trait.\n\npub trait CartridgeMBC: Savegame + Metadata {\n\n type CRAM: CartridgeRam;\n\n\n\n fn read_rom(&self, addr: CRomAddr) -> u8;\n\n fn write_rom(&mut self, addr: CRomAddr, val: u8);\n\n\n\n fn read_cram(&self, addr: CRamAddr) -> u8;\n\n fn write_cram(&mut self, addr: CRamAddr, val: u8);\n\n}\n\n\n\n/// Cartridges with no MBC (e.g. Tetris) can use this MBC implementation where any\n\n/// writes to ROM compile to NOOPs\n\npub struct NoMBC<CRAM> {\n\n rom: Box<[u8]>,\n\n cram: CRAM,\n\n}\n\n\n\nimpl<CRAM: CartridgeRam> NoMBC<CRAM> {\n\n pub(super) fn new(rom: Box<[u8]>, cram: CRAM) -> NoMBC<CRAM> {\n\n debug_assert!(rom.len() == 0x8000);\n", "file_path": "maboy/src/cartridge/mbc/mod.rs", "rank": 76, "score": 52212.33285415993 }, { "content": "/// For sprites with OBJ-to-BG priority 1, this function calculates\n\n/// the resulting color of a blend with a BG/WND color\n\nfn blend_sprite_col(sprite_col: Color, bg_col: Color, bg_palette: Palette) -> Color {\n\n match bg_col.into_val() {\n\n ColorVal::C00 => sprite_col,\n\n _ => bg_palette.apply(bg_col),\n\n }\n\n}\n", "file_path": "maboy/src/ppu/pixel_queue.rs", "rank": 77, "score": 42205.625673022296 }, { "content": "// TODO: Make this signature nicer by lowering trait requirements for Emulator function calls\n\n// or by introducing an Emulator trait\n\nfn os_update<CMem: Cartridge, CpuDbg: DbgEvtSrc<CpuEvt>, PpuDbg: DbgEvtSrc<PpuEvt>>(\n\n emu: &mut Emulator<CMem, CpuDbg, PpuDbg>,\n\n window_factory: &WindowFactory,\n\n window_input: &RefCell<WindowInput>,\n\n gamepad_input: &Option<GamePadInput>,\n\n) -> bool {\n\n if !window_factory.dispatch_window_msgs() {\n\n return false;\n\n }\n\n\n\n let mut button_states =\n\n window_input\n\n .borrow()\n\n .depressed_keys()\n\n .fold(Buttons::empty(), |mut acc, key| {\n\n match key {\n\n A_BUTTON_KEY => acc.insert(Buttons::A),\n\n B_BUTTON_KEY => acc.insert(Buttons::B),\n\n START_BUTTON_KEY => acc.insert(Buttons::START),\n\n SELECT_BUTTON_KEY => acc.insert(Buttons::SELECT),\n", "file_path": "src/main.rs", "rank": 78, "score": 39738.47114522072 }, { "content": " x: mem[1],\n\n id: mem[2],\n\n flags: SpriteFlags(mem[3]),\n\n }\n\n }\n\n}\n\n\n\nimpl SpriteFlags {\n\n pub fn is_occluded(&self) -> bool {\n\n self.0.bit(7)\n\n }\n\n\n\n pub fn y_flipped(&self) -> bool {\n\n self.0.bit(6)\n\n }\n\n\n\n pub fn x_flipped(&self) -> bool {\n\n self.0.bit(5)\n\n }\n\n\n\n pub fn uses_alternative_pallette(&self) -> bool {\n\n self.0.bit(4)\n\n }\n\n}\n", "file_path": "maboy/src/ppu/sprite.rs", "rank": 80, "score": 16.097595584811046 }, { "content": "/// Attempting to switch to a non-existent bank leaves the currently mapped bank unchanged.\n\npub struct CRamBanked {\n\n cram: Pin<Box<[u8]>>,\n\n mapped_bank: &'static mut [u8],\n\n has_battery: bool,\n\n}\n\n\n\nimpl CRamBanked {\n\n pub fn new(has_battery: bool) -> Self {\n\n let mut cram = Pin::new(vec![0u8; 4 * 0x2000].into_boxed_slice());\n\n\n\n // We forget about the lifetime of the reference here, 
which is safe because we got the memory\n\n // inside a `Pin<Box<...>>` right here in the struct.\n\n let mapped_bank = unsafe { std::mem::transmute(&mut cram[..]) };\n\n\n\n Self {\n\n cram,\n\n mapped_bank,\n\n has_battery,\n\n }\n", "file_path": "maboy/src/cartridge/cram.rs", "rank": 81, "score": 15.740330335344888 }, { "content": "use crate::util::BitOps;\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Sprite {\n\n pub y: u8,\n\n pub x: u8,\n\n pub id: u8,\n\n pub flags: SpriteFlags,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SpriteFlags(u8);\n\n\n\nimpl Sprite {\n\n /// Creates a sprite (usually from a slice of OAM RAM) by copying bytes\n\n pub fn from_slice(mem: &[u8]) -> Sprite {\n\n debug_assert!(mem.len() >= 4);\n\n\n\n Sprite {\n\n y: mem[0],\n", "file_path": "maboy/src/ppu/sprite.rs", "rank": 82, "score": 14.786672394921158 }, { "content": "//! Implementation of the Serial Port of your Game Boy, used for connecting\n\n//! two Game Boys via a link cable. This module is almost completely unfinished;\n\n//! It is only implemented up to a point where it doesn't crash any games.\n\n\n\nuse super::address::SerialReg;\n\n\n\n/// Storage for the SB register\n\npub struct SerialPort {\n\n sb_reg: u8,\n\n}\n\n\n\nimpl SerialPort {\n\n pub fn new() -> SerialPort {\n\n SerialPort { sb_reg: 0 }\n\n }\n\n\n\n pub fn write_reg(&mut self, reg: SerialReg, val: u8) {\n\n match reg {\n\n SerialReg::SB => self.sb_reg = val,\n\n SerialReg::SC => {\n", "file_path": "maboy/src/serial_port.rs", "rank": 83, "score": 14.664799575059742 }, { "content": "use crate::address::CRomAddr;\n\nuse std::pin::Pin;\n\n\n\n// TODO: Be more consistent where warn!, debug!, error! are used\n\n\n\n/// Helper struct for all MBCs that allow ROM banking. Allows for efficient bank switching\n\n/// by keeping an internal pointer to the currently active ROM bank offset.\n\npub struct BankedRom {\n\n rom: Pin<Box<[u8]>>,\n\n // TODO: Figure out exact behaviour when a non-existent bank is selected\n\n mapped_bank: Option<&'static [u8]>,\n\n}\n\n\n\nimpl BankedRom {\n\n pub fn new(rom: Box<[u8]>) -> Self {\n\n let rom = Pin::new(rom);\n\n\n\n // Forgets about the lifetime of our slice. This is safe because it is pinned and also\n\n // lives inside of self\n\n let mapped_bank = Some(unsafe { std::mem::transmute(&rom[0x4000..]) });\n", "file_path": "maboy/src/cartridge/mbc/banked_rom.rs", "rank": 84, "score": 14.616942622163998 }, { "content": "/// to be unnceccesary, this struct will be removed.\n\n///\n\n/// Note that [`Cartridge`] is also implemented for `&mut CartridgeImpl<_>`, meaning\n\n/// that you can pass a mutable reference to the emulator instead of passing by value.\n\n/// This allows you to store savegames and metadata after the emulator has concluded\n\n/// its run.\n\npub struct CartridgeImpl<MBC> {\n\n mbc: MBC,\n\n}\n\n\n\nimpl<MBC: CartridgeMBC> CartridgeImpl<MBC> {\n\n fn new(mbc: MBC) -> CartridgeImpl<MBC> {\n\n CartridgeImpl { mbc }\n\n }\n\n}\n\n\n\n/// Interface between the CPU and the cartridge. 
This trait is mainly used so we don't\n\n/// have to write out the MBC type parameter in a million places, and instead can just\n\n/// accept any type that implements this trait.\n", "file_path": "maboy/src/cartridge/mod.rs", "rank": 85, "score": 14.549458164183916 }, { "content": " pub fn notify_buttons_pressed(&mut self, ir_system: &mut InterruptSystem, buttons: Buttons) {\n\n if self.pressed.bits() & buttons.bits() != 0 {\n\n ir_system.schedule_interrupt(Interrupt::Joypad);\n\n }\n\n\n\n self.pressed.remove(buttons);\n\n }\n\n\n\n /// See documentation at [`Emulator::notify_buttons_released`]\n\n pub fn notify_buttons_released(&mut self, buttons: Buttons) {\n\n self.pressed.insert(buttons);\n\n }\n\n\n\n /// See documentation at [`Emulator::notify_buttons_state`]\n\n pub fn notify_buttons_state(&mut self, ir_system: &mut InterruptSystem, buttons: Buttons) {\n\n if self.pressed.bits() & buttons.bits() != 0 {\n\n ir_system.schedule_interrupt(Interrupt::Joypad);\n\n }\n\n\n\n // We don't need checking here since all bits of the Buttons flag are in use;\n\n // There are no illegal values\n\n self.pressed = unsafe { Buttons::from_bits_unchecked(!buttons.bits()) };\n\n }\n\n}\n", "file_path": "maboy/src/joypad.rs", "rank": 86, "score": 14.484946546609851 }, { "content": "//! Support for an Xbox gamepad\n\n\n\nuse bitflags::bitflags;\n\nuse maboy::Buttons;\n\nuse std::mem::MaybeUninit;\n\nuse winapi::shared::minwindef::DWORD;\n\nuse winapi::shared::winerror::ERROR_SUCCESS;\n\nuse winapi::um::xinput::{XInputGetState, XINPUT_STATE};\n\n\n\n/// Used to query the state of a connected Xbox gamepad\n\n/// Supports only a single device (since it's only a GameBoy... What more do you want?)\n\npub struct GamePadInput(DWORD);\n\n\n\nimpl GamePadInput {\n\n /// Returns the first gamepad that was found, or `None`. 
Microsoft warns about calling\n\n /// this in a tight loop, so I'll do the same.\n\n pub fn find_gamepad() -> Option<GamePadInput> {\n\n unsafe {\n\n let mut input_state: XINPUT_STATE = MaybeUninit::uninit().assume_init();\n\n\n", "file_path": "src/gamepad_input.rs", "rank": 87, "score": 14.21807885756586 }, { "content": " Serial = 1 << 3,\n\n Joypad = 1 << 4,\n\n}\n\n\n\n/// The read-mask if the IF register\n\nconst IF_MASK: u8 = 0b_1110_0000;\n\n\n\nimpl InterruptSystem {\n\n pub fn new() -> InterruptSystem {\n\n InterruptSystem {\n\n if_reg: IF_MASK,\n\n ie_reg: 0x0,\n\n }\n\n }\n\n\n\n pub fn read_if(&self) -> u8 {\n\n self.if_reg\n\n }\n\n\n\n pub fn write_if(&mut self, val: u8) {\n", "file_path": "maboy/src/interrupt_system.rs", "rank": 88, "score": 14.153714419522373 }, { "content": "pub struct NoDbgLogger;\n\n\n\nimpl<T> DbgEvtSrc<T> for NoDbgLogger {\n\n fn push(&mut self, _evt: T) {}\n\n}\n\n\n\npub struct DbgEvtLogger<T>(VecDeque<T>);\n\n\n\nimpl<T> DbgEvtLogger<T> {\n\n pub fn new() -> Self {\n\n Self(VecDeque::with_capacity(MAX_EVTS_LOGGED))\n\n }\n\n\n\n pub fn evts(&self) -> impl DoubleEndedIterator<Item = &T> {\n\n self.0.iter()\n\n }\n\n}\n\n\n\nimpl<T> DbgEvtSrc<T> for DbgEvtLogger<T> {\n\n fn push(&mut self, evt: T) {\n\n if self.0.len() == MAX_EVTS_LOGGED {\n\n self.0.pop_front();\n\n }\n\n self.0.push_back(evt)\n\n }\n\n}\n", "file_path": "maboy/src/debug/mod.rs", "rank": 89, "score": 14.076617527636635 }, { "content": "use super::{banked_rom::BankedRom, CartridgeMBC};\n\nuse crate::address::{CRamAddr, CRomAddr};\n\nuse crate::cartridge::cram::CRamMBC2;\n\nuse crate::{cartridge::CartridgeRam, util::BitOps, Metadata, Savegame};\n\n\n\npub struct MBC2 {\n\n rom: BankedRom,\n\n cram: CRamMBC2,\n\n cram_enabled: bool,\n\n}\n\n\n\nimpl MBC2 {\n\n pub fn new(rom: Box<[u8]>, has_battery: bool) -> MBC2 {\n\n MBC2 {\n\n rom: BankedRom::new(rom),\n\n cram: CRamMBC2::new(has_battery),\n\n cram_enabled: false,\n\n }\n\n }\n\n}\n", "file_path": "maboy/src/cartridge/mbc/mbc2.rs", "rank": 90, "score": 13.900480096935203 }, { "content": "//! Helper code to deal with register 0xFF41, the LCD Status register (LCDS)\n\n\n\nuse super::Mode;\n\nuse crate::util::BitOps;\n\n\n\n/// Wrapper around the LCDS register with some utility methods\n\n#[derive(Clone)]\n\npub struct LCDS(u8);\n\n\n\nimpl LCDS {\n\n pub fn new() -> LCDS {\n\n LCDS(0b1000_0000)\n\n }\n\n\n\n pub fn from_raw(reg: u8) -> LCDS {\n\n LCDS(0b1000_0000 | reg)\n\n }\n\n\n\n pub fn ly_coincidence_interrupt(&self) -> bool {\n\n self.0.bit(6)\n", "file_path": "maboy/src/ppu/lcds.rs", "rank": 91, "score": 13.772284137817767 }, { "content": "use crate::address::{Addr, VideoMemAddr};\n\nuse crate::board::{Board, BoardImpl};\n\nuse crate::{\n\n cartridge::Cartridge,\n\n debug::{CpuEvt, DbgEvtSrc, PpuEvt},\n\n};\n\n\n\n// TODO: Move this onto emulator. 
It's too ugly here, i think...\n\n\n\n/// Stores the DMA register, as well as the internal state necessary to perform OAM DMA.\n\npub struct OamDma {\n\n reg: u8,\n\n src_addr: u16,\n\n oam_dst_idx: u8,\n\n read_buf: u8,\n\n}\n\n\n\nimpl OamDma {\n\n pub fn new() -> OamDma {\n\n OamDma {\n", "file_path": "maboy/src/board/oam_dma.rs", "rank": 92, "score": 13.362111076405483 }, { "content": " type Output = u8;\n\n\n\n fn index(&self, index: u16) -> &Self::Output {\n\n &self.mem[index as usize]\n\n }\n\n}\n\n\n\nimpl IndexMut<u16> for OAM {\n\n fn index_mut(&mut self, index: u16) -> &mut Self::Output {\n\n // TODO: See if it is worth not implementing this and making an explicit write method;\n\n // That way, we could check if the new data is actually different from the old one,\n\n // and only set the dirty flag in case it is.\n\n self.is_dirty = true;\n\n\n\n &mut self.mem[index as usize]\n\n }\n\n}\n", "file_path": "maboy/src/ppu/oam.rs", "rank": 93, "score": 13.103724221042324 }, { "content": "use super::{banked_rom::BankedRom, rtc::Rtc, CartridgeMBC};\n\nuse crate::address::{CRamAddr, CRomAddr};\n\nuse crate::{cartridge::cram::CartridgeRam, Metadata, Savegame};\n\n\n\n/// For speedyness reasons, we split MBC3 into a variant with an RTC module,\n\n/// and one without it.\n\n\n\npub struct MBC3<CRAM> {\n\n rom: BankedRom,\n\n cram: CRAM,\n\n cram_enabled: bool,\n\n}\n\n\n\nimpl<CRAM: CartridgeRam> MBC3<CRAM> {\n\n pub fn new(rom: Box<[u8]>, cram: CRAM) -> Self {\n\n Self {\n\n rom: BankedRom::new(rom),\n\n cram,\n\n cram_enabled: false,\n\n }\n", "file_path": "maboy/src/cartridge/mbc/mbc3.rs", "rank": 94, "score": 13.068707924010958 }, { "content": " }\n\n}\n\n\n\nmod cmd_bp {\n\n use super::*;\n\n\n\n pub fn execute<'a, I: Iterator<Item = &'a str>>(\n\n dbg: &mut CpuDebugger,\n\n term: &Term,\n\n mut args: I,\n\n ) {\n\n let mut output = String::new();\n\n\n\n match args.by_ref().next() {\n\n Some(\"set\") => set(dbg, &mut output, args),\n\n Some(\"mem\") => mem(dbg, &mut output, args),\n\n Some(\"list\") => list(dbg, &mut output),\n\n Some(\"rm\") => rm(dbg, &mut output, args),\n\n Some(\"clear\") => clear(dbg, &mut output),\n\n _ => writeln!(\n", "file_path": "maboy/src/debug/cpu_debugger.rs", "rank": 95, "score": 12.790251104741504 }, { "content": "mod joypad;\n\nmod memory;\n\nmod ppu;\n\nmod serial_port;\n\nmod timer;\n\nmod util;\n\n\n\nuse board::BoardImpl;\n\nuse cpu::CPU;\n\nuse debug::*;\n\nuse memory::{InternalMem, Memory};\n\n\n\npub use cartridge::*;\n\n\n\npub use joypad::Buttons;\n\npub use ppu::{MemPixel, VideoFrameStatus};\n\n\n\npub struct Emulator<C, CpuDbg, PpuDbg> {\n\n cpu: CPU,\n\n board: BoardImpl<C, CpuDbg, PpuDbg>,\n", "file_path": "maboy/src/lib.rs", "rank": 96, "score": 12.722237484018201 }, { "content": "//! Contains code for storing and accessing CPU registers.\n\n//! See [`Registers`] for more info.\n\n\n\nuse bitflags::*;\n\n\n\n#[repr(C)]\n\n#[derive(Default)]\n\npub struct Registers {\n\n pub a: u8,\n\n pub flags: Flags,\n\n pub bc: u16,\n\n pub de: u16,\n\n pub hl: u16,\n\n pub sp: u16,\n\n pub pc: u16,\n\n}\n\n\n\nbitflags! 
{\n\n #[derive(Default)]\n\n pub struct Flags: u8 {\n", "file_path": "maboy/src/cpu/registers.rs", "rank": 97, "score": 12.415618423733719 }, { "content": "\n\nimpl CartridgeRam for CRamUnbanked {\n\n fn read(&self, addr: CRamAddr) -> u8 {\n\n *self.cram.get(addr.raw() as usize).unwrap_or(&0xff)\n\n }\n\n\n\n fn write(&mut self, addr: CRamAddr, val: u8) {\n\n if let Some(mem) = self.cram.get_mut(addr.raw() as usize) {\n\n *mem = val;\n\n }\n\n }\n\n\n\n fn try_select_bank(&mut self, _bank: u8) {}\n\n}\n\n\n\n/// MBC2 has a weird half-byte RAM, where only the lower 4 bits of each addressable byte are used.\n\n/// We store this in a compressed format so we use all 8 bits of each byte. The lower half of the\n\n/// byte contains the lower address.\n\npub struct CRamMBC2 {\n\n // TODO: Internally, this looks very much like CRAMUnbanked. The Savegame impl is also the same. See if it should be modularized\n", "file_path": "maboy/src/cartridge/cram.rs", "rank": 98, "score": 12.367752289840226 }, { "content": " /// This method should only be called when a frame is finished; Otherwise, garbage\n\n /// might be displayed.\n\n\n\n pub fn data(&self) -> &[MemPixel] {\n\n &self.data\n\n }\n\n\n\n /// Retrieves one entire scanline\n\n pub fn line(&mut self, ly: u8) -> &mut [MemPixel] {\n\n &mut self.data[WIDTH * ly as usize..WIDTH * ly as usize + WIDTH]\n\n }\n\n}\n\n\n\n// TODO: Make this configurable\n\n/// The conversion from 2-bit color values to RGBA values\n\nimpl From<Color> for MemPixel {\n\n fn from(col: Color) -> Self {\n\n // These values simulate the original Game Boy's signature green tint...\n\n\n\n use super::color::ColorVal;\n", "file_path": "maboy/src/ppu/mem_frame.rs", "rank": 99, "score": 12.296689263661388 } ]
Rust
2020/src/bin/day19.rs
sebnow/adventofcode
9193b7f9181cd2249fd889c8e6723054f4e5b789
use anyhow::{anyhow, Result}; use std::collections::HashMap; use rayon::str::ParallelString; use rayon::iter::ParallelIterator; #[derive(Clone, PartialEq, Debug)] enum Rule { Char(char), Seq(Vec<i64>), Alt(Vec<i64>, Vec<i64>), } impl std::str::FromStr for Rule { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.contains('|') { let mut parts = s.split(" | "); let left = parts .next() .ok_or_else(|| anyhow!("missing left alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); let right = parts .next() .ok_or_else(|| anyhow!("missing right alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); Ok(Rule::Alt(left, right)) } else if s.starts_with('"') { let c = s .chars() .nth(1) .ok_or_else(|| anyhow!("missing reference"))?; Ok(Rule::Char(c)) } else { let ids = s .split(' ') .map(|id| { id.parse() .map_err(|err| anyhow!("invalid reference: {}", err)) }) .collect::<Result<_>>()?; Ok(Rule::Seq(ids)) } } } struct RuleEngine { rules: HashMap<i64, Rule>, } impl RuleEngine { pub fn new(rules: HashMap<i64, Rule>) -> Self { RuleEngine { rules } } pub fn matches(&self, s: &str) -> bool { self.match_rule_id(s, 0).contains(&Some("")) } fn match_rule_id<'a>(&self, s: &'a str, rule_id: i64) -> Vec<Option<&'a str>> { let rule = self.rules.get(&rule_id).unwrap(); self.match_rule(s, rule) } fn match_rule<'a>(&self, s: &'a str, rule: &Rule) -> Vec<Option<&'a str>> { match rule { Rule::Char(c) if s.chars().next() == Some(*c) => vec![Some(&s[1..])], Rule::Char(_) => vec![None], Rule::Seq(rs) => self.match_seq(s, rs), Rule::Alt(left, right) => self.match_alt(s, left, right), } } fn match_seq<'a>(&self, s: &'a str, rules: &[i64]) -> Vec<Option<&'a str>> { rules.iter().fold(vec![Some(s)], |ss, r| { ss.iter() .flat_map(|s| match s { Some(s) if !s.is_empty() => self.match_rule_id(s, *r), _ => vec![None], }) .collect() }) } fn match_alt<'a>(&self, s: &'a str, left: &[i64], right: &[i64]) -> Vec<Option<&'a str>> { [left, right] .iter() .flat_map(|rs| self.match_seq(s, rs)) .collect() } } impl std::str::FromStr for RuleEngine { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let rules = s .par_lines() .map(|l| { let mut parts = l.split(": "); Ok(( parts.next().ok_or_else(|| anyhow!("missing id"))?.parse()?, parts .next() .ok_or_else(|| anyhow!("missing definition"))? 
.parse()?, )) }) .collect::<Result<_>>()?; Ok(RuleEngine::new(rules)) } } fn parse_input<'a>(input: &'a str) -> (RuleEngine, impl ParallelIterator<Item = &'a str> + 'a) { let mut parts = input.split("\n\n"); ( parts.next().unwrap().parse().unwrap(), parts.next().unwrap().par_lines(), ) } fn part_one(input: &str) -> String { let (rules, messages) = parse_input(input); messages.filter(|m| rules.matches(m)).count().to_string() } fn part_two(input: &str) -> String { let input: String = input .lines() .map(|l| { if l.starts_with("8: ") { "8: 42 | 42 8" } else if l.starts_with("11: ") { "11: 42 31 | 42 11 31" } else { l } }) .collect::<Vec<&str>>() .join("\n"); part_one(&input) } fn main() { let input = include_str!("../../input/day19.txt"); println!("Part one: {}", part_one(&input)); println!("Part two: {}", part_two(&input)); } #[cfg(test)] mod test { use super::*; use aocutil::test_example; use std::str::FromStr; test_example!(example_one_1, part_one, 19, 1, 1); test_example!(example_one_2, part_one, 19, 1, 2); test_example!(example_one_3, part_one, 19, 1, 3); test_example!(example_one_4, part_one, 19, 1, 4); test_example!(example_one_5, part_one, 19, 1, 5); test_example!(example_one_6, part_one, 19, 1, 6); test_example!(example_two_1, part_two, 19, 2, 1); test_example!(example_two_2, part_two, 19, 2, 2); test_example!(example_two_3, part_two, 19, 2, 3); test_example!(example_two_4, part_two, 19, 2, 4); test_example!(example_two_5, part_two, 19, 2, 5); #[test] fn parse_char() -> Result<()> { assert_eq!(Rule::from_str("\"a\"")?, Rule::Char('a')); Ok(()) } #[test] fn parse_seq() -> Result<()> { assert_eq!(Rule::from_str("1 2")?, Rule::Seq(vec![1, 2])); Ok(()) } #[test] fn parse_alt() -> Result<()> { assert_eq!(Rule::from_str("1 | 2")?, alt(vec![1], vec![2]),); Ok(()) } #[test] fn match_char() { let mut rules = HashMap::new(); rules.insert(0, Rule::Char('a')); assert!(RuleEngine::new(rules).matches("a")); } #[test] fn match_seq() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); assert!(RuleEngine::new(rules).matches("ab")); } #[test] fn match_alt() { let mut rules = HashMap::new(); rules.insert(0, alt(vec![1], vec![2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("a")); assert!(engine.matches("b")); } #[test] fn match_seq_alt() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 1])); rules.insert(1, alt(vec![2], vec![3])); rules.insert(2, Rule::Char('a')); rules.insert(3, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("aa")); assert!(engine.matches("ab")); assert!(engine.matches("ba")); assert!(engine.matches("bb")); } fn alt(left: Vec<i64>, right: Vec<i64>) -> Rule { Rule::Alt(left, right) } }
use anyhow::{anyhow, Result}; use std::collections::HashMap; use rayon::str::ParallelString; use rayon::iter::ParallelIterator; #[derive(Clone, PartialEq, Debug)] enum Rule { Char(char), Seq(Vec<i64>), Alt(Vec<i64>, Vec<i64>), } impl std::str::FromStr for Rule { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.contains('|') { let mut parts = s.split(" | "); let left = parts .next() .ok_or_else(|| anyhow!("missing left alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); let right = parts .next() .ok_or_else(|| anyhow!("missing right alternate"))? .split(' ') .map(|n| n.parse().unwrap()) .collect(); Ok(Rule::Alt(left, right)) } else if s.starts_with('"') { let c = s .chars() .nth(1) .ok_or_else(|| anyhow!("missing reference"))?; Ok(Rule::Char(c)) } else { let ids = s .split(' ') .map(|id| { id.parse() .map_err(|err| anyhow!("invalid reference: {}", err)) }) .collect::<Result<_>>()?; Ok(Rule::Seq(ids)) } } } struct RuleEngine { rules: HashMap<i64, Rule>, } impl RuleEngine { pub fn new(rules: HashMap<i64, Rule>) -> Self { RuleEngine { rules } } pub fn matches(&self, s: &str) -> bool { self.match_rule_id(s, 0).contains(&Some("")) } fn match_rule_id<'a>(&self, s: &'a str, rule_id: i64) -> Vec<Option<&'a str>> { let rule = self.rules.get(&rule_id).unwrap(); self.match_rule(s, rule) } fn match_rule<'a>(&self, s: &'a str, rule: &Rule) -> Vec<Option<&'a str>> { match rule { Rule::Char(c) if s.chars().next() == Some(*c) => vec![Some(&s[1..])], Rule::Char(_) => vec![None], Rule::Seq(rs) => self.match_seq(s, rs), Rule::Alt(left, right) => self.match_alt(s, left, right), } } fn match_seq<'a>(&self, s: &'a str, rules: &[i64]) -> Vec<Option<&'a str>> { rules.iter().fold(vec![Some(s)], |ss, r| { ss.iter() .flat_map(|s| match s { Some(s) if !s.is_empty() => self.match_rule_id(s, *r), _ => vec![None], }) .collect() }) } fn match_alt<'a>(&self, s: &'a str, left: &[i64], right: &[i64]) -> Vec<Option<&'a str>> { [left, right] .iter() .flat_map(|rs| self.match_seq(s, rs)) .collect() } } impl std::str::FromStr for RuleEngine { type Err = anyhow::Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let rules = s .par_lines() .map(|l| { let mut parts = l.split(": ");
}) .collect::<Result<_>>()?; Ok(RuleEngine::new(rules)) } } fn parse_input<'a>(input: &'a str) -> (RuleEngine, impl ParallelIterator<Item = &'a str> + 'a) { let mut parts = input.split("\n\n"); ( parts.next().unwrap().parse().unwrap(), parts.next().unwrap().par_lines(), ) } fn part_one(input: &str) -> String { let (rules, messages) = parse_input(input); messages.filter(|m| rules.matches(m)).count().to_string() } fn part_two(input: &str) -> String { let input: String = input .lines() .map(|l| { if l.starts_with("8: ") { "8: 42 | 42 8" } else if l.starts_with("11: ") { "11: 42 31 | 42 11 31" } else { l } }) .collect::<Vec<&str>>() .join("\n"); part_one(&input) } fn main() { let input = include_str!("../../input/day19.txt"); println!("Part one: {}", part_one(&input)); println!("Part two: {}", part_two(&input)); } #[cfg(test)] mod test { use super::*; use aocutil::test_example; use std::str::FromStr; test_example!(example_one_1, part_one, 19, 1, 1); test_example!(example_one_2, part_one, 19, 1, 2); test_example!(example_one_3, part_one, 19, 1, 3); test_example!(example_one_4, part_one, 19, 1, 4); test_example!(example_one_5, part_one, 19, 1, 5); test_example!(example_one_6, part_one, 19, 1, 6); test_example!(example_two_1, part_two, 19, 2, 1); test_example!(example_two_2, part_two, 19, 2, 2); test_example!(example_two_3, part_two, 19, 2, 3); test_example!(example_two_4, part_two, 19, 2, 4); test_example!(example_two_5, part_two, 19, 2, 5); #[test] fn parse_char() -> Result<()> { assert_eq!(Rule::from_str("\"a\"")?, Rule::Char('a')); Ok(()) } #[test] fn parse_seq() -> Result<()> { assert_eq!(Rule::from_str("1 2")?, Rule::Seq(vec![1, 2])); Ok(()) } #[test] fn parse_alt() -> Result<()> { assert_eq!(Rule::from_str("1 | 2")?, alt(vec![1], vec![2]),); Ok(()) } #[test] fn match_char() { let mut rules = HashMap::new(); rules.insert(0, Rule::Char('a')); assert!(RuleEngine::new(rules).matches("a")); } #[test] fn match_seq() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); assert!(RuleEngine::new(rules).matches("ab")); } #[test] fn match_alt() { let mut rules = HashMap::new(); rules.insert(0, alt(vec![1], vec![2])); rules.insert(1, Rule::Char('a')); rules.insert(2, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("a")); assert!(engine.matches("b")); } #[test] fn match_seq_alt() { let mut rules = HashMap::new(); rules.insert(0, Rule::Seq(vec![1, 1])); rules.insert(1, alt(vec![2], vec![3])); rules.insert(2, Rule::Char('a')); rules.insert(3, Rule::Char('b')); let engine = RuleEngine::new(rules); assert!(engine.matches("aa")); assert!(engine.matches("ab")); assert!(engine.matches("ba")); assert!(engine.matches("bb")); } fn alt(left: Vec<i64>, right: Vec<i64>) -> Rule { Rule::Alt(left, right) } }
Ok((
    parts.next().ok_or_else(|| anyhow!("missing id"))?.parse()?,
    parts
        .next()
        .ok_or_else(|| anyhow!("missing definition"))?
        .parse()?,
))
call_expression
[ { "content": "pub fn answer_2(input: &str) -> Result<i64, failure::Error> {\n\n let map = parse_input(input);\n\n traverse(&map).map(|r| r.1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn example_1() {\n\n let input = [\n\n \" | \",\n\n \" | +--+ \",\n\n \" A | C \",\n\n \" F---|----E|--+ \",\n\n \" | | | D \",\n\n \" +B-+ +--+ \",\n\n \" \",\n\n ].join(\"\\n\");\n", "file_path": "2017/19/src/lib.rs", "rank": 0, "score": 274189.83512050327 }, { "content": "fn is_in_beam(input: &[i64], p: &Point) -> Result<bool> {\n\n let mut prg = intcode::Interpretor::new(input);\n\n prg.input(p.x);\n\n prg.input(p.y);\n\n\n\n match prg.run()? {\n\n intcode::State::Terminated(_) => Err(anyhow!(\"unexpectedly terminated\")),\n\n intcode::State::AwaitingInput => Err(anyhow!(\"unexectedly awaiting input\")),\n\n intcode::State::Suspended(x) => Ok(x == 1),\n\n }\n\n}\n\n\n", "file_path": "2019/src/day19.rs", "rank": 1, "score": 229699.16408395983 }, { "content": "#[aoc_generator(day17)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day17.rs", "rank": 2, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day23)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day23.rs", "rank": 3, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day19)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day19.rs", "rank": 4, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day15)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day15.rs", "rank": 5, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day22)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 6, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day7)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day07.rs", "rank": 7, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day18)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day18.rs", "rank": 8, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day11)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day11.rs", "rank": 9, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day24)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day24.rs", "rank": 10, "score": 
228877.17771221814 }, { "content": "#[aoc_generator(day14)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day14.rs", "rank": 11, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day2)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day02.rs", "rank": 12, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day20)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day20.rs", "rank": 13, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day5)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day05.rs", "rank": 14, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day25)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day25.rs", "rank": 15, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day9)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day09.rs", "rank": 16, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day21)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day21.rs", "rank": 17, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day13)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()))\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day13.rs", "rank": 18, "score": 228877.17771221814 }, { "content": "#[aoc_generator(day1)]\n\npub fn input_generator(input: &str) -> Vec<i64> {\n\n input.lines().map(|l| l.parse().unwrap()).collect()\n\n}\n\n\n", "file_path": "2019/src/day01.rs", "rank": 19, "score": 228877.17771221814 }, { "content": "pub fn answer_1(input: &str) -> Result<String, failure::Error> {\n\n let map = parse_input(input);\n\n traverse(&map).map(|r| r.0)\n\n}\n\n\n", "file_path": "2017/19/src/lib.rs", "rank": 20, "score": 218872.15796713874 }, { "content": "#[aoc(day7, part1)]\n\nfn answer_1(memory: &[i64]) -> Result<i64> {\n\n let mut phases: Vec<i64> = (0..=4).collect();\n\n let permutations = Heap::new(&mut phases);\n\n\n\n permutations\n\n .map(|ps| get_signal(&ps, memory))\n\n .max()\n\n .ok_or_else(|| anyhow!(\"unable to find maximum signal\"))\n\n}\n\n\n", "file_path": "2019/src/day07.rs", "rank": 21, "score": 193878.69497692893 }, { "content": "#[aoc(day19, part2)]\n\nfn answer_2(input: &[i64]) -> Result<i64> {\n\n find_square(input, 100)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_answer_2() {\n\n let input = input_generator(&std::fs::read_to_string(\"input/2019/day19.txt\").unwrap());\n\n assert_eq!(260_049, find_square(&input, 
5).unwrap());\n\n }\n\n}\n", "file_path": "2019/src/day19.rs", "rank": 22, "score": 193878.69497692893 }, { "content": "#[aoc(day9, part2)]\n\nfn answer_2(memory: &[i64]) -> Result<i64> {\n\n let mut i = Interpretor::new(memory);\n\n i.input(2);\n\n i.run_complete().map(|x| x.unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn examples_1() {\n\n assert_eq!(\n\n 1219070632396864,\n\n answer_1(&input_generator(\"1102,34915192,34915192,7,4,7,99,0\")).unwrap()\n\n );\n\n\n\n assert_eq!(\n\n 1125899906842624,\n\n answer_1(&input_generator(\"104,1125899906842624,99\")).unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn examples_2() {}\n\n}\n", "file_path": "2019/src/day09.rs", "rank": 23, "score": 193878.69497692893 }, { "content": "#[aoc(day13, part2)]\n\nfn answer_2(input: &[i64]) -> Result<i64> {\n\n let mut input = input.to_owned();\n\n input[0] = 2;\n\n\n\n let mut game = Game::new(&input);\n\n while !game.is_over() {\n\n game.update()?;\n\n }\n\n\n\n Ok(game.get_score())\n\n}\n", "file_path": "2019/src/day13.rs", "rank": 24, "score": 193878.69497692893 }, { "content": "#[aoc(day2, part1)]\n\nfn answer_1(memory: &[i64]) -> Result<i64> {\n\n run_with_input(memory, 12, 2)\n\n}\n\n\n", "file_path": "2019/src/day02.rs", "rank": 25, "score": 193878.69497692893 }, { "content": "#[aoc(day19, part1)]\n\nfn answer_1(input: &[i64]) -> Result<i64> {\n\n let mut sum = 0;\n\n for y in 0..50 {\n\n for x in 0..50 {\n\n if is_in_beam(input, &Point::new(x, y))? {\n\n sum += 1;\n\n }\n\n }\n\n }\n\n Ok(sum)\n\n}\n\n\n\n//fn gradient(input: &[i64]) -> Result<f64> {\n\n// let mut p = Point::new(0, 25);\n\n// for x in 0..100 {\n\n// p.x = x;\n\n// if is_in_beam(input, &p)? {\n\n// return Ok(p.y as f64 / p.x as f64);\n\n// }\n\n// }\n\n//\n\n// Err(anyhow!(\"beam not found\"))\n\n//}\n\n\n", "file_path": "2019/src/day19.rs", "rank": 26, "score": 193878.69497692893 }, { "content": "#[aoc(day7, part2)]\n\nfn answer_2(memory: &[i64]) -> Result<i64> {\n\n let mut phases: Vec<i64> = (5..=9).collect();\n\n let permutations = Heap::new(&mut phases);\n\n\n\n permutations\n\n .map(|ps| get_chained_signal(&ps, memory).unwrap())\n\n .max()\n\n .ok_or_else(|| anyhow!(\"unable to find maximum signal\"))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn examples_1() {\n\n assert_eq!(\n\n 43210,\n\n answer_1(&input_generator(\n\n \"3,15,3,16,1002,16,10,16,1,16,15,15,4,15,99,0,0\"\n", "file_path": "2019/src/day07.rs", "rank": 27, "score": 193878.69497692893 }, { "content": "#[aoc(day5, part1)]\n\nfn answer_1(memory: &[i64]) -> Result<i64> {\n\n let mut proc = Interpretor::new(&memory);\n\n proc.input(1);\n\n proc.run_complete().map(|x| x.unwrap())\n\n}\n\n\n", "file_path": "2019/src/day05.rs", "rank": 28, "score": 193878.69497692893 }, { "content": "#[aoc(day9, part1)]\n\nfn answer_1(memory: &[i64]) -> Result<i64> {\n\n let mut i = Interpretor::new(memory);\n\n i.input(1);\n\n i.run_complete().map(|x| x.unwrap())\n\n}\n\n\n", "file_path": "2019/src/day09.rs", "rank": 29, "score": 193878.69497692893 }, { "content": "#[aoc(day5, part2)]\n\nfn answer_2(memory: &[i64]) -> Result<i64> {\n\n let mut proc = Interpretor::new(&memory);\n\n proc.input(5);\n\n proc.run_complete().map(|x| x.unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::intcode::State;\n\n\n\n #[test]\n\n fn examples_2_1() {\n\n let input = input_generator(\"3,9,8,9,10,9,4,9,99,-1,8\");\n\n\n\n let mut proc = Interpretor::new(&input);\n\n proc.input(8);\n\n 
assert_eq!(State::Suspended(1), proc.run().unwrap());\n\n\n\n let mut proc = Interpretor::new(&input);\n", "file_path": "2019/src/day05.rs", "rank": 30, "score": 193878.69497692893 }, { "content": "pub fn traverse(map: &Vec<Vec<Option<Sym>>>) -> Result<(String, i64), failure::Error> {\n\n let mut steps = 0;\n\n let mut letters = String::new();\n\n let mut dir = Point(0, 1);\n\n let mut p = find_start(map).ok_or(format_err!(\"start missing\"))?;\n\n let mut last_p = Point(p.0, p.1);\n\n let (max_x, max_y) = (map[0].len() as i64, map.len() as i64);\n\n\n\n while 0 <= p.0 && p.0 < max_x && 0 <= p.1 && p.1 < max_y {\n\n match &map[p.1 as usize][p.0 as usize] {\n\n &Some(Sym::Letter(c)) => letters.push(c),\n\n &Some(Sym::Corner) => {\n\n let paths = find_paths(&map, &p, &last_p);\n\n if paths.len() > 1 {\n\n return Err(format_err!(\"too many paths\"))\n\n } else if paths.len() == 0 {\n\n println!(\"No paths\");\n\n break;\n\n }\n\n\n", "file_path": "2017/19/src/lib.rs", "rank": 31, "score": 193350.12079017493 }, { "content": "fn run_with_input(memory: &[i64], a: i64, b: i64) -> Result<i64> {\n\n let mut mem = memory.to_owned();\n\n mem[1] = a;\n\n mem[2] = b;\n\n\n\n let mut prg = Interpretor::new(&mem);\n\n prg.run_complete()?;\n\n Ok(prg.get(0))\n\n}\n\n\n", "file_path": "2019/src/day02.rs", "rank": 32, "score": 189936.23589280416 }, { "content": "fn find_square(input: &[i64], size: i64) -> Result<i64> {\n\n let size = size - 1;\n\n let mut p = Point::new(0, size);\n\n loop {\n\n p.y += 1;\n\n while !is_in_beam(input, &p)? {\n\n p.x += 1;\n\n }\n\n let origin = Point::new(p.x, p.y - size);\n\n let bounds = [\n\n origin,\n\n Point::new(p.x + size, p.y),\n\n Point::new(p.x + size, p.y - size),\n\n ];\n\n\n\n if bounds.iter().all(|p| is_in_beam(input, p).unwrap()) {\n\n return Ok((origin.x * 10_000) + origin.y);\n\n }\n\n }\n\n}\n\n\n", "file_path": "2019/src/day19.rs", "rank": 33, "score": 189123.62155972893 }, { "content": "fn is_valid(passphrase: &str) -> bool {\n\n let mut map: HashMap<Vec<char>, ()> = HashMap::new();\n\n let words = passphrase.split_whitespace();\n\n\n\n for word in words {\n\n let normalized = normalize(word);\n\n\n\n if map.contains_key(&normalized) {\n\n return false\n\n }\n\n\n\n map.insert(normalized, ());\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "2017/04/src/main.rs", "rank": 34, "score": 187518.1255772518 }, { "content": "fn get_chained_signal(phases: &[i64], memory: &[i64]) -> Result<i64> {\n\n let mut amplifiers: Vec<_> = phases\n\n .iter()\n\n .map(|&phase| {\n\n let mut i = Interpretor::new(&memory);\n\n i.input(phase);\n\n i\n\n })\n\n .collect();\n\n\n\n let last_amplifier = amplifiers.len() - 1;\n\n let mut signal = 0;\n\n loop {\n\n for (i, a) in amplifiers.iter_mut().enumerate() {\n\n a.input(signal);\n\n match a.run()? {\n\n State::Suspended(x) => signal = x,\n\n State::Terminated(x) => {\n\n if i == last_amplifier {\n\n return Ok(x.unwrap());\n\n } else {\n\n continue;\n\n }\n\n }\n\n State::AwaitingInput => return Err(anyhow!(\"expected input\")),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2019/src/day07.rs", "rank": 35, "score": 186856.0063428692 }, { "content": "fn run_test(program: &[i64], input: &[i64]) -> Result<Option<i64>> {\n\n let mut cpu = Interpretor::new(program);\n\n for &i in input {\n\n cpu.input(i);\n\n }\n\n\n\n cpu.run_complete()\n\n}\n\n\n\nmacro_rules! 
test {\n\n ($c:expr, $name:expr, $program:expr, $input:expr) => {\n\n $c.bench_function($name, |b| b.iter(|| run_test(&$program, &$input)));\n\n };\n\n}\n\n\n", "file_path": "2019/benches/intcode.rs", "rank": 36, "score": 184565.764423949 }, { "content": "fn normalize(word: &str) -> Vec<char> {\n\n let mut chars: Vec<char> = word.chars().collect();\n\n chars.sort_unstable();\n\n\n\n chars\n\n}\n\n\n", "file_path": "2017/04/src/main.rs", "rank": 37, "score": 181190.7756303436 }, { "content": "pub fn answer_1(input: &str) -> String {\n\n dance(\"abcdefghijklmnop\", &parse_input(input))\n\n}\n\n\n", "file_path": "2017/16/src/lib.rs", "rank": 38, "score": 179549.20035385407 }, { "content": "pub fn answer_2(input: &str) -> String {\n\n let mut seen: Vec<String> = Vec::new();\n\n let reps = 1000000000;\n\n let mut positions = String::from(\"abcdefghijklmnop\");\n\n for i in 0..reps {\n\n if seen.contains(&positions) {\n\n return seen[reps % i].to_owned();\n\n }\n\n seen.push(positions.clone());\n\n\n\n positions = dance(&positions, &parse_input(input));\n\n }\n\n positions\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "2017/16/src/lib.rs", "rank": 39, "score": 179549.20035385407 }, { "content": "#[aoc(day16, part1)]\n\nfn answer_1(input: &str) -> Result<String> {\n\n let mut signal = FFTIterator::new(input)\n\n .nth(99)\n\n .ok_or_else(|| anyhow!(\"oops\"))?;\n\n signal.truncate(8);\n\n\n\n Ok(signal)\n\n}\n\n\n", "file_path": "2019/src/day16.rs", "rank": 40, "score": 178610.65190990822 }, { "content": "#[aoc(day16, part2)]\n\nfn answer_2(input: &str) -> Result<String> {\n\n let offset = input[..7]\n\n .parse()\n\n .map_err(|e| anyhow!(\"unable to parse offset: {}\", e))?;\n\n\n\n let mut digits: Vec<i64> = input\n\n .chars()\n\n .filter_map(|c| c.to_digit(10))\n\n .cycle()\n\n .take(10_000 * input.len())\n\n .skip(offset)\n\n .map(|x| x as i64)\n\n .collect();\n\n\n\n for _ in 0..100 {\n\n let mut sum = 0;\n\n for i in (0..digits.len()).rev() {\n\n sum += digits[i];\n\n digits[i] = sum.abs() % 10;\n\n }\n", "file_path": "2019/src/day16.rs", "rank": 41, "score": 178610.65190990825 }, { "content": "#[aoc(day18, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day18.rs", "rank": 42, "score": 177175.86969761562 }, { "content": "#[aoc(day23, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day23.rs", "rank": 43, "score": 177175.86969761562 }, { "content": "#[aoc(day14, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day14.rs", "rank": 44, "score": 177175.86969761562 }, { "content": "#[aoc(day18, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day18.rs", "rank": 45, "score": 177175.86969761562 }, { "content": "#[aoc(day14, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day14.rs", "rank": 46, "score": 177175.86969761562 }, { "content": "#[aoc(day17, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n let grid = build_grid(input)?;\n\n\n\n let mut parameters = 0;\n\n for (&p, &tile) in grid.iter() {\n\n if tile == OPEN_SPACE {\n\n continue;\n\n }\n\n\n\n if get_neighbour_scaffolds(&grid, p).len() == 4 {\n\n parameters += p.x * p.y;\n\n }\n\n }\n\n Ok(parameters as usize)\n\n}\n\n\n", "file_path": "2019/src/day17.rs", "rank": 47, "score": 177175.86969761562 }, { "content": "#[aoc(day21, 
part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day21.rs", "rank": 48, "score": 177175.86969761562 }, { "content": "#[aoc(day22, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day22.rs", "rank": 49, "score": 177175.86969761562 }, { "content": "#[aoc(day11, part2)]\n\nfn answer_2(input: &[i64]) -> Result<String> {\n\n let hull = paint_hull(input, WHITE)?;\n\n\n\n let mut g = aocutil::Grid::new();\n\n for (p, c) in hull {\n\n g.insert(\n\n p,\n\n match c {\n\n BLACK => '░',\n\n WHITE => '█',\n\n _ => panic!(\"not a color\"),\n\n },\n\n );\n\n }\n\n\n\n Ok(format!(\"\\n{}\", g))\n\n}\n", "file_path": "2019/src/day11.rs", "rank": 50, "score": 177175.86969761562 }, { "content": "#[aoc(day20, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day20.rs", "rank": 51, "score": 177175.86969761562 }, { "content": "#[aoc(day25, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day25.rs", "rank": 52, "score": 177175.86969761562 }, { "content": "#[aoc(day17, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n let is_continuous = std::env::var(\"DEBUG\").map(|x| x != \"\").unwrap_or(false);\n\n let grid = build_grid(input)?;\n\n\n\n let paths = find_paths(&grid);\n\n for path in paths {\n\n println!(\"{}\", path);\n\n }\n\n\n\n let mut input = input.to_owned();\n\n input[0] = 2;\n\n\n\n let mut prg = intcode::Interpretor::new(&input);\n\n // R,8,L,4,R,4,R,10,R,8,R,8,L,4,R,4,R,10,R,8,L,12,L,12,R,8,R,8,R,10,R,4,R,4,L,12,L,12,R,8,R,8,R,10,R,4,R,4,L,12,L,12,R,8,R,8,R,10,R,4,R,4,R,10,R,4,R,4,R,8,L,4,R,4,R,10,R,8\n\n //\n\n // A: R,8,L,4,R,4,R,10,R,8\n\n // B: L,12,L,12,R,8,R,8\n\n // C: R,10,R,4,R,4\n\n prg.input_str(\"A,A,B,C,B,C,B,C,C,A\\n\");\n\n prg.input_str(\"R,8,L,4,R,4,R,10,R,8\\n\");\n", "file_path": "2019/src/day17.rs", "rank": 53, "score": 177175.86969761562 }, { "content": "#[aoc(day21, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day21.rs", "rank": 54, "score": 177175.86969761562 }, { "content": "#[aoc(day20, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day20.rs", "rank": 55, "score": 177175.86969761562 }, { "content": "#[aoc(day13, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n let mut game = Game::new(input);\n\n while !game.is_over() {\n\n game.update()?;\n\n }\n\n\n\n Ok(game.count_blocks())\n\n}\n\n\n", "file_path": "2019/src/day13.rs", "rank": 56, "score": 177175.86969761562 }, { "content": "#[aoc(day24, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day24.rs", "rank": 57, "score": 177175.86969761562 }, { "content": "#[aoc(day23, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day23.rs", "rank": 58, "score": 177175.86969761562 }, { "content": "#[aoc(day15, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n let prg = intcode::Interpretor::new(input);\n\n let (map, robot) = build_map(&prg)?;\n\n\n\n let minutes = fill(&map, &robot.unwrap().pos)?;\n\n Ok(minutes)\n\n}\n", "file_path": "2019/src/day15.rs", "rank": 59, "score": 177175.86969761562 }, { "content": "#[aoc(day22, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day22.rs", "rank": 60, "score": 177175.86969761562 }, { "content": 
"#[aoc(day15, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n let prg = intcode::Interpretor::new(input);\n\n let (_, robot) = build_map(&prg)?;\n\n\n\n Ok(robot.unwrap().steps)\n\n}\n\n\n", "file_path": "2019/src/day15.rs", "rank": 61, "score": 177175.86969761562 }, { "content": "#[aoc(day11, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(paint_hull(input, BLACK)?.len())\n\n}\n\n\n", "file_path": "2019/src/day11.rs", "rank": 62, "score": 177175.86969761562 }, { "content": "#[aoc(day24, part1)]\n\nfn answer_1(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "2019/src/day24.rs", "rank": 63, "score": 177175.86969761562 }, { "content": "#[aoc(day25, part2)]\n\nfn answer_2(input: &[i64]) -> Result<usize> {\n\n Ok(0)\n\n}\n", "file_path": "2019/src/day25.rs", "rank": 64, "score": 177175.86969761562 }, { "content": "#[aoc_generator(day16)]\n\npub fn input_generator(input: &str) -> String {\n\n input.trim().to_owned()\n\n}\n\n\n", "file_path": "2019/src/day16.rs", "rank": 65, "score": 176583.82036800642 }, { "content": "fn paint_hull(input: &[i64], starting_color: i64) -> Result<HashMap<Point, i64>> {\n\n let mut panels: HashMap<Point, i64> = HashMap::new();\n\n let mut painted: HashMap<Point, usize> = HashMap::new();\n\n let mut rbt = Robot {\n\n brain: Interpretor::new(input),\n\n direction: Direction::Up,\n\n position: Point::new(0, 0),\n\n };\n\n\n\n panels.insert(rbt.position, starting_color);\n\n\n\n loop {\n\n let pos = rbt.position;\n\n let color = panels.get(&pos).unwrap_or(&BLACK);\n\n rbt.brain.input(*color);\n\n\n\n match rbt.brain.run()? {\n\n State::Suspended(color) => {\n\n panels.insert(pos, color);\n\n *painted.entry(pos).or_insert(0) += 1;\n", "file_path": "2019/src/day11.rs", "rank": 66, "score": 176210.05493951286 }, { "content": "fn get_orbiting<'a>(orbits: &HashMap<&'a str, &'a str>, mut satellite: &'a str) -> Vec<&'a str> {\n\n let mut orbiting = Vec::new();\n\n\n\n while let Some(&o) = orbits.get(satellite) {\n\n orbiting.push(o);\n\n satellite = o;\n\n }\n\n\n\n orbiting\n\n}\n\n\n", "file_path": "2019/src/day06.rs", "rank": 67, "score": 174990.4737261046 }, { "content": "#[aoc(day12, part1)]\n\nfn answer_1(input: &[Point3D]) -> Result<i64> {\n\n let mut moons = map_moons(input);\n\n\n\n for _ in 0..1_000 {\n\n tick(&mut moons);\n\n }\n\n\n\n Ok(moons.iter().map(|m| m.energy()).sum())\n\n}\n\n\n", "file_path": "2019/src/day12.rs", "rank": 68, "score": 174232.60119246526 }, { "content": "#[aoc_generator(day8)]\n\npub fn input_generator(input: &str) -> Vec<Color> {\n\n input\n\n .lines()\n\n .map(|l| {\n\n l.chars()\n\n .map(|x| format!(\"{}\", x).parse().expect(\"want color\"))\n\n })\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day08.rs", "rank": 69, "score": 170744.00929352932 }, { "content": "#[aoc_generator(day10)]\n\npub fn input_generator(input: &str) -> Vec<Point> {\n\n input\n\n .lines()\n\n .enumerate()\n\n .map(|(y, l)| {\n\n l.chars().enumerate().filter_map(move |(x, p)| {\n\n if p == '#' {\n\n Some(Point::new(x as f64, y as f64))\n\n } else {\n\n None\n\n }\n\n })\n\n })\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day10.rs", "rank": 70, "score": 170744.00929352932 }, { "content": "#[aoc_generator(day4)]\n\npub fn input_generator(input: &str) -> Vec<Password> {\n\n let mut i = input.split('-').map(|x| x.parse().unwrap());\n\n let min = i.next().unwrap();\n\n let max = i.next().unwrap();\n\n\n\n (min..=max).map(Password::new).collect()\n\n}\n\n\n", "file_path": 
"2019/src/day04.rs", "rank": 71, "score": 170744.00929352932 }, { "content": "fn build_grid(input: &[i64]) -> Result<Grid<Tile>> {\n\n let map = build_map(input)?;\n\n let height = map.len();\n\n let mut grid = Grid::new();\n\n for (y, r) in map.iter().enumerate() {\n\n for (x, &t) in r.iter().enumerate() {\n\n grid.insert(Point::new(x as i64, (height - y) as i64), t);\n\n }\n\n }\n\n\n\n Ok(grid)\n\n}\n\n\n", "file_path": "2019/src/day17.rs", "rank": 72, "score": 168475.29306530283 }, { "content": "#[aoc_generator(day12)]\n\npub fn input_generator(input: &str) -> Vec<Point3D> {\n\n let re = Regex::new(r\"<x=(-?\\d+), y=(-?\\d+), z=(-?\\d+)>\").unwrap();\n\n input\n\n .lines()\n\n .map(|l| {\n\n let c = re.captures(l).unwrap();\n\n Point3D {\n\n x: c.get(1).unwrap().as_str().parse().unwrap(),\n\n y: c.get(2).unwrap().as_str().parse().unwrap(),\n\n z: c.get(3).unwrap().as_str().parse().unwrap(),\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day12.rs", "rank": 73, "score": 168110.13868433848 }, { "content": "#[aoc_generator(day6)]\n\npub fn input_generator(input: &str) -> Vec<Vec<String>> {\n\n input\n\n .lines()\n\n .map(|l| l.split(')').map(|x| x.to_owned()).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day06.rs", "rank": 74, "score": 165283.78942960416 }, { "content": "#[aoc_generator(day3)]\n\npub fn input_generator(input: &str) -> Vec<Vec<Vector>> {\n\n input\n\n .lines()\n\n .map(|l| l.split(',').map(|x| x.parse().unwrap()).collect())\n\n .collect()\n\n}\n\n\n", "file_path": "2019/src/day03.rs", "rank": 75, "score": 165283.78942960416 }, { "content": "fn build_map(input: &[i64]) -> Result<Vec<Vec<Tile>>> {\n\n let mut prg = intcode::Interpretor::new(input);\n\n let mut map = Vec::new();\n\n let mut current = Vec::new();\n\n\n\n loop {\n\n match prg.run()? 
{\n\n intcode::State::Suspended(o) => match o {\n\n NL => {\n\n let len = current.len();\n\n map.push(current);\n\n current = Vec::with_capacity(len);\n\n }\n\n _ => {\n\n current.push(Tile::new(o));\n\n }\n\n },\n\n intcode::State::AwaitingInput => return Err(anyhow!(\"unexpectedly awaiting input\")),\n\n intcode::State::Terminated(_) => break,\n\n }\n\n }\n\n\n\n Ok(map)\n\n}\n\n\n", "file_path": "2019/src/day17.rs", "rank": 76, "score": 163091.91799560867 }, { "content": "fn t1(c: &mut Criterion) {\n\n test!(c, \"Sum of primes\", SUM_OF_PRIMES, [100000]);\n\n}\n", "file_path": "2019/benches/intcode.rs", "rank": 77, "score": 157393.08511425165 }, { "content": "fn t5(c: &mut Criterion) {\n\n test!(c, \"Prime factors\", PRIME_FACTOR, [2147483647]);\n\n}\n", "file_path": "2019/benches/intcode.rs", "rank": 78, "score": 157393.08511425165 }, { "content": "fn t4(c: &mut Criterion) {\n\n test!(c, \"DivMod\", DIV_MOD, [1024, 3]);\n\n}\n", "file_path": "2019/benches/intcode.rs", "rank": 79, "score": 157393.08511425165 }, { "content": "fn t6(c: &mut Criterion) {\n\n test!(c, \"Prime factors\", PRIME_FACTOR, [19201644899]);\n\n}\n\n\n\ncriterion_group!(benches, t1, t2, t3, t4, t5, t6);\n\ncriterion_main!(benches);\n", "file_path": "2019/benches/intcode.rs", "rank": 80, "score": 157393.08511425165 }, { "content": "fn t2(c: &mut Criterion) {\n\n test!(c, \"Ackermann\", ACKERMANN, [3, 6]);\n\n}\n", "file_path": "2019/benches/intcode.rs", "rank": 81, "score": 157393.08511425165 }, { "content": "fn t3(c: &mut Criterion) {\n\n test!(c, \"Isqrt\", ISQRT, [130]);\n\n}\n", "file_path": "2019/benches/intcode.rs", "rank": 82, "score": 157393.08511425165 }, { "content": "pub fn decode(input: &[Color], width: usize, height: usize) -> Result<Grid> {\n\n let mut g = Grid::default();\n\n let layers: Vec<&[Color]> = input.chunks(width * height).collect();\n\n\n\n for y in 0..height {\n\n for x in 0..width {\n\n let i = y * width + x;\n\n let p = layers\n\n .iter()\n\n .find_map(|l| {\n\n if l[i] != Color::Transparent {\n\n Some(l[i])\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap_or(Color::Transparent);\n\n g.insert(Point::new(x as i64, (height - 1 - y) as i64), p);\n\n }\n\n }\n\n\n\n Ok(g)\n\n}\n\n\n", "file_path": "2019/src/day08.rs", "rank": 83, "score": 152701.7791886886 }, { "content": "#[derive(PartialEq, Clone, Copy, Debug)]\n\nstruct Tile(i64);\n\n\n\nimpl Tile {\n\n pub fn new(code: i64) -> Self {\n\n Tile(code)\n\n }\n\n\n\n pub fn is_walkable(&self) -> bool {\n\n match self.as_char() {\n\n '^' | '>' | '<' | 'v' | 'X' | '#' => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn as_char(&self) -> char {\n\n (self.0 as u8) as char\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Tile {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.as_char())\n\n }\n\n}\n\n\n", "file_path": "2019/src/day17.rs", "rank": 84, "score": 144219.98068532514 }, { "content": "fn mass(x: &i64) -> i64 {\n\n (x / 3) - 2\n\n}\n\n\n", "file_path": "2019/src/day01.rs", "rank": 85, "score": 137469.081606969 }, { "content": "#[aoc(day1, part2)]\n\nfn answer_2(input: &[i64]) -> i64 {\n\n input.iter().map(mass_with_fuel).sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn examples_2() {\n\n assert_eq!(2, answer_2(&vec!(14)));\n\n assert_eq!(966, answer_2(&vec!(1969)));\n\n assert_eq!(50346, answer_2(&vec!(100756)));\n\n }\n\n}\n", "file_path": "2019/src/day01.rs", "rank": 86, "score": 135451.04806369802 }, { "content": "#[aoc(day1, part1)]\n\nfn answer_1(input: 
&[i64]) -> i64 {\n\n input.iter().map(mass).sum()\n\n}\n\n\n", "file_path": "2019/src/day01.rs", "rank": 87, "score": 135451.04806369802 }, { "content": "#[aoc(day2, part2)]\n\nfn answer_2(memory: &[i64]) -> i64 {\n\n (0..99)\n\n .cartesian_product(0..99)\n\n .filter_map(|(x, y)| {\n\n let result = run_with_input(memory, x, y).unwrap();\n\n if result == 19_690_720 {\n\n Some(100 * x + y)\n\n } else {\n\n None\n\n }\n\n })\n\n .last()\n\n .unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn run(memory: &[i64]) -> i64 {\n", "file_path": "2019/src/day02.rs", "rank": 88, "score": 135451.04806369802 }, { "content": "fn mass_with_fuel(x: &i64) -> i64 {\n\n let mut sum = 0;\n\n let mut last_mass = *x;\n\n loop {\n\n let m = mass(&last_mass);\n\n if m <= 0 {\n\n return sum;\n\n }\n\n sum += m;\n\n last_mass = m;\n\n }\n\n}\n\n\n", "file_path": "2019/src/day01.rs", "rank": 89, "score": 135451.04806369802 }, { "content": "fn get_signal(phases: &[i64], memory: &[i64]) -> i64 {\n\n let amplifiers = phases.iter().map(|&phase| {\n\n let mut p = Interpretor::new(&memory);\n\n p.input(phase);\n\n p\n\n });\n\n\n\n amplifiers.fold(0, |signal, mut p| {\n\n p.input(signal);\n\n p.run_complete().map(|x| x.unwrap()).unwrap()\n\n })\n\n}\n\n\n", "file_path": "2019/src/day07.rs", "rank": 90, "score": 134854.0251556201 }, { "content": "fn answer<R: BufRead>(reader: R) -> Answer {\n\n let count = reader.lines().map(|l| l.unwrap()).filter(|s| is_valid(&s)).count();\n\n\n\n Answer {\n\n count: count as i32,\n\n }\n\n}\n\n\n", "file_path": "2017/04/src/main.rs", "rank": 91, "score": 130780.52647023737 }, { "content": "fn parse_input<R: BufRead>(reader: R) -> Graph {\n\n let mut g = Graph::new();\n\n let re = Regex::new(r\"^(\\d+) <-> ((?:(?:\\d+)(?:, )?)+)$\").unwrap();\n\n\n\n for l in reader.lines().map(|l| l.unwrap()) {\n\n let caps = re.captures(&l).unwrap();\n\n let pid = caps.get(1).unwrap().as_str().parse::<PID>().unwrap();\n\n\n\n for p in caps.get(2).map(|m| parse_pids(m.as_str())).unwrap() {\n\n g.add_edge(pid, p);\n\n }\n\n }\n\n\n\n g\n\n}\n\n\n", "file_path": "2017/12/src/main.rs", "rank": 92, "score": 129113.73950537645 }, { "content": "fn parse_input<R: BufRead>(reader: R) -> Vec<i32> {\n\n reader\n\n .bytes()\n\n .map(|x| x.unwrap())\n\n .filter_map(|x| if x >= b'0' {Some((x - b'0') as i32)} else {None})\n\n .collect()\n\n}\n\n\n", "file_path": "2017/01/src/main.rs", "rank": 93, "score": 125906.05257377942 }, { "content": "fn process(input: &str) -> (HashMap<&str, i32>, i32) {\n\n let mut max_value = 0;\n\n let mut registers = HashMap::new();\n\n for instr in input.lines() {\n\n let default = 0;\n\n let mut tokens = instr.split_whitespace();\n\n let register = tokens.next().unwrap();\n\n let cmd = tokens.next().unwrap();\n\n let amount = tokens.next().unwrap().parse::<i32>().unwrap();\n\n tokens.next().unwrap();\n\n let target_register = tokens.next().unwrap();\n\n let cmp = tokens.next().unwrap();\n\n let bounds = tokens.next().unwrap().parse::<i32>().unwrap();\n\n\n\n let &target_value = registers.get(target_register).unwrap_or(&default);\n\n let cond = match cmp {\n\n \">\" => target_value > bounds,\n\n \">=\" => target_value >= bounds,\n\n \"<\" => target_value < bounds,\n\n \"<=\" => target_value <= bounds,\n", "file_path": "2017/08/src/main.rs", "rank": 94, "score": 125128.76736363003 }, { "content": "fn parse_steps<R: BufRead>(reader: R) -> Vec<CubeCoord> {\n\n let mut steps = vec![(0, 0, 0)];\n\n let dirs = reader\n\n .split(b',')\n\n .map(|x| 
String::from_utf8(x.unwrap()).unwrap().trim().to_owned());\n\n\n\n for dir in dirs {\n\n let &last = steps.last().unwrap();\n\n steps.push(step(last, dir.as_str()));\n\n }\n\n\n\n steps\n\n}\n\n\n", "file_path": "2017/11/src/main.rs", "rank": 95, "score": 124389.55199647475 }, { "content": "fn answer_1(input: &str) -> i32 {\n\n input.lines().fold(0, process)\n\n}\n\n\n", "file_path": "2018/01/main.rs", "rank": 96, "score": 122935.45854337479 }, { "content": "fn answer_2(input: &str) -> i32 {\n\n let mut history = HashSet::new();\n\n let mut cur = 0;\n\n\n\n history.insert(cur);\n\n\n\n loop {\n\n for l in input.lines() {\n\n cur = process(cur, l);\n\n\n\n if history.contains(&cur) {\n\n return cur\n\n }\n\n\n\n history.insert(cur);\n\n }\n\n }\n\n}\n\n\n", "file_path": "2018/01/main.rs", "rank": 97, "score": 122935.45854337479 }, { "content": "fn parse_input<R: BufRead>(reader: R) -> Vec<Vec<i32>> {\n\n reader.lines().map(|l| parse_line(l.unwrap())).collect()\n\n}\n\n\n", "file_path": "2017/02/src/main.rs", "rank": 98, "score": 122862.22516943814 }, { "content": "fn answer_2(input: &str) -> String {\n\n let mut lengths: Vec<usize> = input.as_bytes()\n\n .to_vec()\n\n .iter()\n\n .map(|x| *x as usize)\n\n .collect();\n\n lengths.extend_from_slice(&[17, 31, 73, 47, 23]);\n\n\n\n let mut circle = StringCircle::new();\n\n for _ in 0..64 {\n\n circle.hash(&lengths);\n\n }\n\n\n\n let hash = circle.dense_hash();\n\n hash.iter().map(|x| format!(\"{:02x}\", x)).collect()\n\n}\n\n\n", "file_path": "2017/10/src/main.rs", "rank": 99, "score": 120683.62906105364 } ]
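The retrieved snippets above include the fuel calculation from `2019/src/day01.rs` (`mass` and `mass_with_fuel`), which is easier to follow with a worked value. The sketch below restates those two functions as they appear in the snippets and checks the `1969 -> 966` case from the accompanying test; the `main` body is only illustrative and is not part of any dataset row.

```rust
// Restated from the 2019/src/day01.rs snippets above; `main` is illustrative only.
fn mass(x: &i64) -> i64 {
    (x / 3) - 2
}

fn mass_with_fuel(x: &i64) -> i64 {
    let mut sum = 0;
    let mut last_mass = *x;
    loop {
        let m = mass(&last_mass);
        if m <= 0 {
            return sum;
        }
        sum += m;
        last_mass = m;
    }
}

fn main() {
    // 1969 -> 654 -> 216 -> 70 -> 21 -> 5; the next step goes negative, so the sum is 966,
    // matching the assert in the snippet's test module.
    assert_eq!(mass(&1969), 654);
    assert_eq!(mass_with_fuel(&1969), 966);
}
```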
Rust
src/unix.rs
dropbox/rust-subprocess-communicate
f6d5c664d4f4210a60e113f1300230ffa6e8ff4d
#![cfg(unix)] extern crate mio; use std::mem; use mio::*; use std::io; use std::process; use std::cmp; use mio::deprecated::{TryRead, TryWrite}; use mio::deprecated::{PipeReader, PipeWriter}; #[allow(unused_imports)] use std::process::{Command, Stdio, Child}; struct SubprocessClient { stdin: Option<PipeWriter>, stdout: Option<PipeReader>, stderr: Option<PipeReader>, stdin_token : Token, stdout_token : Token, stderr_token : Token, output : Vec<u8>, output_stderr : Vec<u8>, input : Vec<u8>, input_offset : usize, buf : [u8; 65536], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool, has_shutdown : bool, child_shutdown : bool, } impl SubprocessClient { fn new(stdin: Option<PipeWriter>, stdout : Option<PipeReader>, stderr : Option<PipeReader>, data : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> SubprocessClient { SubprocessClient { stdin: stdin, stdout: stdout, stderr: stderr, stdin_token : Token(0), stdout_token : Token(1), stderr_token : Token(2), output : Vec::<u8>::new(), output_stderr : Vec::<u8>::new(), buf : [0; 65536], input : data.to_vec(), input_offset : 0, stdout_bound : stdout_bound, stderr_bound : stderr_bound, return_on_stdout_fill : return_on_stdout_fill, has_shutdown : false, child_shutdown : false, } } fn readable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stdout_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stdout { None => unreachable!(), Some (ref mut stdout) => match stdout.try_read(&mut self.buf[..buf_bound]) { Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stdout_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.return_on_stdout_fill || self.stderr.is_none() || self.stderr_bound.unwrap_or(1) == 0 { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); eof = true; } } }, } if do_extend { self.output.extend(&self.buf[0..r]); } } }, Ok(None) => {}, Err(e) => { return Err(e); } } }; if eof { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); if self.stderr.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn readable_stderr(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stderr_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stderr { None => unreachable!(), Some(ref mut stderr) => match stderr.try_read(&mut self.buf[..buf_bound]) { Ok(None) => { } Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stderr_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.stdout.is_none() || self.stdout_bound.unwrap_or(1) == 0 { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); eof = true; } } }, } if do_extend { 
self.output_stderr.extend(&self.buf[0..r]); } } } Err(e) => { return Err(e); } } }; if eof { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); if self.stdout.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn writable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut ok = true; match self.stdin { None => unreachable!(), Some(ref mut stdin) => match stdin.try_write(&(&self.input)[self.input_offset..]) { Ok(None) => { }, Ok(Some(r)) => { if r == 0 { ok = false; } else { self.input_offset += r; } }, Err(_e) => { ok = false; }, } } if self.input_offset == self.input.len() || !ok { match self.stdin { Some(ref sub_stdin) => match poll.deregister(sub_stdin) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdin.take()); match self.stderr { None => match self.stdout { None => { self.has_shutdown = true; self.child_shutdown = true }, Some(_) => {}, }, Some(_) => {}, } } return Ok(()); } fn ready(&mut self, poll: &mut Poll, token: Token, _events: Ready) { if token == self.stderr_token { let _x = self.readable_stderr(poll); } else { let _x = self.readable(poll); } if token == self.stdin_token { let _y = self.writable(poll); } } } pub fn from_stdin(mut stdin: Option<process::ChildStdin>) -> io::Result<Option<PipeWriter> > { match stdin { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeWriter::from_stdin(stdin.take().unwrap()).unwrap())) } pub fn from_stdout(mut stdout: Option<process::ChildStdout>) -> io::Result<Option<PipeReader> > { match stdout { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stdout(stdout.take().unwrap()).unwrap())) } pub fn from_stderr(mut stderr: Option<process::ChildStderr>) -> io::Result<Option<PipeReader> > { match stderr { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stderr(stderr.take().unwrap()).unwrap())) } pub fn subprocess_communicate(process : &mut Child, input : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> (Vec<u8>, Vec<u8>, io::Result<()>) { let stdin : Option<PipeWriter>; match from_stdin(process.stdin.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdin = pipe, } let stdout : Option<PipeReader>; match from_stdout(process.stdout.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdout = pipe, } let stderr : Option<PipeReader>; match from_stderr(process.stderr.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stderr = pipe, } let mut subprocess = SubprocessClient::new(stdin, stdout, stderr, input, stdout_bound, stderr_bound, return_on_stdout_fill); let mut poll = Poll::new().unwrap(); match subprocess.stdout { Some(ref sub_stdout) => match poll.register(sub_stdout, subprocess.stdout_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) =>{}, }, None => {}, } match subprocess.stderr { Some(ref sub_stderr) => match poll.register(sub_stderr, subprocess.stderr_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } match subprocess.stdin { Some (ref sub_stdin) => match poll.register(sub_stdin, subprocess.stdin_token, Ready::writable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => 
{}, }, None => {}, } let mut events = Events::with_capacity(1024); while !subprocess.child_shutdown { poll.poll(&mut events, None).unwrap(); for event in events.iter() { subprocess.ready(&mut poll, event.token(), event.kind()) } } let ret_stdout = mem::replace(&mut subprocess.output, Vec::<u8>::new()); let ret_stderr = mem::replace(&mut subprocess.output_stderr, Vec::<u8>::new()); return (ret_stdout, ret_stderr, Ok(())); } #[allow(dead_code)] const TEST_DATA : [u8; 1024 * 4096] = [42; 1024 * 4096]; #[test] fn test_subprocess_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], None, None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len(), ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA.iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(TEST_DATA.len() - 1), None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len() - 1, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA[0..TEST_DATA.len() - 1].iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_yes_stderr0() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(0), false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(bound), true); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes_no_stderr() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), None, false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } }
#![cfg(unix)] extern crate mio; use std::mem; use mio::*; use std::io; use std::process; use std::cmp; use mio::deprecated::{TryRead, TryWrite}; use mio::deprecated::{PipeReader, PipeWriter}; #[allow(unused_imports)] use std::process::{Command, Stdio, Child}; struct SubprocessClient { stdin: Option<PipeWriter>, stdout: Option<PipeReader>, stderr: Option<PipeReader>, stdin_token : Token, stdout_token : Token, stderr_token : Token, output : Vec<u8>, output_stderr : Vec<u8>, input : Vec<u8>, input_offset : usize, buf : [u8; 65536], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool, has_shutdown : bool, child_shutdown : bool, } impl SubprocessClient { fn new(stdin: Option<PipeWriter>, stdout : Option<PipeReader>, stderr : Option<PipeReader>, data : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> SubprocessClient { SubprocessClient { stdin: stdin, stdout: stdout, stderr: stderr, stdin_token : Token(0), stdout_token : Token(1), stderr_token : Token(2), output : Vec::<u8>::new(), output_stderr : Vec::<u8>::new(), buf : [0; 65536], input : data.to_vec(), input_offset : 0, stdout_bound : stdout_bound, stderr_bound : stderr_bound, return_on_stdout_fill : return_on_stdout_fill, has_shutdown : false, child_shutdown : false, } } fn readable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stdout_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stdout { None => unreachable!(), Some (ref mut stdout) => match stdout.try_read(&mut self.buf[..buf_bound]) { Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stdout_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; } else { *bound = 0; do_extend = false; if self.return_on_stdout_fill || self.stderr.is_none() || self.stderr_bound.unwrap_or(1) == 0 { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); eof = true; } } }, } if do_extend { self.output.extend(&self.buf[0..r]); } } }, Ok(None) => {}, Err(e) => { return Err(e); } } }; if eof { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); if self.stderr.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); } fn readable_stderr(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut eof = false; let mut buf_bound : usize = cmp::min(self.stderr_bound.unwrap_or(self.buf.len()), self.buf.len()); if buf_bound == 0 { buf_bound = self.buf.len(); } match self.stderr { None => unreachable!(), Some(ref mut stderr) => match stderr.try_read(&mut self.buf[..buf_bound]) { Ok(None) => { } Ok(Some(r)) => { if r == 0 { eof = true; } else { let do_extend : bool; match self.stderr_bound { None => do_extend = true, Some(ref mut bound) => { if *bound >= r { *bound = *bound - r; do_extend = true; }
fn writable(&mut self, poll: &mut Poll) -> io::Result<()> { if self.has_shutdown { return Ok(()); } let mut ok = true; match self.stdin { None => unreachable!(), Some(ref mut stdin) => match stdin.try_write(&(&self.input)[self.input_offset..]) { Ok(None) => { }, Ok(Some(r)) => { if r == 0 { ok = false; } else { self.input_offset += r; } }, Err(_e) => { ok = false; }, } } if self.input_offset == self.input.len() || !ok { match self.stdin { Some(ref sub_stdin) => match poll.deregister(sub_stdin) { Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdin.take()); match self.stderr { None => match self.stdout { None => { self.has_shutdown = true; self.child_shutdown = true }, Some(_) => {}, }, Some(_) => {}, } } return Ok(()); } fn ready(&mut self, poll: &mut Poll, token: Token, _events: Ready) { if token == self.stderr_token { let _x = self.readable_stderr(poll); } else { let _x = self.readable(poll); } if token == self.stdin_token { let _y = self.writable(poll); } } } pub fn from_stdin(mut stdin: Option<process::ChildStdin>) -> io::Result<Option<PipeWriter> > { match stdin { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeWriter::from_stdin(stdin.take().unwrap()).unwrap())) } pub fn from_stdout(mut stdout: Option<process::ChildStdout>) -> io::Result<Option<PipeReader> > { match stdout { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stdout(stdout.take().unwrap()).unwrap())) } pub fn from_stderr(mut stderr: Option<process::ChildStderr>) -> io::Result<Option<PipeReader> > { match stderr { None => return Ok(None), Some(_) => {}, } Ok(Some(PipeReader::from_stderr(stderr.take().unwrap()).unwrap())) } pub fn subprocess_communicate(process : &mut Child, input : &[u8], stdout_bound : Option<usize>, stderr_bound : Option<usize>, return_on_stdout_fill : bool) -> (Vec<u8>, Vec<u8>, io::Result<()>) { let stdin : Option<PipeWriter>; match from_stdin(process.stdin.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdin = pipe, } let stdout : Option<PipeReader>; match from_stdout(process.stdout.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stdout = pipe, } let stderr : Option<PipeReader>; match from_stderr(process.stderr.take()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(pipe) => stderr = pipe, } let mut subprocess = SubprocessClient::new(stdin, stdout, stderr, input, stdout_bound, stderr_bound, return_on_stdout_fill); let mut poll = Poll::new().unwrap(); match subprocess.stdout { Some(ref sub_stdout) => match poll.register(sub_stdout, subprocess.stdout_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) =>{}, }, None => {}, } match subprocess.stderr { Some(ref sub_stderr) => match poll.register(sub_stderr, subprocess.stderr_token, Ready::readable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } match subprocess.stdin { Some (ref sub_stdin) => match poll.register(sub_stdin, subprocess.stdin_token, Ready::writable(), PollOpt::level()) { Err(e) => return (Vec::<u8>::new(), Vec::<u8>::new(), Err(e)), Ok(_) => {}, }, None => {}, } let mut events = Events::with_capacity(1024); while !subprocess.child_shutdown { poll.poll(&mut events, None).unwrap(); for event in events.iter() { subprocess.ready(&mut poll, event.token(), event.kind()) } } let ret_stdout = mem::replace(&mut subprocess.output, Vec::<u8>::new()); let ret_stderr = mem::replace(&mut 
subprocess.output_stderr, Vec::<u8>::new()); return (ret_stdout, ret_stderr, Ok(())); } #[allow(dead_code)] const TEST_DATA : [u8; 1024 * 4096] = [42; 1024 * 4096]; #[test] fn test_subprocess_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], None, None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len(), ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA.iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_pipe() { let mut process = Command::new("/bin/cat") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(TEST_DATA.len() - 1), None, true); process.wait().unwrap(); err.unwrap(); assert_eq!(TEST_DATA.len() - 1, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in TEST_DATA[0..TEST_DATA.len() - 1].iter() { assert_eq!(*item, ret_stdout[i]); i += 1; } } #[test] fn test_subprocess_bounded_yes_stderr0() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(0), false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), Some(bound), true); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } } #[test] fn test_subprocess_bounded_yes_no_stderr() { let mut process = Command::new("/usr/bin/yes") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn().unwrap(); let bound : usize = 130000; let (ret_stdout, ret_stderr, err) = subprocess_communicate(&mut process, &TEST_DATA[..], Some(bound), None, false); err.unwrap(); assert_eq!(bound, ret_stdout.len()); assert_eq!(0usize, ret_stderr.len()); let mut i : usize = 0; for item in ret_stdout.iter() { let val : u8; if (i & 1) == 1 { val = '\n' as u8; } else { val = 'y' as u8; } assert_eq!(*item, val); i += 1; } }
else { *bound = 0; do_extend = false; if self.stdout.is_none() || self.stdout_bound.unwrap_or(1) == 0 { match self.stdout { Some(ref sub_stdout) => match poll.deregister(sub_stdout){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stdout.take()); eof = true; } } }, } if do_extend { self.output_stderr.extend(&self.buf[0..r]); } } } Err(e) => { return Err(e); } } }; if eof { match self.stderr { Some(ref sub_stderr) => match poll.deregister(sub_stderr){ Err(e) => return Err(e), _ => {}, }, _ => {}, } drop(self.stderr.take()); if self.stdout.is_none() { self.has_shutdown = true; self.child_shutdown = true; } } return Ok(()); }
function_block-function_prefix_line
[ { "content": "#subprocess-communicate\n\n[![crates.io](http://meritbadge.herokuapp.com/subprocess-communicate)](https://crates.io/crates/subprocess-communicate)\n\n[![Build Status](https://travis-ci.org/dropbox/rust-subprocess-communicate.svg?branch=master)](https://travis-ci.org/dropbox/rust-subprocess-communicate)\n\n## Project Requirements\n\nThis crate should give an interface to communicating with child processes\n\nsimilar to python's subprocess.communicate interface from the Popen class.\n\n\n\nPass an input u8 slice and the result should be two Vec<u8> one for stdout and for stderr\n\nAlso an error may be returned in case the subprocess pipes were unable to be changed into nonblock mode\n\nor the event loop was unable to be activated.\n\n\n\n\n\nUnlike the python interface, this also supports two optional arguments to bound the maximum output size of\n\nstdout and stderr respectively.\n\nThis is to prevent a process like /usr/bin/yes from actively consuming all system memory and it\n\nhelps reason about maximum resource consumption\n\n\n\n## Usage\n\n\n\n```\n\n let process =\n\n Command::new(\"/bin/cat\")\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn().unwrap();\n\n let (ret_stdout, ret_stderr, err) = subprocess_communicate::subprocess_communicate(process, // child subprocess\n\n &TEST_DATA[..], // stdin input\n\n Some(TEST_DATA.len()), // stdout bound\n\n None); // stderr bound (if any)\n\n err.unwrap();\n\n assert_eq!(TEST_DATA.len() - 1, ret_stdout.len());\n\n assert_eq!(0usize, ret_stderr.len());\n\n let mut i : usize = 0;\n\n for item in TEST_DATA[0..TEST_DATA.len()].iter() {\n\n assert_eq!(*item, ret_stdout[i]);\n\n i += 1;\n\n }\n\n```\n\n\n", "file_path": "README.md", "rank": 24, "score": 11.611199181048962 }, { "content": "extern crate mio;\n\nmod unix;\n\n#[cfg(unix)]\n\npub use unix::subprocess_communicate;\n", "file_path": "src/lib.rs", "rank": 26, "score": 6.898492921197767 } ]
Rust
liblumen_alloc/src/erts/process/alloc.rs
mlwilkerson/lumen
048df6c0840c11496e2d15aa9af2e4a8d07a6e0f
mod heap; mod iter; mod process_heap_alloc; mod semispace; mod stack_alloc; mod stack_primitives; mod term_alloc; mod virtual_alloc; mod virtual_binary_heap; pub use self::heap::{Heap, HeapAlloc}; pub use self::iter::HeapIter; pub use self::process_heap_alloc::ProcessHeapAlloc; pub use self::semispace::{GenerationalHeap, SemispaceHeap}; pub use self::stack_alloc::StackAlloc; pub use self::stack_primitives::StackPrimitives; pub use self::term_alloc::TermAlloc; pub use self::virtual_alloc::{VirtualAlloc, VirtualAllocator, VirtualHeap}; pub use self::virtual_binary_heap::VirtualBinaryHeap; use core::alloc::{AllocErr, Layout}; use core::ffi::c_void; use core::mem::transmute; use core::ptr; use lazy_static::lazy_static; use liblumen_core::sys::dynamic_call::DynamicCallee; use crate::erts::exception::AllocResult; use crate::erts::term::prelude::Term; use super::Frame; pub const DEFAULT_STACK_SIZE: usize = 1; pub const STACK_ALIGNMENT: usize = 16; lazy_static! { static ref PROC_ALLOC: ProcessHeapAlloc = ProcessHeapAlloc::new(); } pub struct Stack { pub base: *mut u8, pub top: *mut u8, pub size: usize, pub end: *mut u8, } impl Stack { fn new(base: *mut u8, pages: usize) -> Self { use liblumen_core::alloc::utils::align_up_to; use liblumen_core::sys::sysconf; let page_size = sysconf::pagesize(); let size = (pages + 1) * page_size; let bottom = unsafe { base.offset(page_size as isize) }; let with_red_zone = unsafe { bottom.offset(128) }; let end = align_up_to(with_red_zone, STACK_ALIGNMENT); let top = unsafe { base.offset(size as isize) }; Self { base, top, size, end, } } pub unsafe fn push_frame(&mut self, frame: &Frame) { let symbol = frame.native().ptr(); let dynamic_callee = transmute::<*const c_void, DynamicCallee>(symbol); self.push64(dynamic_callee as u64) } pub unsafe fn push64(&mut self, value: u64) { let mut top64 = self.top as *mut u64; top64 = top64.offset(-1); top64.write(value); self.top = top64 as *mut u8; } #[inline] pub fn limit(&self) -> *mut u8 { self.end } #[inline] pub fn is_guard_page<T>(&self, addr: *mut T) -> bool { use liblumen_core::util::pointer::in_area_inclusive; in_area_inclusive(addr, self.base, self.end) } } impl Default for Stack { fn default() -> Self { Self { base: ptr::null_mut(), top: ptr::null_mut(), size: 0, end: ptr::null_mut(), } } } unsafe impl Sync for Stack {} impl Drop for Stack { fn drop(&mut self) { use liblumen_core::alloc::mmap; use liblumen_core::sys::sysconf; if self.base.is_null() { return; } let page_size = sysconf::pagesize(); let pages = (self.size / page_size) - 1; let (layout, _offset) = Layout::from_size_align(page_size, page_size) .unwrap() .repeat(pages) .unwrap(); unsafe { mmap::unmap(self.base, layout); } } } #[inline] pub fn default_heap() -> AllocResult<(*mut Term, usize)> { let size = default_heap_size(); PROC_ALLOC.alloc(size).map(|ptr| (ptr, size)) } pub fn default_heap_size() -> usize { ProcessHeapAlloc::HEAP_SIZES[ProcessHeapAlloc::MIN_HEAP_SIZE_INDEX] } #[inline] pub fn heap(size: usize) -> AllocResult<*mut Term> { PROC_ALLOC.alloc(size) } #[inline] pub fn stack(num_pages: usize) -> AllocResult<Stack> { use liblumen_core::alloc::mmap; debug_assert!(num_pages > 0, "stack size in pages must be greater than 0"); let ptr = unsafe { mmap::map_stack(num_pages)? 
}; Ok(Stack::new(ptr.as_ptr(), num_pages)) } #[inline] pub unsafe fn realloc( heap: *mut Term, size: usize, new_size: usize, ) -> Result<*mut Term, AllocErr> { PROC_ALLOC.realloc_in_place(heap, size, new_size) } #[inline] pub unsafe fn free(heap: *mut Term, size: usize) { PROC_ALLOC.dealloc(heap, size) } #[inline] pub fn next_heap_size(size: usize) -> usize { ProcessHeapAlloc::next_heap_size(size) }
mod heap; mod iter; mod process_heap_alloc; mod semispace; mod stack_alloc; mod stack_primitives; mod term_alloc; mod virtual_alloc; mod virtual_binary_heap; pub use self::heap::{Heap, HeapAlloc}; pub use self::iter::HeapIter; pub use self::process_heap_alloc::ProcessHeapAlloc; pub use s
b unsafe fn realloc( heap: *mut Term, size: usize, new_size: usize, ) -> Result<*mut Term, AllocErr> { PROC_ALLOC.realloc_in_place(heap, size, new_size) } #[inline] pub unsafe fn free(heap: *mut Term, size: usize) { PROC_ALLOC.dealloc(heap, size) } #[inline] pub fn next_heap_size(size: usize) -> usize { ProcessHeapAlloc::next_heap_size(size) }
elf::semispace::{GenerationalHeap, SemispaceHeap}; pub use self::stack_alloc::StackAlloc; pub use self::stack_primitives::StackPrimitives; pub use self::term_alloc::TermAlloc; pub use self::virtual_alloc::{VirtualAlloc, VirtualAllocator, VirtualHeap}; pub use self::virtual_binary_heap::VirtualBinaryHeap; use core::alloc::{AllocErr, Layout}; use core::ffi::c_void; use core::mem::transmute; use core::ptr; use lazy_static::lazy_static; use liblumen_core::sys::dynamic_call::DynamicCallee; use crate::erts::exception::AllocResult; use crate::erts::term::prelude::Term; use super::Frame; pub const DEFAULT_STACK_SIZE: usize = 1; pub const STACK_ALIGNMENT: usize = 16; lazy_static! { static ref PROC_ALLOC: ProcessHeapAlloc = ProcessHeapAlloc::new(); } pub struct Stack { pub base: *mut u8, pub top: *mut u8, pub size: usize, pub end: *mut u8, } impl Stack { fn new(base: *mut u8, pages: usize) -> Self { use liblumen_core::alloc::utils::align_up_to; use liblumen_core::sys::sysconf; let page_size = sysconf::pagesize(); let size = (pages + 1) * page_size; let bottom = unsafe { base.offset(page_size as isize) }; let with_red_zone = unsafe { bottom.offset(128) }; let end = align_up_to(with_red_zone, STACK_ALIGNMENT); let top = unsafe { base.offset(size as isize) }; Self { base, top, size, end, } } pub unsafe fn push_frame(&mut self, frame: &Frame) { let symbol = frame.native().ptr(); let dynamic_callee = transmute::<*const c_void, DynamicCallee>(symbol); self.push64(dynamic_callee as u64) } pub unsafe fn push64(&mut self, value: u64) { let mut top64 = self.top as *mut u64; top64 = top64.offset(-1); top64.write(value); self.top = top64 as *mut u8; } #[inline] pub fn limit(&self) -> *mut u8 { self.end } #[inline] pub fn is_guard_page<T>(&self, addr: *mut T) -> bool { use liblumen_core::util::pointer::in_area_inclusive; in_area_inclusive(addr, self.base, self.end) } } impl Default for Stack { fn default() -> Self { Self { base: ptr::null_mut(), top: ptr::null_mut(), size: 0, end: ptr::null_mut(), } } } unsafe impl Sync for Stack {} impl Drop for Stack { fn drop(&mut self) { use liblumen_core::alloc::mmap; use liblumen_core::sys::sysconf; if self.base.is_null() { return; } let page_size = sysconf::pagesize(); let pages = (self.size / page_size) - 1; let (layout, _offset) = Layout::from_size_align(page_size, page_size) .unwrap() .repeat(pages) .unwrap(); unsafe { mmap::unmap(self.base, layout); } } } #[inline] pub fn default_heap() -> AllocResult<(*mut Term, usize)> { let size = default_heap_size(); PROC_ALLOC.alloc(size).map(|ptr| (ptr, size)) } pub fn default_heap_size() -> usize { ProcessHeapAlloc::HEAP_SIZES[ProcessHeapAlloc::MIN_HEAP_SIZE_INDEX] } #[inline] pub fn heap(size: usize) -> AllocResult<*mut Term> { PROC_ALLOC.alloc(size) } #[inline] pub fn stack(num_pages: usize) -> AllocResult<Stack> { use liblumen_core::alloc::mmap; debug_assert!(num_pages > 0, "stack size in pages must be greater than 0"); let ptr = unsafe { mmap::map_stack(num_pages)? }; Ok(Stack::new(ptr.as_ptr(), num_pages)) } #[inline] pu
random
[]
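This row carries no retrieved context (an empty list), but its three code fields still show the split that the visible rows appear to share: the file is cut at two arbitrary character positions, so the first span ends mid-token (`pub use s`), the middle span picks up exactly there and itself ends mid-token (`#[inline] pu`), and the last span continues with `b unsafe fn realloc`. A minimal sketch of the reassembly, using toy stand-ins for the much longer real fields:

```rust
// Minimal sketch (names hypothetical): concatenating the three spans of a row reproduces
// the original file, even when the cut points land mid-identifier.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut file = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    file.push_str(prefix);
    file.push_str(middle);
    file.push_str(suffix);
    file
}

fn main() {
    // Toy values standing in for the row's actual fields.
    let prefix = "pub use s";
    let middle = "elf::semispace::{GenerationalHeap, SemispaceHeap}; #[inline] pu";
    let suffix = "b unsafe fn realloc(";
    assert_eq!(
        reassemble(prefix, middle, suffix),
        "pub use self::semispace::{GenerationalHeap, SemispaceHeap}; #[inline] pub unsafe fn realloc("
    );
}
```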
Rust
connectorx-python/src/pandas/types.rs
ritchie46/connector-x
89c61beb1c2d782ca07445124caa1ca3db3608df
use chrono::{DateTime, Utc}; use connectorx::errors::{ConnectorAgentError, Result}; use connectorx::impl_typesystem; use fehler::throws; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum PandasTypeSystem { F64(bool), I64(bool), Bool(bool), Char(bool), Str(bool), BoxStr(bool), String(bool), Bytes(bool), DateTime(bool), } impl_typesystem! { system = PandasTypeSystem, mappings = { { F64 => f64 } { I64 => i64 } { Bool => bool } { Char => char } { Str => &'r str } { BoxStr => Box<str> } { String => String } { Bytes => Vec<u8> } { DateTime => DateTime<Utc> } } } pub trait PandasDType: Sized { fn dtype(&self) -> &'static str; fn npdtype(&self) -> &'static str; fn parse(ty: &str) -> Result<Self>; fn is_extension(&self) -> bool; fn block_name(&self) -> &'static str; } impl PandasDType for PandasTypeSystem { fn dtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "int64", PandasTypeSystem::I64(true) => "Int64", PandasTypeSystem::F64(_) => "float64", PandasTypeSystem::Bool(false) => "bool", PandasTypeSystem::Bool(true) => "boolean", PandasTypeSystem::Char(_) => "object", PandasTypeSystem::Str(_) => "object", PandasTypeSystem::BoxStr(_) => "object", PandasTypeSystem::String(_) => "object", PandasTypeSystem::Bytes(_) => "object", PandasTypeSystem::DateTime(_) => "datetime64[ns]", } } fn npdtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(_) => "i8", PandasTypeSystem::F64(_) => "f8", PandasTypeSystem::Bool(_) => "b1", PandasTypeSystem::Char(_) => "O", PandasTypeSystem::Str(_) => "O", PandasTypeSystem::BoxStr(_) => "O", PandasTypeSystem::String(_) => "O", PandasTypeSystem::Bytes(_) => "O", PandasTypeSystem::DateTime(_) => "M8[ns]", } } #[throws(ConnectorAgentError)] fn parse(ty: &str) -> Self { match ty { "int64" => PandasTypeSystem::I64(false), "Int64" => PandasTypeSystem::I64(true), "float64" => PandasTypeSystem::F64(true), "bool" => PandasTypeSystem::Bool(false), "boolean" => PandasTypeSystem::Bool(true), "object" => PandasTypeSystem::String(true), "datetime" => PandasTypeSystem::DateTime(true), ty => unimplemented!("{}", ty), } } fn is_extension(&self) -> bool { match *self { PandasTypeSystem::I64(false) => false, PandasTypeSystem::I64(true) => true, PandasTypeSystem::F64(_) => false, PandasTypeSystem::Bool(false) => false, PandasTypeSystem::Bool(true) => true, PandasTypeSystem::Char(_) => false, PandasTypeSystem::Str(_) => false, PandasTypeSystem::BoxStr(_) => false, PandasTypeSystem::String(_) => false, PandasTypeSystem::Bytes(_) => false, PandasTypeSystem::DateTime(_) => false, } } fn block_name(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "IntBlock", PandasTypeSystem::I64(true) => "ExtensionBlock", PandasTypeSystem::F64(_) => "FloatBlock", PandasTypeSystem::Bool(false) => "BoolBlock", PandasTypeSystem::Bool(true) => "ExtensionBlock", PandasTypeSystem::Char(_) => "ObjectBlock", PandasTypeSystem::Str(_) => "ObjectBlock", PandasTypeSystem::BoxStr(_) => "ObjectBlock", PandasTypeSystem::String(_) => "ObjectBlock", PandasTypeSystem::Bytes(_) => "ObjectBlock", PandasTypeSystem::DateTime(_) => "DatetimeBlock", } } }
use chrono::{DateTime, Utc}; use connectorx::errors::{ConnectorAgentError, Result}; use connectorx::impl_typesystem; use fehler::throws; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum PandasTypeSystem { F64(bool), I64(bool), Bool(bool), Char(bool), Str(bool), BoxStr(bool), String(bool), Bytes(bool), DateTime(bool), } impl_typesystem! { system = PandasTypeSystem, mappings = { { F64 => f64 } { I64 => i64 } { Bool => bool } { Char => char } { Str => &'r str } { BoxStr => Box<str> } { String => String } { Bytes => Vec<u8> } { DateTime => DateTime<Utc> } } } pub trait PandasDType: Sized { fn dtype(&self) -> &'static str; fn npdtype(&self) -> &'static str; fn parse(ty: &str) -> Result<Self>; fn is_extension(&self) -> bool; fn block_name(&self) -> &'static str; } impl PandasDType for PandasTypeSystem { fn dtype(&self) -> &'static str {
} fn npdtype(&self) -> &'static str { match *self { PandasTypeSystem::I64(_) => "i8", PandasTypeSystem::F64(_) => "f8", PandasTypeSystem::Bool(_) => "b1", PandasTypeSystem::Char(_) => "O", PandasTypeSystem::Str(_) => "O", PandasTypeSystem::BoxStr(_) => "O", PandasTypeSystem::String(_) => "O", PandasTypeSystem::Bytes(_) => "O", PandasTypeSystem::DateTime(_) => "M8[ns]", } } #[throws(ConnectorAgentError)] fn parse(ty: &str) -> Self { match ty { "int64" => PandasTypeSystem::I64(false), "Int64" => PandasTypeSystem::I64(true), "float64" => PandasTypeSystem::F64(true), "bool" => PandasTypeSystem::Bool(false), "boolean" => PandasTypeSystem::Bool(true), "object" => PandasTypeSystem::String(true), "datetime" => PandasTypeSystem::DateTime(true), ty => unimplemented!("{}", ty), } } fn is_extension(&self) -> bool { match *self { PandasTypeSystem::I64(false) => false, PandasTypeSystem::I64(true) => true, PandasTypeSystem::F64(_) => false, PandasTypeSystem::Bool(false) => false, PandasTypeSystem::Bool(true) => true, PandasTypeSystem::Char(_) => false, PandasTypeSystem::Str(_) => false, PandasTypeSystem::BoxStr(_) => false, PandasTypeSystem::String(_) => false, PandasTypeSystem::Bytes(_) => false, PandasTypeSystem::DateTime(_) => false, } } fn block_name(&self) -> &'static str { match *self { PandasTypeSystem::I64(false) => "IntBlock", PandasTypeSystem::I64(true) => "ExtensionBlock", PandasTypeSystem::F64(_) => "FloatBlock", PandasTypeSystem::Bool(false) => "BoolBlock", PandasTypeSystem::Bool(true) => "ExtensionBlock", PandasTypeSystem::Char(_) => "ObjectBlock", PandasTypeSystem::Str(_) => "ObjectBlock", PandasTypeSystem::BoxStr(_) => "ObjectBlock", PandasTypeSystem::String(_) => "ObjectBlock", PandasTypeSystem::Bytes(_) => "ObjectBlock", PandasTypeSystem::DateTime(_) => "DatetimeBlock", } } }
match *self { PandasTypeSystem::I64(false) => "int64", PandasTypeSystem::I64(true) => "Int64", PandasTypeSystem::F64(_) => "float64", PandasTypeSystem::Bool(false) => "bool", PandasTypeSystem::Bool(true) => "boolean", PandasTypeSystem::Char(_) => "object", PandasTypeSystem::Str(_) => "object", PandasTypeSystem::BoxStr(_) => "object", PandasTypeSystem::String(_) => "object", PandasTypeSystem::Bytes(_) => "object", PandasTypeSystem::DateTime(_) => "datetime64[ns]", }
if_condition
[ { "content": "/// `TypeSystem` describes all the types a source or destination support\n\n/// using enum variants.\n\n/// The variant can be used to type check with a static type `T` through the `check` method.\n\npub trait TypeSystem: Copy + Clone + Send + Sync {\n\n /// Check whether T is the same type as defined by self.\n\n fn check<T: TypeAssoc<Self>>(self) -> Result<()> {\n\n T::check(self)\n\n }\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 0, "score": 208428.48558920866 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn get_partition_range_query<T: Dialect>(query: &str, col: &str, dialect: &T) -> String {\n\n trace!(\"Incoming query: {}\", query);\n\n const RANGE_TMP_TAB_NAME: &'static str = \"CXTMPTAB_RANGE\";\n\n let mut ast = Parser::parse_sql(dialect, query)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(query.to_string()));\n\n }\n\n\n\n let ast_range: Statement;\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => {\n\n q.order_by = vec![];\n\n match &mut q.body {\n\n SetExpr::Select(_select) => {\n\n let projection = vec![\n\n SelectItem::UnnamedExpr(Expr::Function(Function {\n\n name: ObjectName(vec![Ident {\n\n value: \"min\".to_string(),\n\n quote_style: None,\n", "file_path": "connectorx/src/sql.rs", "rank": 1, "score": 173928.97086990305 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn limit1_query<T: Dialect>(sql: &str, dialect: &T) -> String {\n\n trace!(\"Incoming query: {}\", sql);\n\n\n\n let mut ast = Parser::parse_sql(dialect, sql)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(sql.to_string()));\n\n }\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => {\n\n q.limit = Some(Expr::Value(Value::Number(\"1\".to_string(), false)));\n\n }\n\n _ => throw!(ConnectorAgentError::SQLQueryNotSupported(sql.to_string())),\n\n };\n\n\n\n let sql = format!(\"{}\", ast[0]);\n\n debug!(\"Transformed limit 1 query: {}\", sql);\n\n sql\n\n}\n\n\n", "file_path": "connectorx/src/sql.rs", "rank": 2, "score": 172919.60811512737 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn count_query<T: Dialect>(sql: &str, dialect: &T) -> String {\n\n trace!(\"Incoming query: {}\", sql);\n\n\n\n let mut ast = Parser::parse_sql(dialect, sql)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(sql.to_string()));\n\n }\n\n\n\n let ast_count: Statement;\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => {\n\n q.order_by = vec![];\n\n match &mut q.body {\n\n SetExpr::Select(select) => {\n\n select.sort_by = vec![];\n\n let projection = vec![SelectItem::UnnamedExpr(Expr::Function(Function {\n\n name: ObjectName(vec![Ident {\n\n value: \"count\".to_string(),\n\n quote_style: None,\n", "file_path": "connectorx/src/sql.rs", "rank": 3, "score": 172919.60811512737 }, { "content": "#[throws(ConnectorAgentError)]\n\nfn sqlite_get_partition_range(conn: &str, query: &str, col: &str) -> (i64, i64) {\n\n let conn = Connection::open(&conn[9..])?;\n\n // SQLite only optimize min max queries when there is only one aggregation\n\n // https://www.sqlite.org/optoverview.html#minmax\n\n let (min_query, max_query) =\n\n get_partition_range_query_sep(query.clone(), col.clone(), &SQLiteDialect {})?;\n\n let mut error = None;\n\n let min_v = conn.query_row(min_query.as_str(), [], |row| {\n\n // declare type for count query will be None, only need to check the returned value type\n\n let col_type = row.get_ref(0)?.data_type();\n\n match col_type {\n\n Type::Integer => 
row.get(0),\n\n _ => {\n\n error = Some(anyhow!(\"Partition can only be done on integer columns\"));\n\n Ok(0)\n\n }\n\n }\n\n })?;\n\n match error {\n\n None => {}\n", "file_path": "connectorx/src/source_router.rs", "rank": 4, "score": 171380.70829442074 }, { "content": "#[throws(ConnectorAgentError)]\n\nfn pg_get_partition_range(conn: &str, query: &str, col: &str) -> (i64, i64) {\n\n let mut client = Client::connect(conn, NoTls)?;\n\n let range_query = get_partition_range_query(query.clone(), col.clone(), &PostgreSqlDialect {})?;\n\n let row = client.query_one(range_query.as_str(), &[])?;\n\n\n\n let col_type = PostgresTypeSystem::from(row.columns()[0].type_());\n\n let (min_v, max_v) = match col_type {\n\n PostgresTypeSystem::Int4(_) => {\n\n let min_v: i32 = row.get(0);\n\n let max_v: i32 = row.get(1);\n\n (min_v as i64, max_v as i64)\n\n }\n\n PostgresTypeSystem::Int8(_) => {\n\n let min_v: i64 = row.get(0);\n\n let max_v: i64 = row.get(1);\n\n (min_v, max_v)\n\n }\n\n PostgresTypeSystem::Float4(_) => {\n\n let min_v: f32 = row.get(0);\n\n let max_v: f32 = row.get(1);\n", "file_path": "connectorx/src/source_router.rs", "rank": 5, "score": 171380.70829442074 }, { "content": "#[pyfunction]\n\nfn read_pg(py: Python, conn: &str, sqls: Vec<String>, schema: &str) -> PyResult<PyObject> {\n\n let ret: Result<Vec<(String, Vec<(isize, isize)>)>> = py.allow_threads(|| {\n\n let r = runtime::Runtime::new()?;\n\n let ret = r.block_on(pg::read_pg(conn, &sqls, schema))?;\n\n Ok(ret\n\n .into_iter()\n\n .map(|(k, v)| {\n\n (\n\n k,\n\n v.into_iter()\n\n .map(|(a, b)| (a as isize, b as isize))\n\n .collect(),\n\n )\n\n })\n\n .collect())\n\n });\n\n\n\n let ret: Vec<_> = ret\n\n .map_err(|e| PyValueError::new_err(format!(\"{:?}\", e)))?\n\n .into_iter()\n\n .map(|(k, v)| (k, PyTuple::new(py, v)))\n\n .collect();\n\n PyResult::Ok(ret.into_py_dict(py).to_object(py))\n\n}\n\n\n", "file_path": "connectorx-python/src/lib.rs", "rank": 6, "score": 153833.77190433984 }, { "content": "pub fn check_dtype(ob: &PyAny, expected_dtype: &str) -> PyResult<()> {\n\n let dtype = ob.getattr(\"dtype\")?.str()?;\n\n let dtype = dtype.to_str()?;\n\n if dtype != expected_dtype {\n\n throw!(PyRuntimeError::new_err(format!(\n\n \"expecting ndarray to be '{}' found '{}' at {}:{}\",\n\n expected_dtype,\n\n dtype,\n\n file!(),\n\n line!()\n\n )));\n\n }\n\n Ok(())\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/mod.rs", "rank": 7, "score": 152141.63662883866 }, { "content": "/// A `Destination` is associated with a `TypeSystem` and a `PartitionDestination`.\n\n/// `PartitionDestination` allows multiple threads write data into the buffer owned by `Destination`.\n\npub trait Destination: Sized {\n\n const DATA_ORDERS: &'static [DataOrder];\n\n type TypeSystem: TypeSystem;\n\n type Partition<'a>: DestinationPartition<'a, TypeSystem = Self::TypeSystem>;\n\n\n\n /// Construct the `Destination`.\n\n /// This allocates the memory based on the types of each columns\n\n /// and the number of rows.\n\n fn allocate<S: AsRef<str>>(\n\n &mut self,\n\n nrow: usize,\n\n names: &[S],\n\n schema: &[Self::TypeSystem],\n\n data_order: DataOrder,\n\n ) -> Result<()>;\n\n\n\n /// Create a bunch of partition destinations, with each write `count` number of rows.\n\n fn partition(&mut self, counts: &[usize]) -> Result<Vec<Self::Partition<'_>>>;\n\n /// Return the schema of the destination.\n\n fn schema(&self) -> &[Self::TypeSystem];\n\n}\n\n\n", "file_path": "connectorx/src/destinations/mod.rs", "rank": 8, "score": 
146134.49375858536 }, { "content": "pub fn run(nq: usize, conn: &str) {\n\n let conn = env::var(conn).unwrap();\n\n\n\n Python::with_gil(|py| {\n\n read_sql(\n\n py,\n\n &conn,\n\n \"pandas\",\n\n None,\n\n None,\n\n Some(PartitionQuery::new(QUERY, \"L_ORDERKEY\", None, None, nq)),\n\n )\n\n .unwrap();\n\n });\n\n}\n\n\n", "file_path": "connectorx-python/examples/tpch.rs", "rank": 9, "score": 145790.45484232806 }, { "content": "/// In general, a `DataSource` abstracts the data source as a stream, which can produce\n\n/// a sequence of values of variate types by repetitively calling the function `produce`.\n\npub trait SourcePartition: Sized {\n\n type TypeSystem: TypeSystem;\n\n type Parser<'a>: PartitionParser<'a, TypeSystem = Self::TypeSystem>;\n\n /// Run the query and put the result into Self.\n\n fn prepare(&mut self) -> Result<()>;\n\n\n\n fn parser(&mut self) -> Result<Self::Parser<'_>>;\n\n\n\n /// Number of rows this `DataSource` got.\n\n /// Sometimes it is not possible for the source to know how many rows it gets before reading the whole data.\n\n fn nrows(&self) -> usize;\n\n\n\n /// Number of cols this `DataSource` got.\n\n fn ncols(&self) -> usize;\n\n}\n\n\n", "file_path": "connectorx/src/sources/mod.rs", "rank": 10, "score": 143015.95223334501 }, { "content": "/// Associate a static type to a TypeSystem\n\npub trait TypeAssoc<TS: TypeSystem> {\n\n fn check(ts: TS) -> Result<()>;\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 12, "score": 136469.8861897192 }, { "content": "// Indicates a type has an associated pandas column\n\npub trait HasPandasColumn: Sized {\n\n type PandasColumn<'a>: PandasColumn<Self>;\n\n}\n\n\n", "file_path": "connectorx-python/src/pandas/pandas_columns/mod.rs", "rank": 13, "score": 134734.90803957084 }, { "content": "/// A type implemented `Produce<T>` means that it can produce a value `T` by consuming part of it's raw data buffer.\n\npub trait Produce<'r, T> {\n\n fn produce(&'r mut self) -> Result<T>;\n\n}\n", "file_path": "connectorx/src/sources/mod.rs", "rank": 14, "score": 131196.0892794699 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn get_limit<T: Dialect>(sql: &str, dialect: &T) -> Option<usize> {\n\n let mut ast = Parser::parse_sql(dialect, sql)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(sql.to_string()));\n\n }\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => match &q.limit {\n\n Some(expr) => {\n\n return Some(\n\n expr.to_string()\n\n .parse()\n\n .map_err(|e: std::num::ParseIntError| anyhow!(e))?,\n\n )\n\n }\n\n _ => {}\n\n },\n\n _ => throw!(ConnectorAgentError::SQLQueryNotSupported(sql.to_string())),\n\n };\n\n None\n\n}\n\n\n", "file_path": "connectorx/src/sql.rs", "rank": 15, "score": 124446.70603856337 }, { "content": "/// Transport defines how to produce a value, do type conversion and then write\n\n/// the value to a destination.\n\npub trait Transport {\n\n type TSS: TypeSystem;\n\n type TSD: TypeSystem;\n\n type S: Source;\n\n type D: Destination;\n\n\n\n /// convert_typesystem convert the source type system TSS to the destination\n\n /// type system TSD.\n\n fn convert_typesystem(ts: Self::TSS) -> Result<Self::TSD>;\n\n\n\n /// convert_type convert the type T1 associated with the source type system\n\n /// TSS to a type T2 which is associated with the destination type system TSD.\n\n fn convert_type<T1, T2>(val: T1) -> T2\n\n where\n\n Self: TypeConversion<T1, T2>,\n\n {\n\n <Self as TypeConversion<T1, T2>>::convert(val)\n\n }\n\n\n\n /// `process` will 
ask source to produce a value with type T1, based on TSS, and then do\n", "file_path": "connectorx/src/typesystem.rs", "rank": 16, "score": 121513.65422285268 }, { "content": "/// A ParameterizedFunc refers to a function that is parameterized on a type T,\n\n/// where type T will be dynaically determined by the variant of a TypeSystem.\n\n/// An example is the `transmit<S,W,T>` function. When piping values from a source\n\n/// to the destination, its type `T` is determined by the schema at the runtime.\n\npub trait ParameterizedFunc {\n\n type Function;\n\n fn realize<T>() -> Self::Function\n\n where\n\n Self: ParameterizedOn<T>,\n\n {\n\n Self::parameterize()\n\n }\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 17, "score": 119156.39864850273 }, { "content": "pub trait Source {\n\n /// Supported data orders, ordering by preference.\n\n const DATA_ORDERS: &'static [DataOrder];\n\n /// The type system this `Source` associated with.\n\n type TypeSystem: TypeSystem;\n\n // Partition needs to be send to different threads for parallel execution\n\n type Partition: SourcePartition<TypeSystem = Self::TypeSystem> + Send;\n\n\n\n fn set_data_order(&mut self, data_order: DataOrder) -> Result<()>;\n\n\n\n fn set_queries<Q: AsRef<str>>(&mut self, queries: &[Q]);\n\n\n\n fn fetch_metadata(&mut self) -> Result<()>;\n\n\n\n fn names(&self) -> Vec<String>;\n\n\n\n fn schema(&self) -> Vec<Self::TypeSystem>;\n\n\n\n fn partition(self) -> Result<Vec<Self::Partition>>;\n\n}\n\n\n", "file_path": "connectorx/src/sources/mod.rs", "rank": 18, "score": 119153.41259106714 }, { "content": "pub trait PandasColumn<V>: Sized + PandasColumnObject {\n\n fn write(&mut self, val: V) -> Result<()>;\n\n}\n\n\n", "file_path": "connectorx-python/src/pandas/pandas_columns/mod.rs", "rank": 19, "score": 118884.52597603375 }, { "content": "#[test]\n\nfn write_mixed_array() -> Result<()> {\n\n let mut dw = MemoryDestination::new();\n\n dw.allocate(\n\n 11,\n\n &[\"a\", \"b\", \"c\", \"d\", \"e\"],\n\n &[\n\n DummyTypeSystem::I64(false),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::I64(false),\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::String(false),\n\n ],\n\n DataOrder::RowMajor,\n\n )\n\n .unwrap();\n\n let destinations = dw.partition(&[4, 7])?;\n\n\n\n destinations.into_par_iter().for_each(|mut destination| {\n\n for row in 0..destination.nrows() {\n", "file_path": "connectorx/tests/test_mix.rs", "rank": 20, "score": 115561.50300445674 }, { "content": "/// Realize means that a TypeSystem can realize a parameterized func F, based on its current variants.\n\npub trait Realize<F>\n\nwhere\n\n F: ParameterizedFunc,\n\n{\n\n /// realize a parameterized function with the type that self currently is.\n\n fn realize(self) -> Result<F::Function>;\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 21, "score": 114802.64235662148 }, { "content": "/// Associate arrow builder with native type\n\npub trait ArrowAssoc {\n\n type Builder: ArrayBuilder + Send;\n\n\n\n fn builder(nrows: usize) -> Self::Builder;\n\n fn append(builder: &mut Self::Builder, value: Self) -> Result<()>;\n\n fn field(header: &str) -> Field;\n\n}\n\n\n\nimpl ArrowAssoc for i32 {\n\n type Builder = Int32Builder;\n\n\n\n fn builder(nrows: usize) -> Int32Builder {\n\n Int32Builder::new(nrows)\n\n }\n\n\n\n #[throws(ConnectorAgentError)]\n\n fn append(builder: &mut Int32Builder, value: i32) {\n\n builder.append_value(value)?;\n\n }\n\n\n", "file_path": 
"connectorx/src/destinations/arrow/arrow_assoc.rs", "rank": 22, "score": 112927.60222504236 }, { "content": "/// A type implemented `Consume<T>` means that it can consume a value `T` by adding it to it's own buffer.\n\npub trait Consume<T> {\n\n fn consume(&mut self, value: T) -> Result<()>;\n\n}\n", "file_path": "connectorx/src/destinations/mod.rs", "rank": 23, "score": 112590.3579559192 }, { "content": "pub trait PartitionParser<'a> {\n\n type TypeSystem: TypeSystem;\n\n /// Read a value `T` by calling `Produce<T>::produce`. Usually this function does not need to be\n\n /// implemented.\n\n fn parse<'r, T>(&'r mut self) -> Result<T>\n\n where\n\n T: TypeAssoc<Self::TypeSystem>,\n\n Self: Produce<'r, T>,\n\n {\n\n self.produce()\n\n }\n\n}\n\n\n", "file_path": "connectorx/src/sources/mod.rs", "rank": 24, "score": 112590.3579559192 }, { "content": "#[pyfunction]\n\npub fn read_sql<'a>(\n\n py: Python<'a>,\n\n conn: &str,\n\n return_type: &str,\n\n protocol: Option<&str>,\n\n queries: Option<Vec<String>>,\n\n partition_query: Option<read_sql::PartitionQuery>,\n\n) -> PyResult<&'a PyAny> {\n\n read_sql::read_sql(py, conn, return_type, protocol, queries, partition_query)\n\n}\n", "file_path": "connectorx-python/src/lib.rs", "rank": 25, "score": 111458.61447700256 }, { "content": "pub fn read_sql<'a>(\n\n py: Python<'a>,\n\n conn: &str,\n\n return_type: &str,\n\n protocol: Option<&str>,\n\n queries: Option<Vec<String>>,\n\n partition_query: Option<PartitionQuery>,\n\n) -> PyResult<&'a PyAny> {\n\n let source_conn =\n\n SourceConn::try_from(conn).map_err(ConnectorAgentPythonError::ConnectorAgentError)?;\n\n let queries = match (queries, partition_query) {\n\n (Some(queries), None) => queries,\n\n (\n\n None,\n\n Some(PartitionQuery {\n\n query,\n\n column: col,\n\n min,\n\n max,\n\n num,\n", "file_path": "connectorx-python/src/read_sql.rs", "rank": 26, "score": 109394.14222479887 }, { "content": "#[throws(ConnectorAgentPythonError)]\n\npub fn write_pandas<'a>(\n\n py: Python<'a>,\n\n source_conn: &SourceConn,\n\n queries: &[&str],\n\n protocol: &str,\n\n) -> &'a PyAny {\n\n let mut destination = PandasDestination::new(py);\n\n\n\n // TODO: unlock gil if possible\n\n match source_conn.ty {\n\n SourceType::Postgres => {\n\n debug!(\"Protocol: {}\", protocol);\n\n match protocol {\n\n \"csv\" => {\n\n let sb = PostgresSource::<CSV>::new(&source_conn.conn[..], queries.len())?;\n\n let dispatcher = Dispatcher::<_, _, PostgresPandasTransport<CSV>>::new(\n\n sb,\n\n &mut destination,\n\n queries,\n\n );\n", "file_path": "connectorx-python/src/pandas/mod.rs", "rank": 27, "score": 109394.14222479887 }, { "content": "/// `PartitionDestination` writes values to its own region. `PartitionDestination` is parameterized\n\n/// on lifetime `'a`, which is the lifetime of the parent `Destination`. This indicates\n\n/// the `PartitionDestination` can never live longer than the parent.\n\npub trait DestinationPartition<'a>: Send {\n\n type TypeSystem: TypeSystem;\n\n\n\n /// Write a value of type T to the location (row, col). 
If T mismatch with the\n\n /// schema, `ConnectorAgentError::TypeCheckFailed` will return.\n\n fn write<T>(&mut self, value: T) -> Result<()>\n\n where\n\n T: TypeAssoc<Self::TypeSystem>,\n\n Self: Consume<T>,\n\n {\n\n self.consume(value)\n\n }\n\n\n\n /// Number of rows this `PartitionDestination` controls.\n\n fn nrows(&self) -> usize;\n\n\n\n /// Number of rows this `PartitionDestination` controls.\n\n fn ncols(&self) -> usize;\n\n\n\n /// Final clean ups\n\n fn finalize(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "connectorx/src/destinations/mod.rs", "rank": 28, "score": 106857.36778131005 }, { "content": "pub trait TypeConversion<T, U> {\n\n fn convert(val: T) -> U;\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 29, "score": 106857.36778131005 }, { "content": "/// `ParameterizedOn` indicates a parameterized function `Self`\n\n/// is parameterized on type `T`\n\npub trait ParameterizedOn<T>: ParameterizedFunc {\n\n fn parameterize() -> Self::Function;\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 30, "score": 106857.36778131005 }, { "content": "pub fn process<'s, 'd, 'r, T1, T2, TP, S, D>(\n\n src: &'r mut <<S as Source>::Partition as SourcePartition>::Parser<'s>,\n\n dst: &'r mut <D as Destination>::Partition<'d>,\n\n) -> crate::errors::Result<()>\n\nwhere\n\n T1: TypeAssoc<<S as Source>::TypeSystem>,\n\n T2: TypeAssoc<<D as Destination>::TypeSystem>,\n\n TP: TypeConversion<T1, T2>,\n\n S: Source,\n\n D: Destination,\n\n <<S as Source>::Partition as SourcePartition>::Parser<'s>: Produce<'r, T1>,\n\n <D as Destination>::Partition<'d>: Consume<T2>,\n\n{\n\n let val: T1 = PartitionParser::parse(src)?;\n\n let val: T2 = <TP as TypeConversion<T1, _>>::convert(val);\n\n DestinationPartition::write(dst, val)?;\n\n Ok(())\n\n}\n", "file_path": "connectorx/src/typesystem.rs", "rank": 31, "score": 103695.78836612118 }, { "content": "pub trait PandasColumnObject: Send {\n\n fn typecheck(&self, _: TypeId) -> bool;\n\n fn typename(&self) -> &'static str;\n\n fn len(&self) -> usize;\n\n fn finalize(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "connectorx-python/src/pandas/pandas_columns/mod.rs", "rank": 32, "score": 103381.25426149037 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn single_col_partition_query<T: Dialect>(\n\n query: &str,\n\n col: &str,\n\n lower: i64,\n\n upper: i64,\n\n dialect: &T,\n\n) -> String {\n\n trace!(\"Incoming query: {}\", query);\n\n const PART_TMP_TAB_NAME: &'static str = \"CXTMPTAB_PART\";\n\n\n\n let mut ast = Parser::parse_sql(dialect, query)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(query.to_string()));\n\n }\n\n\n\n let ast_part: Statement;\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => match &mut q.body {\n\n SetExpr::Select(_select) => {\n", "file_path": "connectorx/src/sql.rs", "rank": 33, "score": 102039.28026144272 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn get_partition_range_query_sep<T: Dialect>(\n\n query: &str,\n\n col: &str,\n\n dialect: &T,\n\n) -> (String, String) {\n\n trace!(\"Incoming query: {}\", query);\n\n const RANGE_TMP_TAB_NAME: &'static str = \"CXTMPTAB_RANGE\";\n\n let mut ast = Parser::parse_sql(dialect, query)?;\n\n if ast.len() != 1 {\n\n throw!(ConnectorAgentError::SQLQueryNotSupported(query.to_string()));\n\n }\n\n\n\n let ast_range_min: Statement;\n\n let ast_range_max: Statement;\n\n\n\n match &mut ast[0] {\n\n Statement::Query(q) => {\n\n q.order_by = vec![];\n\n match &mut 
q.body {\n\n SetExpr::Select(_select) => {\n", "file_path": "connectorx/src/sql.rs", "rank": 34, "score": 100317.0814225467 }, { "content": "#[throws(ConnectorAgentError)]\n\nfn create_dataframe<'a, S: AsRef<str>>(\n\n py: Python<'a>,\n\n names: &[S],\n\n schema: &[PandasTypeSystem],\n\n nrows: usize,\n\n) -> (&'a PyAny, &'a PyList, &'a PyList) {\n\n let names: Vec<_> = names.into_iter().map(|s| s.as_ref()).collect();\n\n debug!(\"names: {:?}\", names);\n\n debug!(\"schema: {:?}\", schema);\n\n\n\n let mut schema_dict: HashMap<PandasTypeSystem, Vec<usize>> = HashMap::new();\n\n schema.iter().enumerate().for_each(|(idx, &dt)| {\n\n let indices = schema_dict.entry(dt).or_insert(vec![]);\n\n indices.push(idx);\n\n });\n\n debug!(\"schema_dict: {:?}\", schema_dict);\n\n\n\n let mut blocks_code = vec![];\n\n schema_dict\n\n .iter()\n", "file_path": "connectorx-python/src/pandas/destination.rs", "rank": 35, "score": 100125.54119402192 }, { "content": "#[throws(ConnectorAgentError)]\n\npub fn coordinate(src: &[DataOrder], dst: &[DataOrder]) -> DataOrder {\n\n assert!(0 < src.len() && 0 < dst.len());\n\n\n\n match (src, dst) {\n\n ([s, ..], [d, ..]) if s == d => *s,\n\n ([s, ..], [_, d, ..]) if s == d => *s,\n\n ([_, s, ..], [d, ..]) if s == d => *s,\n\n _ => throw!(ConnectorAgentError::CannotResolveDataOrder(\n\n src.to_vec(),\n\n dst.to_vec()\n\n )),\n\n }\n\n}\n", "file_path": "connectorx/src/data_order.rs", "rank": 36, "score": 87691.1011221837 }, { "content": "#[pymodule]\n\nfn connectorx_python(_: Python, m: &PyModule) -> PyResult<()> {\n\n START.call_once(|| {\n\n let _ = env_logger::try_init();\n\n });\n\n\n\n m.add_wrapped(wrap_pyfunction!(read_pg))?;\n\n m.add_wrapped(wrap_pyfunction!(read_sql))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "connectorx-python/src/lib.rs", "rank": 37, "score": 76800.35870414435 }, { "content": "fn main() {\n\n // let src = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n // println!(\"src: {}\", src);\n\n // let dst = std::path::Path::new(&std::env::var(\"OUT_DIR\").unwrap()).join(\"built.rs\");\n\n // let mut opts = built::Options::default();\n\n // println!(\"out: {:?}\", dst);\n\n // opts.set_dependencies(true).set_compiler(true).set_env(true);\n\n\n\n // built::write_built_file_with_opts(&opts, std::path::Path::new(&src), &dst)\n\n // .expect(\"Failed to acquire build-time information\");\n\n}\n", "file_path": "connectorx-python/build.rs", "rank": 38, "score": 63689.651904102626 }, { "content": "fn main() {\n\n run(1, \"POSTGRES_URL\");\n\n}\n", "file_path": "connectorx-python/examples/tpch.rs", "rank": 39, "score": 62431.37185708384 }, { "content": "#[test]\n\n#[should_panic]\n\nfn no_file() {\n\n let mut source = CSVSource::new(&[]);\n\n source.set_queries(&[\"./a_fake_file.csv\"]);\n\n let partitions = source.partition().unwrap();\n\n for mut p in partitions {\n\n p.prepare().expect(\"run query\");\n\n }\n\n}\n\n\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 40, "score": 62431.37185708384 }, { "content": "fn wrap_query(\n\n query: Box<Query>,\n\n projection: Vec<SelectItem>,\n\n selection: Option<Expr>,\n\n tmp_tab_name: String,\n\n) -> Statement {\n\n Statement::Query(Box::new(Query {\n\n with: None,\n\n body: SetExpr::Select(Box::new(Select {\n\n distinct: false,\n\n top: None,\n\n projection: projection,\n\n from: vec![TableWithJoins {\n\n relation: TableFactor::Derived {\n\n lateral: false,\n\n subquery: query,\n\n alias: Some(TableAlias {\n\n name: Ident {\n\n value: tmp_tab_name,\n\n quote_style: None,\n", "file_path": 
"connectorx/src/sql.rs", "rank": 41, "score": 62431.37185708384 }, { "content": "#[test]\n\nfn test_postgres() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n\n\n let dburl = env::var(\"POSTGRES_URL\").unwrap();\n\n\n\n let queries = [\n\n \"select * from test_table where test_int < 2\",\n\n \"select * from test_table where test_int >= 2\",\n\n ];\n\n let builder = PostgresSource::new(&dburl, 2).unwrap();\n\n let mut destination = MemoryDestination::new();\n\n let dispatcher = Dispatcher::<_, _, PostgresMemoryTransport<Binary>>::new(\n\n builder,\n\n &mut destination,\n\n &queries,\n\n );\n\n\n\n dispatcher.run().expect(\"run dispatcher\");\n\n assert_eq!(\n\n array![Some(1), Some(0), Some(2), Some(3), Some(4), Some(1314)],\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 42, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn test_csv() {\n\n let schema = [DummyTypeSystem::I64(false); 5];\n\n let files = [\"./tests/data/uint_0.csv\", \"./tests/data/uint_1.csv\"];\n\n let source = CSVSource::new(&schema);\n\n\n\n let mut destination = MemoryDestination::new();\n\n let dispatcher = Dispatcher::<_, _, CSVMemoryTransport>::new(source, &mut destination, &files);\n\n\n\n dispatcher.run().expect(\"run dispatcher\");\n\n\n\n assert_eq!(\n\n array![\n\n [0, 1, 2, 3, 4],\n\n [5, 6, 7, 8, 9],\n\n [10, 11, 12, 13, 14],\n\n [15, 16, 17, 18, 19],\n\n [20, 21, 22, 23, 24],\n\n [25, 26, 27, 28, 29],\n\n [30, 31, 32, 33, 34],\n\n [35, 36, 37, 38, 39],\n\n [40, 41, 42, 43, 44],\n\n [45, 46, 47, 48, 49],\n\n [50, 51, 52, 53, 54],\n\n ],\n\n destination.buffer_view::<i64>(0).unwrap()\n\n );\n\n}\n\n\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 43, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn test_arrow() {\n\n let schema = [\n\n DummyTypeSystem::I64(true),\n\n DummyTypeSystem::F64(true),\n\n DummyTypeSystem::Bool(false),\n\n DummyTypeSystem::String(true),\n\n DummyTypeSystem::F64(false),\n\n ];\n\n let nrows = vec![4, 7];\n\n let ncols = schema.len();\n\n let mut headers = vec![];\n\n for c in 0..ncols {\n\n headers.push(format!(\"c{}\", c));\n\n }\n\n let queries: Vec<String> = nrows.iter().map(|v| format!(\"{},{}\", v, ncols)).collect();\n\n let mut destination = ArrowDestination::new();\n\n let dispatcher = Dispatcher::<_, _, DummyArrowTransport>::new(\n\n DummySource::new(&[\"a\", \"b\", \"c\", \"d\", \"e\"], &schema),\n\n &mut destination,\n\n &queries,\n", "file_path": "connectorx/tests/test_arrow.rs", "rank": 44, "score": 61256.616569713144 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let guard = pprof::ProfilerGuard::new(10).unwrap();\n\n\n\n tpch::run(10, &args[1]);\n\n\n\n if let Ok(report) = guard.report().build() {\n\n let file = File::create(\"flamegraph.svg\").unwrap();\n\n report.flamegraph(file).unwrap();\n\n\n\n let mut file = File::create(\"profile.pb\").unwrap();\n\n let profile = report.pprof().unwrap();\n\n\n\n let mut content = Vec::new();\n\n profile.encode(&mut content).unwrap();\n\n file.write_all(&content).unwrap();\n\n };\n\n}\n", "file_path": "connectorx-python/examples/flame_tpch.rs", "rank": 45, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn load_and_parse() {\n\n #[derive(Debug, PartialEq)]\n\n enum Value {\n\n City(String),\n\n State(String),\n\n Population(i64),\n\n Longitude(f64),\n\n Latitude(f64),\n\n }\n\n\n\n let mut source = CSVSource::new(&[\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::I64(false),\n\n 
DummyTypeSystem::F64(false),\n\n DummyTypeSystem::F64(false),\n\n ]);\n\n source.set_queries(&[\"./tests/data/uspop_0.csv\"]);\n\n\n\n let mut partitions = source.partition().unwrap();\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 46, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn test_mixed() {\n\n let schema = [\n\n DummyTypeSystem::I64(false),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::Bool(false),\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::F64(false),\n\n ];\n\n let nrows = vec![4, 7];\n\n let ncols = schema.len();\n\n let queries: Vec<String> = nrows.iter().map(|v| format!(\"{},{}\", v, ncols)).collect();\n\n\n\n let mut destination = MemoryDestination::new();\n\n let dispatcher = Dispatcher::<_, _, DummyMemoryTransport>::new(\n\n DummySource::new(&[\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\"], &schema),\n\n &mut destination,\n\n &queries,\n\n );\n", "file_path": "connectorx/tests/test_mix.rs", "rank": 47, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn load_and_parse() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n\n\n let dburl = env::var(\"POSTGRES_URL\").unwrap();\n\n #[derive(Debug, PartialEq)]\n\n struct Row(i32, Option<i32>, Option<String>, Option<f64>, Option<bool>);\n\n\n\n let mut source = PostgresSource::<Binary>::new(&dburl, 1).unwrap();\n\n source.set_queries(&[\"select * from test_table\"]);\n\n source.fetch_metadata().unwrap();\n\n\n\n let mut partitions = source.partition().unwrap();\n\n assert!(partitions.len() == 1);\n\n let mut partition = partitions.remove(0);\n\n partition.prepare().expect(\"run query\");\n\n\n\n assert_eq!(6, partition.nrows());\n\n assert_eq!(5, partition.ncols());\n\n\n\n let mut parser = partition.parser().unwrap();\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 48, "score": 61256.616569713144 }, { "content": "#[test]\n\n#[should_panic]\n\nfn empty_file() {\n\n let mut source = CSVSource::new(&[]);\n\n source.set_queries(&[\"./tests/data/empty.csv\"]);\n\n let mut partitions = source.partition().unwrap();\n\n for p in &mut partitions {\n\n p.prepare().expect(\"run query\");\n\n }\n\n assert_eq!(0, partitions[0].nrows());\n\n assert_eq!(0, partitions[0].ncols());\n\n let parser = partitions[0].parser();\n\n\n\n let _v: i64 = parser.unwrap().produce().expect(\"produce from emtpy\");\n\n}\n\n\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 49, "score": 61256.616569713144 }, { "content": "#[test]\n\nfn load_and_parse_csv() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n\n\n let dburl = env::var(\"POSTGRES_URL\").unwrap();\n\n #[derive(Debug, PartialEq)]\n\n struct Row(i32, Option<i32>, Option<String>, Option<f64>, Option<bool>);\n\n\n\n let mut source = PostgresSource::<CSV>::new(&dburl, 1).unwrap();\n\n source.set_queries(&[\"select * from test_table\"]);\n\n source.fetch_metadata().unwrap();\n\n\n\n let mut partitions = source.partition().unwrap();\n\n assert!(partitions.len() == 1);\n\n let mut partition = partitions.remove(0);\n\n partition.prepare().expect(\"run query\");\n\n\n\n assert_eq!(6, partition.nrows());\n\n assert_eq!(5, partition.ncols());\n\n\n\n let mut parser = partition.parser().unwrap();\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 50, "score": 60157.33662914421 }, { "content": "#[test]\n\nfn test_postgres_csv() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n\n\n let dburl = 
env::var(\"POSTGRES_URL\").unwrap();\n\n\n\n let queries = [\n\n \"select * from test_table where test_int < 2\",\n\n \"select * from test_table where test_int >= 2\",\n\n ];\n\n let builder = PostgresSource::<CSV>::new(&dburl, 2).unwrap();\n\n let mut dst = MemoryDestination::new();\n\n let dispatcher =\n\n Dispatcher::<_, _, PostgresMemoryTransport<CSV>>::new(builder, &mut dst, &queries);\n\n\n\n dispatcher.run().expect(\"run dispatcher\");\n\n assert_eq!(\n\n array![Some(1), Some(0), Some(2), Some(3), Some(4), Some(1314)],\n\n dst.column_view::<Option<i64>>(0).unwrap()\n\n );\n\n assert_eq!(\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 51, "score": 60157.33662914421 }, { "content": "#[test]\n\nfn test_postgres_agg() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n\n\n let dburl = env::var(\"POSTGRES_URL\").unwrap();\n\n\n\n let queries = [\"SELECT test_bool, SUM(test_float) FROM test_table GROUP BY test_bool\"];\n\n let builder = PostgresSource::new(&dburl, 1).unwrap();\n\n let mut destination = MemoryDestination::new();\n\n let dispatcher = Dispatcher::<_, _, PostgresMemoryTransport<Binary>>::new(\n\n builder,\n\n &mut destination,\n\n &queries,\n\n );\n\n\n\n dispatcher.run().expect(\"run dispatcher\");\n\n assert_eq!(\n\n array![None, Some(false), Some(true)],\n\n destination.column_view::<Option<bool>>(0).unwrap()\n\n );\n\n assert_eq!(\n\n array![Some(10.9), Some(5.2), Some(-10.0)],\n\n destination.column_view::<Option<f64>>(1).unwrap()\n\n );\n\n}\n\n\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 52, "score": 60157.33662914421 }, { "content": "#[test]\n\nfn test_csv_infer_schema() {\n\n let files = [\"./tests/data/infer_0.csv\"];\n\n let source = CSVSource::new(&[]);\n\n\n\n let mut writer = MemoryDestination::new();\n\n let dispatcher = Dispatcher::<_, _, CSVMemoryTransport>::new(source, &mut writer, &files);\n\n\n\n dispatcher.run().expect(\"run dispatcher\");\n\n\n\n let expected_schema = vec![\n\n DummyTypeSystem::I64(false),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::Bool(true),\n\n DummyTypeSystem::String(true),\n\n DummyTypeSystem::F64(false),\n\n DummyTypeSystem::String(true),\n\n DummyTypeSystem::String(false),\n\n DummyTypeSystem::String(true),\n\n ];\n\n\n\n assert_eq!(expected_schema, writer.schema());\n\n}\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 53, "score": 59126.48474336072 }, { "content": "#[test]\n\n#[should_panic]\n\nfn mixed_destination_col_major() {\n\n let mut dw = MemoryDestination::new();\n\n let _ = dw\n\n .allocate(\n\n 11,\n\n &[\"a\", \"b\", \"c\"],\n\n &[\n\n DummyTypeSystem::I64(false),\n\n DummyTypeSystem::F64(true),\n\n DummyTypeSystem::String(true),\n\n ],\n\n DataOrder::ColumnMajor,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "connectorx/tests/test_mix.rs", "rank": 54, "score": 59126.48474336072 }, { "content": "#[test]\n\n#[should_panic]\n\nfn mixed_source_col_major() {\n\n let mut source = DummySource::new(&[\"a\"], &[DummyTypeSystem::F64(false)]);\n\n source.set_data_order(DataOrder::ColumnMajor).unwrap();\n\n}\n\n\n", "file_path": "connectorx/tests/test_mix.rs", "rank": 55, "score": 59126.48474336072 }, { "content": "trait AnyArrayObject<D> {\n\n fn view_mut<'a>(&'a mut self) -> Box<dyn ArrayViewMutObject<'a, D> + 'a>;\n\n fn view<'a>(&'a self) -> Box<dyn ArrayViewObject<'a, D> + 'a>;\n\n fn as_any(&self) -> &dyn Any;\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\n\n\nimpl<A, D> AnyArrayObject<D> for Array<A, D>\n\nwhere\n\n A: 'static + Send,\n\n D: 'static 
+ Dimension,\n\n{\n\n fn view<'a>(&'a self) -> Box<dyn ArrayViewObject<'a, D> + 'a> {\n\n Box::new(Array::<A, D>::view(self))\n\n }\n\n\n\n fn view_mut<'a>(&'a mut self) -> Box<dyn ArrayViewMutObject<'a, D> + 'a> {\n\n Box::new(Array::<A, D>::view_mut(self))\n\n }\n\n\n", "file_path": "connectorx/src/destinations/memory/any_array.rs", "rank": 56, "score": 57781.64188426721 }, { "content": "trait ArrayViewObject<'a, D> {\n\n fn split_at(\n\n self: Box<Self>,\n\n axis: Axis,\n\n index: Ix,\n\n ) -> (\n\n Box<dyn ArrayViewObject<'a, D> + 'a>,\n\n Box<dyn ArrayViewObject<'a, D> + 'a>,\n\n );\n\n}\n\n\n\nimpl<'a, A, D> ArrayViewObject<'a, D> for ArrayView<'a, A, D>\n\nwhere\n\n A: 'static,\n\n D: Dimension + 'static,\n\n{\n\n fn split_at(\n\n self: Box<Self>,\n\n axis: Axis,\n\n index: Ix,\n", "file_path": "connectorx/src/destinations/memory/any_array.rs", "rank": 57, "score": 54604.97612560676 }, { "content": "fn array_to_jsonl(data: &mut [u8]) {\n\n let mut indent = 0;\n\n let n = data.len();\n\n for i in 0..n {\n\n if data[i] == b',' && indent == 0 {\n\n data[i] = b'\\n';\n\n } else if data[i] == b'{' {\n\n indent += 1;\n\n } else if data[i] == b'}' {\n\n indent -= 1;\n\n } else if i < n - 6 && &data[i..i + 6] == b\"[\\\"new\\\"\" {\n\n data[i..i + 6].copy_from_slice(b\"[10001\");\n\n } else if i < n - 9 && &data[i..i + 9] == b\"[\\\"change\\\"\" {\n\n data[i..i + 9].copy_from_slice(b\"[10000002\");\n\n } else if i < n - 9 && &data[i..i + 9] == b\"[\\\"delete\\\"\" {\n\n data[i..i + 9].copy_from_slice(b\"[10000004\");\n\n }\n\n }\n\n}\n", "file_path": "connectorx/src/s3.rs", "rank": 58, "score": 52676.165015056016 }, { "content": "trait ArrayViewMutObject<'a, D>: Send {\n\n fn split_at(\n\n self: Box<Self>,\n\n axis: Axis,\n\n index: Ix,\n\n ) -> (\n\n Box<dyn ArrayViewMutObject<'a, D> + 'a>,\n\n Box<dyn ArrayViewMutObject<'a, D> + 'a>,\n\n );\n\n\n\n fn reborrow<'b>(self: Box<Self>) -> Box<dyn ArrayViewMutObject<'b, D> + 'b>\n\n where\n\n 'a: 'b;\n\n}\n\n\n\nimpl<'a, A, D> ArrayViewMutObject<'a, D> for ArrayViewMut<'a, A, D>\n\nwhere\n\n A: 'static + Send,\n\n D: 'static + Dimension,\n\n{\n", "file_path": "connectorx/src/destinations/memory/any_array.rs", "rank": 59, "score": 50970.14723251999 }, { "content": "CREATE TYPE happiness AS ENUM ('happy', 'very happy', 'ecstatic');\n", "file_path": "scripts/postgres.sql", "rank": 60, "score": 49471.359224994885 }, { "content": "fn create_default_array<T>(nrows: usize, ncols: usize) -> AnyArray<Ix2>\n\nwhere\n\n T: Default + Send + 'static,\n\n{\n\n Array2::<T>::default((nrows, ncols)).into()\n\n}\n\n\n\nimpl ParameterizedOn<DateTime<Utc>> for FArray2 {\n\n fn parameterize() -> Self::Function {\n\n fn imp(nrows: usize, ncols: usize) -> AnyArray<Ix2> {\n\n let t = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc);\n\n Array2::from_elem((nrows, ncols), t).into()\n\n }\n\n imp\n\n }\n\n}\n\n\n\nimpl ParameterizedOn<Option<DateTime<Utc>>> for FArray2 {\n\n fn parameterize() -> Self::Function {\n\n fn imp(nrows: usize, ncols: usize) -> AnyArray<Ix2> {\n\n Array2::<Option<DateTime<Utc>>>::from_elem((nrows, ncols), None).into()\n\n }\n\n imp\n\n }\n\n}\n", "file_path": "connectorx/src/destinations/memory/mod.rs", "rank": 61, "score": 41967.33379210853 }, { "content": "CREATE TABLE IF NOT EXISTS test_str(\n\n id INTEGER NOT NULL,\n\n test_language TEXT,\n\n test_hello TEXT\n\n);\n\n\n\nINSERT INTO test_str VALUES (0, 'English', 'Hello');\n\nINSERT INTO test_str VALUES (1, '中文', '你好');\n\nINSERT INTO test_str VALUES (2, '日本語', 
'こんにちは');\n\nINSERT INTO test_str VALUES (3, 'русский', 'Здра́вствуйте');\n\nINSERT INTO test_str VALUES (4, 'Emoji', '😁😂😜');\n\nINSERT INTO test_str VALUES (5, 'Latin1', '¥§¤®ð');\n\nINSERT INTO test_str VALUES (6, 'Extra', 'y̆');\n\nINSERT INTO test_str VALUES (7, 'Mixed', 'Ha好ち😁ðy̆');\n\n\n", "file_path": "scripts/postgres.sql", "rank": 62, "score": 34566.78621672745 }, { "content": "DROP TABLE IF EXISTS test_str;\n", "file_path": "scripts/postgres.sql", "rank": 63, "score": 34566.78621672745 }, { "content": " i: 0,\n\n })\n\n }\n\n ret\n\n }\n\n}\n\n\n\npub struct DateTimeColumn<'a> {\n\n data: &'a mut [i64],\n\n i: usize,\n\n}\n\n\n\nimpl<'a> PandasColumnObject for DateTimeColumn<'a> {\n\n fn typecheck(&self, id: TypeId) -> bool {\n\n id == TypeId::of::<DateTime<Utc>>() || id == TypeId::of::<Option<DateTime<Utc>>>()\n\n }\n\n fn len(&self) -> usize {\n\n self.data.len()\n\n }\n\n fn typename(&self) -> &'static str {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 64, "score": 33548.574516332345 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse chrono::{DateTime, Utc};\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::PyArray;\n\nuse pyo3::{FromPyObject, PyAny, PyResult};\n\nuse std::any::TypeId;\n\n\n\n// datetime64 is represented in int64 in numpy\n\n// https://github.com/numpy/numpy/blob/master/numpy/core/include/numpy/npy_common.h#L1104\n\npub struct DateTimeBlock<'a> {\n\n data: ArrayViewMut2<'a, i64>,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for DateTimeBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n check_dtype(ob, \"datetime64[ns]\")?;\n\n let array = ob.downcast::<PyArray<i64, Ix2>>()?;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 65, "score": 33543.75447594746 }, { "content": " std::any::type_name::<DateTime<Utc>>()\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<DateTime<Utc>> for DateTimeColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: DateTime<Utc>) {\n\n unsafe { *self.data.get_unchecked_mut(self.i) = val.timestamp_nanos() };\n\n self.i += 1;\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<Option<DateTime<Utc>>> for DateTimeColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<DateTime<Utc>>) {\n\n // numpy use i64::MIN as NaT\n\n unsafe {\n\n *self.data.get_unchecked_mut(self.i) =\n\n val.map(|t| t.timestamp_nanos()).unwrap_or(i64::MIN);\n\n };\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 66, "score": 33543.41096722353 }, { "content": " self.i += 1;\n\n }\n\n}\n\n\n\nimpl HasPandasColumn for DateTime<Utc> {\n\n type PandasColumn<'a> = DateTimeColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for Option<DateTime<Utc>> {\n\n type PandasColumn<'a> = DateTimeColumn<'a>;\n\n}\n\n\n\nimpl<'a> DateTimeColumn<'a> {\n\n pub fn partition(self, counts: &[usize]) -> Vec<DateTimeColumn<'a>> {\n\n let mut partitions = vec![];\n\n let mut data = self.data;\n\n\n\n for &c in counts {\n\n let (splitted_data, rest) = data.split_at_mut(c);\n\n data = rest;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 67, "score": 33538.388236556915 }, { "content": " let data = unsafe { array.as_array_mut() };\n\n Ok(DateTimeBlock { data })\n\n }\n\n}\n\n\n\nimpl<'a> DateTimeBlock<'a> {\n\n #[throws(ConnectorAgentError)]\n\n pub fn split(self) -> 
Vec<DateTimeColumn<'a>> {\n\n let mut ret = vec![];\n\n let mut view = self.data;\n\n\n\n let nrows = view.ncols();\n\n while view.nrows() > 0 {\n\n let (col, rest) = view.split_at(Axis(0), 1);\n\n view = rest;\n\n ret.push(DateTimeColumn {\n\n data: col\n\n .into_shape(nrows)?\n\n .into_slice()\n\n .ok_or_else(|| anyhow!(\"get None for splitted DateTime data\"))?,\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 68, "score": 33533.02891193374 }, { "content": "\n\n partitions.push(DateTimeColumn {\n\n data: splitted_data,\n\n i: 0,\n\n });\n\n }\n\n\n\n partitions\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 69, "score": 33528.5957734899 }, { "content": " mutex: self.mutex.clone(),\n\n })\n\n }\n\n ret\n\n }\n\n}\n\n\n\npub struct BytesColumn<'a> {\n\n data: &'a mut [PyBytes],\n\n next_write: usize,\n\n bytes_buf: Vec<u8>,\n\n bytes_lengths: Vec<usize>,\n\n buf_size: usize,\n\n mutex: Arc<Mutex<()>>,\n\n}\n\n\n\nimpl<'a> PandasColumnObject for BytesColumn<'a> {\n\n fn typecheck(&self, id: TypeId) -> bool {\n\n id == TypeId::of::<&'static [u8]>() || id == TypeId::of::<Option<&'static [u8]>>()\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 70, "score": 33513.28834635249 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::{npyffi::NPY_TYPES, Element, PyArray, PyArrayDescr};\n\nuse pyo3::{FromPyObject, Py, PyAny, PyResult, Python};\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone)]\n\n#[repr(transparent)]\n\npub struct PyBytes(Py<pyo3::types::PyBytes>);\n\n\n\n// In order to put it into a numpy array\n\nimpl Element for PyBytes {\n\n const DATA_TYPE: numpy::DataType = numpy::DataType::Object;\n\n fn is_same_type(dtype: &PyArrayDescr) -> bool {\n\n unsafe { *dtype.as_dtype_ptr() }.type_num == NPY_TYPES::NPY_OBJECT as i32\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 71, "score": 33509.039610160056 }, { "content": "}\n\n\n\npub struct BytesBlock<'a> {\n\n data: ArrayViewMut2<'a, PyBytes>,\n\n mutex: Arc<Mutex<()>>,\n\n buf_size_mb: usize,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for BytesBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n check_dtype(ob, \"object\")?;\n\n let array = ob.downcast::<PyArray<PyBytes, Ix2>>()?;\n\n let data = unsafe { array.as_array_mut() };\n\n Ok(BytesBlock {\n\n data,\n\n mutex: Arc::new(Mutex::new(())), // allocate the lock here since only BytesBlock needs to aquire the GIL for now\n\n buf_size_mb: 16, // in MB\n\n })\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 72, "score": 33506.93572392208 }, { "content": "\n\nimpl<'a> BytesBlock<'a> {\n\n #[throws(ConnectorAgentError)]\n\n pub fn split(self) -> Vec<BytesColumn<'a>> {\n\n let mut ret = vec![];\n\n let mut view = self.data;\n\n\n\n let nrows = view.ncols();\n\n while view.nrows() > 0 {\n\n let (col, rest) = view.split_at(Axis(0), 1);\n\n view = rest;\n\n ret.push(BytesColumn {\n\n data: col\n\n .into_shape(nrows)?\n\n .into_slice()\n\n .ok_or_else(|| anyhow!(\"get None for splitted String data\"))?,\n\n next_write: 0,\n\n bytes_lengths: vec![],\n\n bytes_buf: Vec::with_capacity(self.buf_size_mb * 2 << 20 * 11 / 10), // allocate a little bit more memory to avoid Vec growth\n\n 
buf_size: self.buf_size_mb * 2 << 20,\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 73, "score": 33506.90731284245 }, { "content": "\n\nimpl HasPandasColumn for Option<Vec<u8>> {\n\n type PandasColumn<'a> = BytesColumn<'a>;\n\n}\n\n\n\nimpl<'a> BytesColumn<'a> {\n\n pub fn partition(self, counts: &[usize]) -> Vec<BytesColumn<'a>> {\n\n let mut partitions = vec![];\n\n let mut data = self.data;\n\n\n\n for &c in counts {\n\n let (splitted_data, rest) = data.split_at_mut(c);\n\n data = rest;\n\n\n\n partitions.push(BytesColumn {\n\n data: splitted_data,\n\n next_write: 0,\n\n bytes_lengths: vec![],\n\n bytes_buf: Vec::with_capacity(self.buf_size),\n\n buf_size: self.buf_size,\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 74, "score": 33506.77132233149 }, { "content": " fn len(&self) -> usize {\n\n self.data.len()\n\n }\n\n fn typename(&self) -> &'static str {\n\n std::any::type_name::<&'static [u8]>()\n\n }\n\n #[throws(ConnectorAgentError)]\n\n fn finalize(&mut self) {\n\n self.flush()?;\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<Vec<u8>> for BytesColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Vec<u8>) {\n\n self.bytes_lengths.push(val.len());\n\n self.bytes_buf.extend_from_slice(&val[..]);\n\n self.try_flush()?;\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 75, "score": 33505.69183642712 }, { "content": " mutex: self.mutex.clone(),\n\n });\n\n }\n\n\n\n partitions\n\n }\n\n\n\n #[throws(ConnectorAgentError)]\n\n pub fn flush(&mut self) {\n\n let nstrings = self.bytes_lengths.len();\n\n\n\n if nstrings > 0 {\n\n let py = unsafe { Python::assume_gil_acquired() };\n\n\n\n {\n\n // allocation in python is not thread safe\n\n let _guard = self\n\n .mutex\n\n .lock()\n\n .map_err(|e| anyhow!(\"mutex poisoned {}\", e))?;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 76, "score": 33504.38410102806 }, { "content": " let mut start = 0;\n\n for (i, &len) in self.bytes_lengths.iter().enumerate() {\n\n let end = start + len;\n\n if len != 0 {\n\n unsafe {\n\n // allocate and write in the same time\n\n *self.data.get_unchecked_mut(self.next_write + i) = PyBytes(\n\n pyo3::types::PyBytes::new(py, &self.bytes_buf[start..end]).into(),\n\n );\n\n };\n\n }\n\n start = end;\n\n }\n\n }\n\n\n\n self.bytes_buf.truncate(0);\n\n self.next_write += nstrings;\n\n }\n\n }\n\n\n\n #[throws(ConnectorAgentError)]\n\n pub fn try_flush(&mut self) {\n\n if self.bytes_buf.len() >= self.buf_size {\n\n self.flush()?;\n\n }\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 77, "score": 33502.48311671532 }, { "content": "\n\nimpl<'a> PandasColumn<Option<Vec<u8>>> for BytesColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<Vec<u8>>) {\n\n match val {\n\n Some(b) => {\n\n self.bytes_lengths.push(b.len());\n\n self.bytes_buf.extend_from_slice(&b[..]);\n\n self.try_flush()?;\n\n }\n\n None => {\n\n self.bytes_lengths.push(0);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl HasPandasColumn for Vec<u8> {\n\n type PandasColumn<'a> = BytesColumn<'a>;\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 78, "score": 33500.83089940828 }, { "content": " string_buf: Vec<u8>,\n\n string_lengths: Vec<usize>,\n\n buf_size: usize,\n\n mutex: Arc<Mutex<()>>,\n\n}\n\n\n\nimpl<'a> PandasColumnObject for StringColumn<'a> {\n\n fn typecheck(&self, id: TypeId) -> bool {\n\n id == 
TypeId::of::<&'static [u8]>() || id == TypeId::of::<Option<&'static [u8]>>()\n\n }\n\n fn len(&self) -> usize {\n\n self.data.len()\n\n }\n\n fn typename(&self) -> &'static str {\n\n std::any::type_name::<&'static [u8]>()\n\n }\n\n #[throws(ConnectorAgentError)]\n\n fn finalize(&mut self) {\n\n self.flush(true)?;\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 79, "score": 33364.53831448185 }, { "content": "use super::super::pystring::{PyString, StringInfo};\n\nuse super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse itertools::Itertools;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::PyArray;\n\nuse pyo3::{FromPyObject, PyAny, PyResult, Python};\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Mutex};\n\n\n\npub struct StringBlock<'a> {\n\n data: ArrayViewMut2<'a, PyString>,\n\n mutex: Arc<Mutex<()>>,\n\n buf_size_mb: usize,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for StringBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 80, "score": 33363.94367049226 }, { "content": "}\n\n\n\nimpl<'r, 'a> PandasColumn<&'r str> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: &'r str) {\n\n let bytes = val.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<Box<str>> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Box<str>) {\n\n let bytes = val.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 81, "score": 33363.63498286351 }, { "content": "}\n\n\n\nimpl<'a> PandasColumn<String> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: String) {\n\n let bytes = val.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<char> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: char) {\n\n let mut buffer = [0; 4]; // a char is max to 4 bytes\n\n let bytes = val.encode_utf8(&mut buffer).as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 82, "score": 33363.35234043706 }, { "content": " self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n None => {\n\n self.string_lengths.push(0);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'r> HasPandasColumn for &'r str {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl<'r> HasPandasColumn for Option<&'r str> {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for String {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 83, "score": 33363.22862921016 }, { "content": " type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for Option<String> {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for char {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl 
HasPandasColumn for Option<char> {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for Box<str> {\n\n type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl HasPandasColumn for Option<Box<str>> {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 84, "score": 33363.015041043545 }, { "content": " Some(b) => {\n\n let bytes = b.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n None => {\n\n self.string_lengths.push(0);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<Option<char>> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<char>) {\n\n match val {\n\n Some(b) => {\n\n let mut buffer = [0; 4]; // a char is max to 4 bytes\n\n let bytes = b.encode_utf8(&mut buffer).as_bytes();\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 85, "score": 33362.734647622165 }, { "content": " }\n\n}\n\n\n\nimpl<'r, 'a> PandasColumn<Option<&'r str>> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<&'r str>) {\n\n match val {\n\n Some(b) => {\n\n let bytes = b.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n None => {\n\n self.string_lengths.push(0);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 86, "score": 33362.51701418248 }, { "content": " type PandasColumn<'a> = StringColumn<'a>;\n\n}\n\n\n\nimpl<'a> StringColumn<'a> {\n\n pub fn partition(self, counts: &[usize]) -> Vec<StringColumn<'a>> {\n\n let mut partitions = vec![];\n\n let mut data = self.data;\n\n\n\n for &c in counts {\n\n let (splitted_data, rest) = data.split_at_mut(c);\n\n data = rest;\n\n\n\n partitions.push(StringColumn {\n\n data: splitted_data,\n\n next_write: 0,\n\n string_lengths: vec![],\n\n string_buf: Vec::with_capacity(self.buf_size),\n\n buf_size: self.buf_size,\n\n mutex: self.mutex.clone(),\n\n });\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 87, "score": 33362.42989782675 }, { "content": "impl<'a> PandasColumn<Option<Box<str>>> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<Box<str>>) {\n\n match val {\n\n Some(b) => {\n\n let bytes = b.as_bytes();\n\n self.string_lengths.push(bytes.len());\n\n self.string_buf.extend_from_slice(bytes);\n\n self.try_flush()?;\n\n }\n\n None => {\n\n self.string_lengths.push(0);\n\n }\n\n }\n\n }\n\n}\n\nimpl<'a> PandasColumn<Option<String>> for StringColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<String>) {\n\n match val {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 88, "score": 33362.425949502336 }, { "content": " view = rest;\n\n ret.push(StringColumn {\n\n data: col\n\n .into_shape(nrows)?\n\n .into_slice()\n\n .ok_or_else(|| anyhow!(\"get None for splitted String data\"))?,\n\n next_write: 0,\n\n string_lengths: vec![],\n\n string_buf: Vec::with_capacity(self.buf_size_mb * 2 << 20 * 11 / 10), // allocate a little bit more memory to avoid Vec growth\n\n buf_size: self.buf_size_mb * 2 << 20,\n\n mutex: self.mutex.clone(),\n\n })\n\n }\n\n ret\n\n }\n\n}\n\n\n\npub struct StringColumn<'a> {\n\n data: &'a mut [PyString],\n\n next_write: usize,\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 
89, "score": 33359.94141957308 }, { "content": " }\n\n\n\n partitions\n\n }\n\n\n\n #[throws(ConnectorAgentError)]\n\n pub fn flush(&mut self, force_flush: bool) {\n\n let nstrings = self.string_lengths.len();\n\n if nstrings == 0 {\n\n return;\n\n }\n\n\n\n let py = unsafe { Python::assume_gil_acquired() };\n\n let string_infos = match force_flush {\n\n true => {\n\n // allocation in python is not thread safe\n\n let _guard = self\n\n .mutex\n\n .lock()\n\n .map_err(|e| anyhow!(\"mutex poisoned {}\", e))?;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 90, "score": 33358.62250826928 }, { "content": " start = end;\n\n }\n\n\n\n self.string_buf.truncate(0);\n\n self.next_write += nstrings;\n\n }\n\n }\n\n\n\n #[throws(ConnectorAgentError)]\n\n pub fn try_flush(&mut self) {\n\n if self.string_buf.len() >= self.buf_size {\n\n self.flush(true)?;\n\n return;\n\n }\n\n #[cfg(feature = \"nbstr\")]\n\n if self.string_buf.len() >= self.buf_size / 2 {\n\n self.flush(false)?;\n\n }\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 91, "score": 33358.059051697 }, { "content": " check_dtype(ob, \"object\")?;\n\n let array = ob.downcast::<PyArray<PyString, Ix2>>()?;\n\n let data = unsafe { array.as_array_mut() };\n\n Ok(StringBlock {\n\n data,\n\n mutex: Arc::new(Mutex::new(())), // allocate the lock here since only StringBlock needs to aquire the GIL for now\n\n buf_size_mb: 4, // in MB\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> StringBlock<'a> {\n\n #[throws(ConnectorAgentError)]\n\n pub fn split(self) -> Vec<StringColumn<'a>> {\n\n let mut ret = vec![];\n\n let mut view = self.data;\n\n\n\n let nrows = view.ncols();\n\n while view.nrows() > 0 {\n\n let (col, rest) = view.split_at(Axis(0), 1);\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 92, "score": 33357.471570897986 }, { "content": " Err(_e) => {\n\n vec![]\n\n }\n\n },\n\n };\n\n\n\n if string_infos.len() > 0 {\n\n let mut start = 0;\n\n for (i, (len, info)) in self\n\n .string_lengths\n\n .drain(..)\n\n .zip_eq(string_infos)\n\n .enumerate()\n\n {\n\n let end = start + len;\n\n if len != 0 {\n\n unsafe {\n\n self.data[self.next_write + i].write(&self.string_buf[start..end], info)\n\n };\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 93, "score": 33354.41122416441 }, { "content": " Ok(_guard) => {\n\n let mut string_infos = Vec::with_capacity(self.string_lengths.len());\n\n let mut start = 0;\n\n for (i, &len) in self.string_lengths.iter().enumerate() {\n\n let end = start + len;\n\n unsafe {\n\n string_infos.push(StringInfo::detect(&self.string_buf[start..end]));\n\n }\n\n if len != 0 {\n\n unsafe {\n\n *self.data.get_unchecked_mut(self.next_write + i) = string_infos\n\n .last()\n\n .ok_or_else(|| anyhow!(\"empty string_info vector\"))?\n\n .pystring(py)\n\n };\n\n }\n\n start = end;\n\n }\n\n string_infos\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 94, "score": 33351.55969385983 }, { "content": " let mut string_infos = Vec::with_capacity(self.string_lengths.len());\n\n let mut start = 0;\n\n for (i, &len) in self.string_lengths.iter().enumerate() {\n\n let end = start + len;\n\n unsafe {\n\n string_infos.push(StringInfo::detect(&self.string_buf[start..end]));\n\n }\n\n if len != 0 {\n\n unsafe {\n\n *self.data.get_unchecked_mut(self.next_write + i) = string_infos\n\n .last()\n\n .ok_or_else(|| anyhow!(\"empty string_info vector\"))?\n\n .pystring(py)\n\n 
};\n\n }\n\n start = end;\n\n }\n\n string_infos\n\n }\n\n false => match self.mutex.try_lock() {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 95, "score": 33351.5409211826 }, { "content": "def test_empty_result(sqlite_db: str) -> None:\n\n query = \"SELECT * FROM test_table where test_int < -100\"\n\n df = read_sql(sqlite_db, query)\n\n expected = pd.DataFrame(\n\n data={\n\n \"test_int\": pd.Series([], dtype=\"object\"),\n\n \"test_nullint\": pd.Series([], dtype=\"object\"),\n\n \"test_str\": pd.Series([], dtype=\"object\"),\n\n \"test_float\": pd.Series([], dtype=\"object\"),\n\n \"test_bool\": pd.Series([], dtype=\"object\"),\n\n \"test_date\": pd.Series([], dtype=\"object\"),\n\n \"test_time\": pd.Series([], dtype=\"object\"),\n\n \"test_datetime\": pd.Series([], dtype=\"object\"),\n\n }\n\n )\n", "file_path": "connectorx-python/connectorx/tests/test_sqlite.py", "rank": 96, "score": 28867.826473508263 }, { "content": "def test_empty_result_on_some_partition(sqlite_db: str) -> None:\n\n query = \"SELECT * FROM test_table where test_int < 1\"\n\n df = read_sql(sqlite_db, query, partition_on=\"test_int\", partition_num=3)\n\n expected = pd.DataFrame(\n\n data={\n\n \"test_int\": pd.Series([0], dtype=\"Int64\"),\n\n \"test_nullint\": pd.Series([5], dtype=\"Int64\"),\n\n \"test_str\": pd.Series([\"こんにちは\"], dtype=\"object\"),\n\n \"test_float\": pd.Series([3.1], dtype=\"float\"),\n\n \"test_bool\": pd.Series([None], dtype=\"boolean\"),\n\n \"test_date\": pd.Series(\n\n [\n\n np.datetime64(\"1996-02-28\"),\n\n ], dtype=\"datetime64[ns]\"\n\n ),\n\n \"test_time\": pd.Series(\n\n [\"23:00:10\"], dtype=\"object\"\n\n ),\n\n \"test_datetime\": pd.Series(\n\n [\n\n None,\n\n ], dtype=\"datetime64[ns]\"\n\n ),\n\n\n\n }\n\n )\n", "file_path": "connectorx-python/connectorx/tests/test_sqlite.py", "rank": 97, "score": 28090.3363759349 }, { "content": "def test_empty_result(postgres_url: str) -> None:\n\n query = \"SELECT * FROM test_table where test_int < -100\"\n\n df = read_sql(postgres_url, query)\n\n expected = pd.DataFrame(\n\n data={\n\n \"test_int\": pd.Series([], dtype=\"object\"),\n\n \"test_nullint\": pd.Series([], dtype=\"object\"),\n\n \"test_str\": pd.Series([], dtype=\"object\"),\n\n \"test_float\": pd.Series([], dtype=\"object\"),\n\n \"test_bool\": pd.Series([], dtype=\"object\"),\n\n }\n\n )\n", "file_path": "connectorx-python/connectorx/tests/test_read_sql.py", "rank": 98, "score": 28090.3363759349 }, { "content": "def test_empty_result_on_some_partition(postgres_url: str) -> None:\n\n query = \"SELECT * FROM test_table where test_int < 1\"\n\n df = read_sql(postgres_url, query, partition_on=\"test_int\", partition_num=3)\n\n expected = pd.DataFrame(\n\n data={\n\n \"test_int\": pd.Series([0], dtype=\"Int64\"),\n\n \"test_nullint\": pd.Series([5], dtype=\"Int64\"),\n\n \"test_str\": pd.Series([\"a\"], dtype=\"object\"),\n\n \"test_float\": pd.Series([3.1], dtype=\"float\"),\n\n \"test_bool\": pd.Series([None], dtype=\"boolean\"),\n\n }\n\n )\n", "file_path": "connectorx-python/connectorx/tests/test_read_sql.py", "rank": 99, "score": 27353.627819713965 } ]
Rust
src/config.rs
theotherjimmy/lanta
2579205c210b9bc58d0636e1af3538f433eff736
use std::collections::HashMap; use std::fs::File; use std::os::raw::c_uint; use std::path::PathBuf; use std::str::FromStr; use log::warn; use miette::IntoDiagnostic; use serde::de; use serde::{Deserialize, Deserializer}; use crate::keysym::*; use crate::layout::*; use crate::{ self as lanta, cmd::Command, Borders, KeyHandlers, ModKey, WindowId, }; #[derive(Hash, PartialEq, Eq, Debug)] struct KeyInner { mods: Vec<ModKey>, key: c_uint, } impl FromStr for KeyInner { type Err = String; fn from_str(frm: &str) -> Result<Self, String> { let mut iter = frm.rsplit("-"); let key = match iter.next().ok_or(String::from("no key found"))? { "a" => XK_a, "b" => XK_b, "c" => XK_c, "d" => XK_d, "e" => XK_e, "f" => XK_f, "g" => XK_g, "h" => XK_h, "i" => XK_i, "j" => XK_j, "k" => XK_k, "l" => XK_l, "m" => XK_m, "n" => XK_n, "o" => XK_o, "p" => XK_p, "q" => XK_q, "r" => XK_r, "s" => XK_s, "t" => XK_t, "u" => XK_u, "v" => XK_v, "w" => XK_w, "x" => XK_x, "y" => XK_y, "z" => XK_z, "space" => XK_space, "enter" => XK_Return, "tab" => XK_Tab, "down" => XK_Down, "up" => XK_Up, "right" => XK_Right, "left" => XK_Left, "print" => XK_Print, a => Err(format!("Could not match key {}", a))?, }; let mods = iter .map(|mod_key| match mod_key { "C" => Ok(ModKey::Control), "M" => Ok(ModKey::Mod1), "S" => Ok(ModKey::Shift), "H" => Ok(ModKey::Mod4), a => Err(format!("Did not understand modifier {}", a)), }) .collect::<Result<Vec<_>, String>>()?; Ok(KeyInner { mods, key }) } } impl<'de> Deserialize<'de> for KeyInner { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; s.parse().map_err(de::Error::custom) } } #[derive(Deserialize, Debug)] #[serde(tag = "type")] enum LayoutSelectInner { ThreeColumn { #[serde(default)] padding: u32, }, Stack { #[serde(default)] padding: u32, }, } #[derive(Deserialize, Debug)] struct LayoutSelect { name: String, #[serde(flatten)] layout: LayoutSelectInner, } impl Into<Box<dyn Layout<WindowId>>> for LayoutSelect { fn into(self) -> Box<dyn Layout<WindowId>> { match self.layout { LayoutSelectInner::ThreeColumn { padding } => { Box::new(ThreeColumn::new(self.name, padding, 40)) } LayoutSelectInner::Stack { padding } => { Box::new(StackLayout::new(self.name, padding)) } } } } #[derive(Deserialize, Debug)] struct Config { keys: HashMap<KeyInner, Command>, layouts: Vec<LayoutSelect>, borders: Borders, } pub fn load_config_yaml(config_path: PathBuf) -> miette::Result<lanta::Config> { let config_file = File::open(config_path).into_diagnostic()?; let Config { keys, layouts, borders, } = serde_yaml::from_reader(config_file).into_diagnostic()?; let keys: KeyHandlers = keys .into_iter() .map(|(k, v)| (k.mods, k.key, v.into())) .collect(); let layouts: Vec<_> = layouts.into_iter().map(|l| l.into()).collect(); Ok(lanta::Config { keys, layouts, borders, }) } pub fn load_state(state_path: PathBuf) -> std::io::Result<lanta::State> { let state_file = File::open(state_path)?; Ok(match serde_yaml::from_reader(state_file) { Ok(state) => state, Err(e) => { warn!("Could not deserialize state: {}", e); Default::default() } }) }
use std::collections::HashMap; use std::fs::File; use std::os::raw::c_uint; use std::path::PathBuf; use std::str::FromStr; use log::warn; use miette::IntoDiagnostic; use serde::de; use serde::{Deserialize, Deserializer}; use crate::keysym::*; use crate::layout::*; use crate::{ self as lanta, cmd::Command, Borders, KeyHandlers, ModKey, WindowId, }; #[derive(Hash, PartialEq, Eq, Debug)] struct KeyInner { mods: Vec<ModKey>, key: c_uint, } impl FromStr for KeyInner { type Err = String; fn from_str(frm: &str) -> Result<Self, String> { let mut iter = frm.rsplit("-"); let key = match iter.next().ok_or(String::from("no key found"))? { "a" => XK_a, "b" => XK_b, "c" => XK_c, "d" => XK_d, "e" => XK_e, "f" => XK_f, "g" => XK_g, "h" => XK_h, "i" => XK_i, "j" => XK_j, "k" => XK_k, "l" => XK_l, "m" => XK_m, "n" => XK_n, "o" => XK_o, "p" => XK_p, "q" => XK_q, "r" => XK_r, "s" => XK_s, "t" => XK_t, "u" => XK_u, "v" => XK_v, "w" => XK_w, "x" => XK_x, "y" => XK_y, "z" => XK_z, "space" => XK_space, "enter" => XK_Return, "tab" => XK_Tab, "down" => XK_Down, "up" => XK_Up, "right" => XK_Right, "left" => XK_Left,
, Command>, layouts: Vec<LayoutSelect>, borders: Borders, } pub fn load_config_yaml(config_path: PathBuf) -> miette::Result<lanta::Config> { let config_file = File::open(config_path).into_diagnostic()?; let Config { keys, layouts, borders, } = serde_yaml::from_reader(config_file).into_diagnostic()?; let keys: KeyHandlers = keys .into_iter() .map(|(k, v)| (k.mods, k.key, v.into())) .collect(); let layouts: Vec<_> = layouts.into_iter().map(|l| l.into()).collect(); Ok(lanta::Config { keys, layouts, borders, }) } pub fn load_state(state_path: PathBuf) -> std::io::Result<lanta::State> { let state_file = File::open(state_path)?; Ok(match serde_yaml::from_reader(state_file) { Ok(state) => state, Err(e) => { warn!("Could not deserialize state: {}", e); Default::default() } }) }
"print" => XK_Print, a => Err(format!("Could not match key {}", a))?, }; let mods = iter .map(|mod_key| match mod_key { "C" => Ok(ModKey::Control), "M" => Ok(ModKey::Mod1), "S" => Ok(ModKey::Shift), "H" => Ok(ModKey::Mod4), a => Err(format!("Did not understand modifier {}", a)), }) .collect::<Result<Vec<_>, String>>()?; Ok(KeyInner { mods, key }) } } impl<'de> Deserialize<'de> for KeyInner { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; s.parse().map_err(de::Error::custom) } } #[derive(Deserialize, Debug)] #[serde(tag = "type")] enum LayoutSelectInner { ThreeColumn { #[serde(default)] padding: u32, }, Stack { #[serde(default)] padding: u32, }, } #[derive(Deserialize, Debug)] struct LayoutSelect { name: String, #[serde(flatten)] layout: LayoutSelectInner, } impl Into<Box<dyn Layout<WindowId>>> for LayoutSelect { fn into(self) -> Box<dyn Layout<WindowId>> { match self.layout { LayoutSelectInner::ThreeColumn { padding } => { Box::new(ThreeColumn::new(self.name, padding, 40)) } LayoutSelectInner::Stack { padding } => { Box::new(StackLayout::new(self.name, padding)) } } } } #[derive(Deserialize, Debug)] struct Config { keys: HashMap<KeyInner
random
[ { "content": "/// A single key, of the same type as the `x11::keysym` constants.\n\ntype Key = c_uint;\n\n\n\n/// A combination of zero or more mods and a key.\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n\npub struct KeyCombo {\n\n pub mod_mask: ModMask,\n\n pub keysym: Key,\n\n}\n\n\n\nimpl KeyCombo {\n\n fn new(mods: &[ModKey], keysym: Key) -> KeyCombo {\n\n let mod_mask =\n\n mods.iter().fold(0, |mask, mod_key| mask | mod_key.mask());\n\n KeyCombo { mod_mask, keysym }\n\n }\n\n}\n\n\n\npub struct KeyHandlers {\n\n hashmap: HashMap<KeyCombo, Command>,\n\n}\n", "file_path": "src/keys.rs", "rank": 0, "score": 108058.76200934025 }, { "content": "type ModMask = c_uint;\n\n\n\nimpl ModKey {\n\n pub fn mask_all() -> ModMask {\n\n xcb::MOD_MASK_SHIFT\n\n | xcb::MOD_MASK_LOCK\n\n | xcb::MOD_MASK_CONTROL\n\n | xcb::MOD_MASK_1\n\n | xcb::MOD_MASK_2\n\n | xcb::MOD_MASK_3\n\n | xcb::MOD_MASK_4\n\n | xcb::MOD_MASK_5\n\n }\n\n\n\n fn mask(self) -> ModMask {\n\n match self {\n\n ModKey::Shift => xcb::MOD_MASK_SHIFT,\n\n ModKey::Lock => xcb::MOD_MASK_LOCK,\n\n ModKey::Control => xcb::MOD_MASK_CONTROL,\n\n ModKey::Mod1 => xcb::MOD_MASK_1,\n\n ModKey::Mod2 => xcb::MOD_MASK_2,\n\n ModKey::Mod3 => xcb::MOD_MASK_3,\n\n ModKey::Mod4 => xcb::MOD_MASK_4,\n\n ModKey::Mod5 => xcb::MOD_MASK_5,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/keys.rs", "rank": 1, "score": 102978.34300423374 }, { "content": "fn desktops(conn: &ewmh::Connection, screen_idx: i32) -> Vec<String> {\n\n let cur_d = ewmh::get_number_of_desktops(conn, screen_idx)\n\n .get_reply()\n\n .unwrap_or(1);\n\n let mut desktop_names: Vec<_> = ewmh::get_desktop_names(conn, screen_idx)\n\n .get_reply()\n\n .map(|r| r.strings().into_iter().map(String::from).collect())\n\n .unwrap_or_default();\n\n if (cur_d as usize) >= desktop_names.len() {\n\n for num in desktop_names.len()..(cur_d as usize) {\n\n desktop_names.push(format!(\"{}\", num));\n\n }\n\n } else {\n\n for _ in (cur_d as usize)..desktop_names.len() {\n\n desktop_names.pop();\n\n }\n\n }\n\n desktop_names\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 51831.874534940594 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct Window {\n\n id: WindowId,\n\n group: GroupId,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 45046.47202999408 }, { "content": "#[derive(Debug, Default)]\n\nstruct Desktop<'i> {\n\n name: &'i str,\n\n cwd: Option<&'i str>,\n\n}\n\n\n\nimpl<'i> Desktop<'i> {\n\n fn with_cwd(self, cwd: &'i str) -> Self {\n\n if self.name.len() == 0 {\n\n let path = Path::new(cwd);\n\n let name = path\n\n .file_name()\n\n .and_then(std::ffi::OsStr::to_str)\n\n .unwrap_or(cwd);\n\n Self {\n\n name,\n\n cwd: Some(cwd),\n\n }\n\n } else {\n\n Self {\n\n cwd: Some(cwd),\n", "file_path": "src/main.rs", "rank": 9, "score": 42478.738987546894 }, { "content": "#[test]\n\nfn bar_in_the_middle_2k() {\n\n let vp_up = Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n };\n\n let vp_down = Viewport {\n\n x: 0,\n\n y: 1440,\n\n width: 2560,\n\n height: 1440,\n\n };\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 0,\n\n bottom: 1468,\n\n left_start_y: 0,\n\n left_end_y: 0,\n", "file_path": "src/viewport.rs", "rank": 10, "score": 41623.97645163425 }, { "content": "fn new_desktop(\n\n conn: &ewmh::Connection,\n\n screen_idx: i32,\n\n name: String,\n\n) -> Result<()> {\n\n let mut desktop_names = desktops(&conn, screen_idx);\n\n desktop_names.push(name);\n\n ewmh::set_desktop_names_checked(\n\n &conn,\n\n screen_idx,\n\n 
desktop_names.iter().map(String::as_str),\n\n );\n\n ewmh::request_change_current_desktop(\n\n &conn,\n\n screen_idx,\n\n desktop_names.len() as u32 - 1,\n\n 0,\n\n )\n\n .request_check()\n\n .into_diagnostic()\n\n}\n\n\n\n/// A parsed representation of a desktop. Desktops have a name, and optionally,\n\n/// a working directory.\n", "file_path": "src/main.rs", "rank": 11, "score": 41623.97645163425 }, { "content": "#[test]\n\nfn bar_in_the_middle_1080() {\n\n let vp_up = Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n };\n\n let vp_down = Viewport {\n\n x: 0,\n\n y: 1440,\n\n width: 1920,\n\n height: 1080,\n\n };\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 1440 + 38,\n\n bottom: 0,\n\n left_start_y: 0,\n\n left_end_y: 0,\n", "file_path": "src/viewport.rs", "rank": 12, "score": 41623.97645163425 }, { "content": "type LayoutId = usize;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Group {\n\n pub layout_id: LayoutId,\n\n pub focused_window: Option<WindowId>,\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct Borders {\n\n pub width: u32,\n\n #[serde(rename = \"focused-color\")]\n\n pub focused_color: u32,\n\n #[serde(rename = \"normal-color\")]\n\n pub normal_color: u32,\n\n}\n\n\n\npub struct Config {\n\n pub keys: KeyHandlers,\n\n pub layouts: Vec<Box<dyn Layout<WindowId>>>,\n", "file_path": "src/lib.rs", "rank": 13, "score": 39758.285976811036 }, { "content": "type GroupId = usize;\n\n\n", "file_path": "src/lib.rs", "rank": 14, "score": 39758.285976811036 }, { "content": "#[cfg(test)]\n\nstruct TestDock(Strut);\n\n#[cfg(test)]\n\nimpl Dockable for TestDock {\n\n fn get_strut(&self) -> Option<Strut> {\n\n Some(self.0.clone())\n\n }\n\n}\n\n\n", "file_path": "src/screen.rs", "rank": 15, "score": 39448.912060052804 }, { "content": "fn main() -> Result<()> {\n\n miette::set_hook(Box::new(|_| {\n\n let mut theme = miette::GraphicalTheme::unicode();\n\n theme.styles = miette::ThemeStyles::ansi();\n\n Box::new(miette::GraphicalReportHandler::new_themed(theme))\n\n }))?;\n\n let app = App::parse();\n\n let (conn, screen_idx) =\n\n xcb::Connection::connect(None).map_err(Error::from)?;\n\n let conn =\n\n ewmh::Connection::connect(conn).map_err(|(e, _)| Error::from(e))?;\n\n match app.cmd {\n\n Some(\n\n Command::Rename(StrOpt { name })\n\n | Command::RenameDesktop(StrOpt { name }),\n\n ) => {\n\n if !name.is_empty() {\n\n let mut desktop_names = desktops(&conn, screen_idx);\n\n let cur_d = ewmh::get_current_desktop(&conn, screen_idx)\n\n .get_reply()\n", "file_path": "src/main.rs", "rank": 16, "score": 39262.987263224306 }, { "content": "#[test]\n\nfn bottom_strut_within_shrinks() {\n\n let vp = Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n };\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 0,\n\n bottom: 1315,\n\n left_start_y: 0,\n\n left_end_y: 0,\n\n right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 0,\n\n bottom_start_x: 0,\n\n bottom_end_x: 2559,\n", "file_path": "src/viewport.rs", "rank": 17, "score": 38970.95762276781 }, { "content": "#[test]\n\nfn top_strut_within_shrinks() {\n\n let vp = Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n };\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 35,\n\n bottom: 0,\n\n left_start_y: 0,\n\n left_end_y: 0,\n\n right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 2559,\n\n bottom_start_x: 0,\n\n bottom_end_x: 0,\n", "file_path": "src/viewport.rs", "rank": 18, "score": 38970.95762276781 }, { 
"content": "#[test]\n\nfn bottom_strut_outside_does_not_change() {\n\n let vp = Viewport {\n\n x: 0,\n\n y: 1440,\n\n width: 1920,\n\n height: 1280,\n\n };\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 0,\n\n bottom: 1315,\n\n left_start_y: 0,\n\n left_end_y: 0,\n\n right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 0,\n\n bottom_start_x: 0,\n\n bottom_end_x: 2559,\n", "file_path": "src/viewport.rs", "rank": 19, "score": 38970.95762276781 }, { "content": "#[test]\n\nfn horizontal_move_picks_the_nearest_candidate() {\n\n let windows: Vec<MappedWindow<u32>> = vec![\n\n MappedWindow {\n\n id: 0,\n\n vp: Viewport {\n\n x: 0,\n\n y: 35,\n\n width: 851,\n\n height: 1405,\n\n },\n\n },\n\n MappedWindow {\n\n id: 1,\n\n vp: Viewport {\n\n x: 854,\n\n y: 35,\n\n width: 851,\n\n height: 1405,\n\n },\n\n },\n", "file_path": "src/navigation.rs", "rank": 20, "score": 37851.03067616467 }, { "content": "#[test]\n\nfn top_dock_only_affects_top_monitor() {\n\n let vps = vec![\n\n Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n },\n\n Viewport {\n\n x: 0,\n\n y: 1440,\n\n width: 1920,\n\n height: 1280,\n\n },\n\n ];\n\n let strut = Strut {\n\n left: 0,\n\n right: 0,\n\n top: 0,\n\n bottom: 1315,\n", "file_path": "src/screen.rs", "rank": 21, "score": 37851.03067616467 }, { "content": "fn main() -> Result<(), Error> {\n\n // OUT_DIR is set by Cargo and it's where any additional build artifacts\n\n // are written.\n\n let outdir = env::var_os(\"OUT_DIR\")\n\n .expect(\"OUT_DIR environment variable not defined. Please file a bug.\");\n\n\n\n create_dir_all(&outdir)?;\n\n\n\n // Use clap to build completion files.\n\n let mut app = app::App::into_app();\n\n let name = app.get_name().to_string();\n\n generate_to(Bash, &mut app, &name, &outdir)?;\n\n generate_to(Fish, &mut app, &name, &outdir)?;\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 22, "score": 37466.250024635134 }, { "content": "pub trait Layout<T>: fmt::Debug {\n\n fn name(&self) -> &str;\n\n fn layout(\n\n &self,\n\n viewport: &Viewport,\n\n windows: &[T],\n\n focus: usize,\n\n ) -> Vec<MappedWindow<T>>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StackLayout {\n\n name: String,\n\n padding: u32,\n\n}\n\n\n\nimpl StackLayout {\n\n pub fn new<S: Into<String>>(name: S, padding: u32) -> StackLayout {\n\n StackLayout {\n\n name: name.into(),\n", "file_path": "src/layout.rs", "rank": 23, "score": 32896.217930911036 }, { "content": "fn get_paths() -> (PathBuf, PathBuf) {\n\n let dirs = ProjectDirs::from(\"org\", \"foo\", \"lanta\").unwrap();\n\n let mut config_path = dirs.config_dir().to_path_buf();\n\n config_path.push(\"lanta.yaml\");\n\n let mut cache_path = dirs.cache_dir().to_path_buf();\n\n cache_path.push(\"state.yaml\");\n\n (config_path, cache_path)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 32683.912970815807 }, { "content": "fn percent(width: u32, percent: u32) -> u32 {\n\n (width * percent) / 100\n\n}\n\n\n\nimpl<T: Copy> Layout<T> for ThreeColumn {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn layout(\n\n &self,\n\n viewport: &Viewport,\n\n stack: &[T],\n\n _focus: usize,\n\n ) -> Vec<MappedWindow<T>> {\n\n match stack.len() {\n\n 0 => Default::default(),\n\n 1 => vec![MappedWindow {\n\n // Note unwrap will never fail because we just checked that the\n\n // length is 1 with the match\n", "file_path": "src/layout.rs", "rank": 25, "score": 29190.233434422036 }, { "content": "\n\nimpl KeyHandlers {\n\n pub fn key_combos(&self) -> Vec<&KeyCombo> {\n\n 
self.hashmap.keys().collect()\n\n }\n\n\n\n pub fn get(&self, key_combo: &KeyCombo) -> Option<Command> {\n\n self.hashmap.get(key_combo).cloned()\n\n }\n\n}\n\n\n\nimpl FromIterator<(Vec<ModKey>, Key, Command)> for KeyHandlers {\n\n fn from_iter<T>(handlers: T) -> Self\n\n where\n\n T: IntoIterator<Item = (Vec<ModKey>, Key, Command)>,\n\n {\n\n let mut hashmap = HashMap::new();\n\n for (modkeys, keysym, handler) in handlers.into_iter() {\n\n hashmap.insert(KeyCombo::new(&modkeys, keysym), handler);\n\n }\n\n KeyHandlers { hashmap }\n\n }\n\n}\n", "file_path": "src/keys.rs", "rank": 26, "score": 28007.56026269184 }, { "content": "use std::collections::HashMap;\n\nuse std::iter::FromIterator;\n\nuse std::os::raw::c_uint;\n\n\n\nuse crate::cmd::Command;\n\n\n\n/// Represents a modifier key.\n\n#[allow(dead_code)]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum ModKey {\n\n Shift,\n\n Lock,\n\n Control,\n\n Mod1,\n\n Mod2,\n\n Mod3,\n\n Mod4,\n\n Mod5,\n\n}\n\n\n", "file_path": "src/keys.rs", "rank": 27, "score": 28007.513455516404 }, { "content": "## Configuration\n\n\n\nThe configuration file, `~/.config/lanta/lanta.yaml`, containes a section each\n\nfor borders, layouts and keybindings.\n\n\n\n### Borders\n\n\n\nWindows may be surounded by a border.\n\nWindow borders must be one of two colors, one for focused windows and one for\n\n\"normal\" or non-focused windows, and must have a width, in pixels.\n\n\n\nfor example:\n\n\n\n```yaml\n\nborders:\n\n width: 2\n\n focused-color: 65280\n\n normal-color: 0\n\n```\n\n\n\n### Layouts\n\n\n\nThe `layouts` section contains a list of objects that describe a single layout.\n\nA layout is given a name, type, and optional layout-specific attributes, such as padding.\n\n\n\nThe first layout in the list is the default.\n\n\n\nFor example, the layouts section of my config file looks like:\n\n\n\n```yaml\n\nlayouts:\n\n - name: 3-column\n\n type: ThreeColumn\n\n padding: 5\n\n - name: full screen\n\n type: Stack\n\n```\n\n\n\n### Keys\n\n\n\nThe `keys` section is a map from emacs-like key combination descriptions to actions.\n\nThe valid actions are:\n\n - CloseFocused\n\n - Focus Direction\n\n - Swap Direction\n\n - GroupNext \n\n - MoveToNextGroup\n\n - GroupPrev \n\n - MoveToPrevGroup\n\n - RotateCrtc\n\n - RotateLayout\n\n - RotateFocus\n\n - Spawn\n\n - Restart\n\n\n\nFor example, my keybinding configuration looks like:\n\n\n\n```yaml\n\nkeys:\n\n M-d: CloseFocused\n\n M-l:\n\n Focus: Right\n\n M-S-l:\n\n Swap: Right\n\n M-h:\n\n Focus: Left\n\n M-S-h:\n\n Swap: Left\n\n M-j:\n\n Focus: Down\n\n M-S-j:\n\n Swap: Down\n\n M-k:\n\n Focus: Up\n\n M-S-k:\n\n Swap: Up\n\n M-enter: RotateCrtc\n\n M-space: RotateLayout\n\n M-tab: RotateFocus\n\n M-S-r: Restart\n\n M-r:\n\n Spawn: rofi-rename-workspace\n\n M-g:\n\n Spawn: rofi-switch-workspaces\n\n M-S-g:\n\n Spawn: rofi-move-workspaces\n\n M-n:\n\n Spawn: rofi-new-workspace\n\n M-p:\n\n Spawn: rofi -show drun\n\n M-S-s:\n\n Spawn: xset dpms force off\n\n M-c:\n\n Spawn: alacritty\n\n```\n\n\n\n## License\n\n\n\nMIT\n", "file_path": "README.md", "rank": 28, "score": 16444.98433856339 }, { "content": "# lanta\n\n\n\nExperiments in creating a tiling X11 window manager in Rust.\n\n\n\nLanta is written to be customisable, simple and fast-ish.\n\n\n\n## Features\n\n\n\nLanta doesn't implement all of\n\n[EWMH](https://specifications.freedesktop.org/wm-spec/wm-spec-latest.html) or\n\n[ICCCM](https://www.x.org/releases/X11R7.6/doc/xorg-docs/specs/ICCCM/icccm.html),\n\nnor will it 
ever. It aims to implement just enough for use as my primary WM.\n\n\n\nLanta tiles windows in desktops, called groups, and each window must be in\n\nexactly one group.\n\nWindows may be moved between groups, focused inside a group and swapped with\n\neathother.\n\nEach group may cycle through a global set of layouts which tile its windows.\n\n\n\nThere are currently a few simple layouts implemented:\n\n\n\n - Stack — Maximises the currently focused window.\n\n - 3 column — Balances Windows into 3 columns of windows.\n\n\n\nFurther, 3 modes of navigation are available:\n\n - Rotate through the groups windows.\n\n - Navigate between visible windows on all screens by picking the nearest\n\n window that's center point lies within a cone in a cardinal direction.\n\n\n\n## Dependencies\n\n\n\nIn addition to the Rust dependencies in `Cargo.toml`, Lanta also depends on these system libraries:\n\n\n\n - `x11-xcb`\n\n - `xcb-util`: `xcb-ewmh` / `xcb-icccm` / `xcb-keysyms`\n\n\n\nThe following Ubuntu packages should allow your system to meet these requirements:\n\n\n\n```sh\n\nsudo apt install -y libx11-xcb-dev libxcb-ewmh-dev libxcb-icccm4-dev libxcb-keysyms1-dev libxcb-render0-dev libxcb-randr0-dev\n\n```\n\n\n\n## Installing\n\n\n\nLanta currently requires the stable version of Rust to compile. \n\n\n\nYour system must first have all of the required [dependencies](#dependencies)\n\n\n\nClone this repo, and run the following script in this directory:\n\n\n\n```sh\n\ncargo install --path .\n\n```\n\n\n\nLanta, the window manager may be installed as the current window manager by\n\nadding the following to your `~/.xinitrc` file:\n\n```sh\n\nlanta\n\n```\n\nor, if running within systemd, and you prefer to have the logs available\n\nthough `journalct`:\n\n``` sh\n\nsystemd-cat -t wm -- lanta\n\n```\n\n\n", "file_path": "README.md", "rank": 29, "score": 16436.841832269514 }, { "content": "MIT License\n\n\n\nCopyright (c) 2017 Michael Killough\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE.md", "rank": 30, "score": 16436.62047320343 }, { "content": "# Contributed scripts\n\n\n\nYou may find these scripts useful when using lanta.\n\nThese scripts require `rofi`, `xdotool`, `wmctrl`, `fre` and `lanta`.\n\nYou can install the first 3 deps with your favorite package manage.\n\n`fre` may be found here: https://github.com/camdencheek/fre .\n\n`lanta` is part of this repo.\n\n\n\n## rofi-rename-workspace\n\n\n\nRename the current group/desktop/workspace, selecting the new name with rofi.\n\n\n\n## rofi-switch-workspaces\n\n\n\nSwitch the active group/desktop/workspace on the current monitor, selecting the \n\ndestination with rofi.\n\n\n\n## rofi-move-workspaces\n\n\n\nMove the currently focused window to another group/desktop/workspace, selecting\n\nthe destination by name with rofi, and then switch to that\n\ngroup/desktop/workspace.\n\n\n\n## rofi-new-workspace\n\n\n\nCreate a new group/desktop/workspace, with name text entry using rofi.\n", "file_path": "contrib/README.md", "rank": 31, "score": 15789.191451319219 }, { "content": "use crate::layout::MappedWindow;\n\nuse crate::viewport::Viewport;\n\nuse serde::Deserialize;\n\n\n\n#[derive(Copy, Clone, Debug, Deserialize)]\n\npub enum Direction {\n\n Up,\n\n Down,\n\n Left,\n\n Right,\n\n}\n\n\n\nimpl Direction {\n\n pub fn next_window<'a, T>(\n\n &self,\n\n focus: &Viewport,\n\n windows: &'a Vec<MappedWindow<T>>,\n\n ) -> Option<&'a MappedWindow<T>> {\n\n let center_x = focus.x + (focus.width / 2);\n\n let center_y = focus.y + (focus.height / 2);\n", "file_path": "src/navigation.rs", "rank": 34, "score": 18.26695776204892 }, { "content": "#[macro_use]\n\nextern crate log;\n\n\n\nuse std::collections::hash_map::Entry;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::rc::Rc;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub mod app;\n\npub mod cmd;\n\npub mod config;\n\nmod keys;\n\npub mod layout;\n\nmod navigation;\n\nmod screen;\n\nmod viewport;\n\nmod x;\n\n\n\nuse crate::x::{Crtc, CrtcChange, WindowType};\n", "file_path": "src/lib.rs", "rank": 35, "score": 17.52008140674855 }, { "content": "use crate::x::{CrtcInfo, StrutPartial};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Strut {\n\n pub left: u32,\n\n pub right: u32,\n\n pub top: u32,\n\n pub bottom: u32,\n\n pub left_start_y: u32,\n\n pub left_end_y: u32,\n\n pub right_start_y: u32,\n\n pub right_end_y: u32,\n\n pub top_start_x: u32,\n\n pub top_end_x: u32,\n\n pub bottom_start_x: u32,\n\n pub bottom_end_x: u32,\n\n}\n\n\n\nimpl Strut {\n", "file_path": "src/viewport.rs", "rank": 36, "score": 16.60878376265937 }, { "content": "use std::cmp::Eq;\n\nuse std::fmt;\n\n\n\nuse crate::Viewport;\n\n\n\n#[derive(Debug, Hash)]\n\npub struct MappedWindow<T> {\n\n pub id: T,\n\n pub vp: Viewport,\n\n}\n\n\n\nimpl<T: PartialEq> PartialEq for MappedWindow<T> {\n\n fn eq(&self, rhs: &Self) -> bool {\n\n self.id == rhs.id && self.vp == rhs.vp\n\n }\n\n}\n\n\n\nimpl<T: Eq> Eq for MappedWindow<T> {}\n\n\n", "file_path": "src/layout.rs", "rank": 37, "score": 15.19713591337397 }, { "content": " },\n\n None => (Self { name: f, cwd: None }),\n\n }\n\n }\n\n\n\n fn into_string(self) -> String {\n\n match self.cwd {\n\n None => 
String::from(self.name),\n\n Some(cwd) => {\n\n let mut res = String::from(self.name);\n\n res.push_str(Self::DELIMITER);\n\n res.push_str(cwd);\n\n res\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 38, "score": 15.0753007640501 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse xcb::randr;\n\nuse xcb_util::keysyms::KeySymbols;\n\nuse xcb_util::{ewmh, icccm};\n\n\n\nuse crate::keys::{KeyCombo, KeyHandlers};\n\nuse crate::viewport::Viewport;\n\nuse miette::Diagnostic;\n\nuse thiserror::Error;\n\n\n\npub use self::ewmh::StrutPartial;\n\npub use randr::Crtc;\n\n\n\n#[derive(Error, Debug, Diagnostic)]\n\npub enum Error {\n\n #[error(\"The screen used is invalid\")]\n\n InvalidScreen,\n", "file_path": "src/x.rs", "rank": 39, "score": 14.609981954145827 }, { "content": "use crate::Direction;\n\nuse crate::Lanta;\n\nuse directories::ProjectDirs;\n\nuse log::error;\n\nuse serde::Deserialize;\n\nuse std::fs::{create_dir_all, File};\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub enum Command {\n\n CloseFocused,\n\n Focus(Direction),\n\n Swap(Direction),\n\n GroupNext,\n\n GroupPrev,\n\n MoveToNextGroup,\n\n MoveToPrevGroup,\n\n RotateCrtc,\n\n WindowToNextCrtc,\n\n RotateLayout,\n\n RotateFocus,\n", "file_path": "src/cmd.rs", "rank": 40, "score": 14.344488592572969 }, { "content": " pub fn from_strut_partial(frm: &StrutPartial) -> Strut {\n\n Strut {\n\n left: frm.left(),\n\n right: frm.right(),\n\n top: frm.top(),\n\n bottom: frm.bottom(),\n\n left_start_y: frm.left_start_y(),\n\n left_end_y: frm.left_end_y(),\n\n right_start_y: frm.right_start_y(),\n\n right_end_y: frm.right_end_y(),\n\n top_start_x: frm.top_start_x(),\n\n top_end_x: frm.top_end_x(),\n\n bottom_start_x: frm.bottom_start_x(),\n\n bottom_end_x: frm.bottom_end_x(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct Viewport {\n", "file_path": "src/viewport.rs", "rank": 42, "score": 12.389569828769076 }, { "content": " let cmd: Vec<String> =\n\n cmd.split_whitespace().map(String::from).collect();\n\n let mut command = std::process::Command::new(&cmd[0]);\n\n command.args(&cmd[1..]);\n\n info!(\"Spawning: {:?}\", command);\n\n if let Err(e) = command.spawn() {\n\n error!(\"Failed to spawn child {:?} because {}\", command, e);\n\n }\n\n }\n\n Restart => {\n\n let dirs = ProjectDirs::from(\"org\", \"foo\", \"lanta\").unwrap();\n\n let mut cache_path = dirs.cache_dir().to_path_buf();\n\n let _ = create_dir_all(&cache_path);\n\n cache_path.push(\"state.yaml\");\n\n let config_file = match File::create(cache_path) {\n\n Ok(c) => c,\n\n Err(e) => return error!(\"Failed to write out state {}\", e),\n\n };\n\n info!(\"{}\", serde_yaml::to_string(&wm.s).unwrap());\n\n serde_yaml::to_writer(config_file, &wm.s).unwrap();\n", "file_path": "src/cmd.rs", "rank": 43, "score": 11.940873872831478 }, { "content": " strut: &Strut,\n\n ) -> Viewport {\n\n let mut left = self.x;\n\n let mut right = self.x + self.width;\n\n let mut top = self.y;\n\n let mut bottom = self.y + self.height;\n\n // Turns out that strut right and bottom have an insane definition.\n\n // In particular, they're defined as a reservation of space at the\n\n // associated edge of the combined screen. 
This means that we have\n\n // to calculate the actual edge of the strut, and be extra sure to\n\n // check that it's actually within the specified viewport.\n\n let actual_right = screen_width - strut.right;\n\n let actual_bottom = screen_height - strut.bottom;\n\n if (strut.left_start_y >= top)\n\n && (strut.left_end_y <= bottom)\n\n && (strut.left < left)\n\n && (strut.left > right)\n\n {\n\n left = strut.left\n\n }\n", "file_path": "src/viewport.rs", "rank": 44, "score": 11.855506232737449 }, { "content": " ConfigureRequest { window: WindowId, vp: Viewport },\n\n}\n\n\n\n/// An iterator that yields events from the X event loop.\n\n///\n\n/// Use `Connection::get_event_loop()` to get one.\n\npub struct EventLoop<'a> {\n\n connection: &'a Connection,\n\n}\n\n\n\nimpl<'a> Iterator for EventLoop<'a> {\n\n type Item = Event;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n // Flush any pending operations that came out of the event we (might\n\n // have) just yielded.\n\n self.connection.flush();\n\n\n\n let event = self.connection.conn.wait_for_event()?;\n", "file_path": "src/x.rs", "rank": 45, "score": 11.464896550465554 }, { "content": "impl fmt::Display for WindowId {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum WindowType {\n\n Desktop,\n\n Dock,\n\n Toolbar,\n\n Menu,\n\n Utility,\n\n Splash,\n\n Dialog,\n\n DropdownMenu,\n\n PopupMenu,\n\n Tooltip,\n\n Notification,\n\n Combo,\n", "file_path": "src/x.rs", "rank": 46, "score": 10.697759942282001 }, { "content": " .enable_window_key_events(&window_id, &self.c.keys);\n\n\n\n if !dock {\n\n let attrs = self.connection.get_window_attributes(&window_id);\n\n match attrs {\n\n Ok(wattrs) => {\n\n if wattrs.override_redirect() {\n\n self.connection.enable_property_tracking(&window_id);\n\n debug!(\"Tracking override redirect window {:x?} for window type changes\", window_id);\n\n return false;\n\n }\n\n }\n\n Err(e) => {\n\n warn!(\n\n \"Could not get window attrs for {}: {}\",\n\n window_id, e\n\n );\n\n }\n\n }\n\n if window_types.contains(&WindowType::Notification)\n", "file_path": "src/lib.rs", "rank": 47, "score": 10.690953723162131 }, { "content": " ..self\n\n }\n\n }\n\n }\n\n\n\n fn clear_cwd(self) -> Self {\n\n Self { cwd: None, ..self }\n\n }\n\n\n\n fn with_name(self, name: &'i str) -> Self {\n\n Self { name, ..self }\n\n }\n\n\n\n const DELIMITER: &'static str = \"::\";\n\n\n\n fn from_str(f: &'i str) -> Self {\n\n match f.split_once(Self::DELIMITER) {\n\n Some((name, cwd)) => Self {\n\n name,\n\n cwd: Some(cwd),\n", "file_path": "src/main.rs", "rank": 48, "score": 10.511438980370123 }, { "content": " left_start_y: 0,\n\n left_end_y: 0,\n\n right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 0,\n\n bottom_start_x: 0,\n\n bottom_end_x: 2559,\n\n };\n\n let mut screen = Screen::<TestDock>::default();\n\n screen.add(TestDock(strut));\n\n assert_eq!(\n\n screen.viewports(vps),\n\n vec![\n\n Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1405,\n\n },\n", "file_path": "src/screen.rs", "rank": 49, "score": 10.170548163164337 }, { "content": "use layout::{Layout, MappedWindow};\n\nuse screen::{Dock, Screen};\n\n\n\npub use keys::{KeyHandlers, ModKey};\n\npub use navigation::Direction;\n\npub use viewport::Viewport;\n\npub use x::{Connection, CrtcInfo, Error, Event, Result, WindowId};\n\n\n\npub mod keysym {\n\n pub use x11::keysym::*;\n\n}\n\n\n", "file_path": 
"src/lib.rs", "rank": 50, "score": 10.162613276312278 }, { "content": "use clap::Parser;\n\nuse directories::ProjectDirs;\n\nuse lanta::app::{App, Command, StrOpt};\n\nuse lanta::config::{load_config_yaml, load_state};\n\nuse lanta::Error;\n\nuse lanta::Lanta;\n\nuse miette::{IntoDiagnostic, Result};\n\nuse std::path::{Path, PathBuf};\n\nuse xcb_util::ewmh;\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 9.9903889225041 }, { "content": " #[error(\"This screen does not support the randr extension\")]\n\n RandrUnsupported,\n\n #[error(\"Xorg message returned error\")]\n\n XError(#[from] xcb::base::GenericError),\n\n #[error(\"Could not connect to the Xorg server\")]\n\n ConnError(#[from] xcb::base::ConnError),\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// A handle to an X Window.\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy, Deserialize, Serialize)]\n\npub struct WindowId(xcb::Window);\n\n\n\nimpl WindowId {\n\n fn to_x(&self) -> xcb::Window {\n\n self.0\n\n }\n\n}\n\n\n", "file_path": "src/x.rs", "rank": 52, "score": 9.968853496916894 }, { "content": " pub borders: Borders,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Default)]\n\npub struct State {\n\n windows: Vec<Window>,\n\n groups: Vec<Group>,\n\n current_crtc: Option<Crtc>,\n\n crtc: HashMap<Crtc, (CrtcInfo, GroupId)>,\n\n}\n\n\n\npub struct Lanta {\n\n connection: Rc<Connection>,\n\n c: Config,\n\n s: State,\n\n mapped: Vec<MappedWindow<WindowId>>,\n\n screen: Screen<Dock>,\n\n}\n\n\n\nimpl Lanta {\n", "file_path": "src/lib.rs", "rank": 53, "score": 9.445358531679194 }, { "content": " .unwrap_or_default() as usize;\n\n let name = name.join(\" \");\n\n desktop_names[cur_d] =\n\n Desktop::from_str(desktop_names[cur_d].as_str())\n\n .with_name(name.as_str())\n\n .into_string();\n\n ewmh::set_desktop_names_checked(\n\n &conn,\n\n screen_idx,\n\n desktop_names.iter().map(String::as_str),\n\n )\n\n .request_check()\n\n .into_diagnostic()?;\n\n }\n\n }\n\n Some(Command::SetCwd { cwd }) => {\n\n if cwd.is_dir() {\n\n if let Some(cwd) = cwd.to_str() {\n\n let mut desktop_names = desktops(&conn, screen_idx);\n\n let cur_d = ewmh::get_current_desktop(&conn, screen_idx)\n", "file_path": "src/main.rs", "rank": 54, "score": 9.259324827340622 }, { "content": " .get_reply()\n\n .unwrap_or_default()\n\n as usize;\n\n desktop_names[cur_d] =\n\n Desktop::from_str(desktop_names[cur_d].as_str())\n\n .with_cwd(cwd)\n\n .into_string();\n\n ewmh::set_desktop_names_checked(\n\n &conn,\n\n screen_idx,\n\n desktop_names.iter().map(String::as_str),\n\n )\n\n .request_check()\n\n .into_diagnostic()?;\n\n }\n\n }\n\n }\n\n Some(Command::ClearCwd) => {\n\n let mut desktop_names = desktops(&conn, screen_idx);\n\n let cur_d = ewmh::get_current_desktop(&conn, screen_idx)\n", "file_path": "src/main.rs", "rank": 55, "score": 9.244883042911098 }, { "content": "\n\n fn on_destroy_notify(\n\n &self,\n\n event: &xcb::DestroyNotifyEvent,\n\n ) -> Option<Event> {\n\n Some(Event::DestroyNotify(WindowId(event.window())))\n\n }\n\n\n\n fn on_key_press(&self, event: &xcb::KeyPressEvent) -> Option<Event> {\n\n let key_symbols = KeySymbols::new(&self.connection.conn);\n\n let keysym = key_symbols.press_lookup_keysym(event, 0);\n\n let mod_mask = u32::from(event.state());\n\n let key = KeyCombo { mod_mask, keysym };\n\n Some(Event::KeyPress(key))\n\n }\n\n\n\n fn on_enter_notify(&self, event: &xcb::EnterNotifyEvent) -> Option<Event> {\n\n Some(Event::EnterNotify(WindowId(event.event())))\n\n }\n\n\n", "file_path": "src/x.rs", "rank": 56, 
"score": 9.05577983315563 }, { "content": "\n\n fn swap_windows(&mut self, lhs: WindowId, rhs: WindowId) {\n\n let lhs_pos = self.s.windows.iter().position(|w| w.id == lhs);\n\n let rhs_pos = self.s.windows.iter().position(|w| w.id == rhs);\n\n match (lhs_pos, rhs_pos) {\n\n (Some(lhs_pos), Some(rhs_pos)) => {\n\n self.s.windows.get_mut(lhs_pos).unwrap().id = rhs;\n\n self.s.windows.get_mut(rhs_pos).unwrap().id = lhs;\n\n let warp_pointer = self.s.windows.get(lhs_pos).map(|w| w.group)\n\n != self.s.windows.get(rhs_pos).map(|w| w.group);\n\n self.activate_current_groups(warp_pointer);\n\n }\n\n (Some(_), None) => {\n\n error!(\"Could not swap; Right window is not present\");\n\n }\n\n (None, Some(_)) => {\n\n error!(\"Could not swap; Left window is not present\");\n\n }\n\n (None, None) => {\n\n error!(\"Could not swap; Both windows are not present\");\n", "file_path": "src/lib.rs", "rank": 58, "score": 8.596806444066324 }, { "content": " ( $( $name:ident ),+ , ) => (atoms!($( $name ),+);)\n\n}\n\n\n\natoms!(WM_DELETE_WINDOW, WM_PROTOCOLS);\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct CrtcInfo {\n\n pub x: i16,\n\n pub y: i16,\n\n pub width: u16,\n\n pub height: u16,\n\n}\n\n\n\nimpl From<randr::GetCrtcInfoReply> for CrtcInfo {\n\n fn from(reply: randr::GetCrtcInfoReply) -> Self {\n\n let (width, height) = match reply.rotation() {\n\n // NOTE: this is backwards.\n\n 90 | 270 => (reply.height(), reply.width()),\n\n _ => (reply.width(), reply.height()),\n\n };\n", "file_path": "src/x.rs", "rank": 59, "score": 8.52078718832393 }, { "content": " let viewports = self\n\n .s\n\n .crtc\n\n .values()\n\n .map(|(info, _)| Viewport::clone_from_crtc_info(info))\n\n .collect();\n\n self.screen.viewports(viewports)\n\n }\n\n\n\n pub fn rotate_crtc(&mut self) {\n\n let mut iter = self.s.crtc.keys().skip_while(|&crtc_id| {\n\n self.s.current_crtc.as_ref() != Some(crtc_id)\n\n });\n\n let _ = iter.next();\n\n if let Some(next_crtc) = iter.chain(self.s.crtc.keys()).next() {\n\n self.s.current_crtc = Some(*next_crtc);\n\n }\n\n self.activate_current_groups(true)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 8.392139269755319 }, { "content": "use clap::Parser;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Parser)]\n\npub struct StrOpt {\n\n pub name: Vec<String>,\n\n}\n\n\n\n#[derive(Parser)]\n\npub struct App {\n\n #[clap(subcommand)]\n\n pub cmd: Option<Command>,\n\n}\n\n\n\n#[derive(Parser)]\n\npub enum Command {\n\n New(StrOpt),\n\n NewDesktop(StrOpt),\n\n NewFromWd { cwd: PathBuf },\n\n Rename(StrOpt),\n\n RenameDesktop(StrOpt),\n\n SetCwd { cwd: PathBuf },\n\n ClearCwd,\n\n ExecInCwd { cmd: Vec<String> },\n\n Manage,\n\n ManageWindows,\n\n Check,\n\n}\n", "file_path": "src/app.rs", "rank": 61, "score": 8.228164587532918 }, { "content": " Spawn(String),\n\n Restart,\n\n}\n\n\n\nimpl Command {\n\n pub fn run(self, wm: &mut Lanta) {\n\n use Command::*;\n\n match self {\n\n CloseFocused => wm.close_focused(),\n\n Focus(dir) => wm.focus_in_direction(&dir),\n\n Swap(dir) => wm.swap_in_direction(&dir),\n\n GroupNext => wm.next_group(),\n\n GroupPrev => wm.prev_group(),\n\n MoveToNextGroup => wm.move_focused_to_next_group(),\n\n MoveToPrevGroup => wm.move_focused_to_prev_group(),\n\n RotateCrtc => wm.rotate_crtc(),\n\n WindowToNextCrtc => wm.window_to_next_crtc(),\n\n RotateLayout => wm.group_cycle_layouts(),\n\n RotateFocus => wm.rotate_focus_in_group(),\n\n Spawn(cmd) => {\n", "file_path": "src/cmd.rs", "rank": 62, "score": 8.215953321130712 }, { "content": " windows\n\n 
.iter()\n\n .filter_map(|w| {\n\n let x = w.vp.x + (w.vp.width / 2);\n\n let y = w.vp.y + (w.vp.height / 2);\n\n let delta_x: i64 = x as i64 - center_x as i64;\n\n let delta_y: i64 = y as i64 - center_y as i64;\n\n // Normalize all movements to pretend to the to the Right\n\n let (delta_x, delta_y) = match self {\n\n Direction::Right => (delta_x, delta_y),\n\n Direction::Left => (-delta_x, delta_y),\n\n Direction::Down => (delta_y, delta_x),\n\n Direction::Up => (-delta_y, -delta_x),\n\n };\n\n if &w.vp != focus\n\n && delta_x > 0\n\n && delta_y < delta_x\n\n && delta_y >= -delta_x\n\n {\n\n Some(((delta_x, delta_y), w))\n", "file_path": "src/navigation.rs", "rank": 63, "score": 8.106321446016201 }, { "content": " }\n\n\n\n pub fn disable_window_key_events(\n\n &self,\n\n window_id: &WindowId,\n\n key_handlers: &KeyHandlers,\n\n ) {\n\n let key_symbols = KeySymbols::new(&self.conn);\n\n for key in key_handlers.key_combos() {\n\n match key_symbols.get_keycode(key.keysym).next() {\n\n Some(keycode) => {\n\n xcb::ungrab_key(\n\n &self.conn,\n\n keycode,\n\n window_id.to_x(),\n\n key.mod_mask as u16,\n\n );\n\n }\n\n None => {\n\n error!(\n", "file_path": "src/x.rs", "rank": 64, "score": 8.081168712530406 }, { "content": " error!(\"Could not lookup window {:?} to remove\", id);\n\n }\n\n self.s.windows.retain(|w| &w.id != id);\n\n self.activate_current_groups(false);\n\n self.update_ewmh_desktops();\n\n }\n\n\n\n fn modify_focus_group_window_with(\n\n &mut self,\n\n fun: impl FnOnce(usize, usize) -> Option<usize>,\n\n ) {\n\n if let Some(gid) = self.group_idx() {\n\n if let Some(group) = self.s.groups.get_mut(gid) {\n\n let windows = self.s.windows.in_group(gid);\n\n if let Some(new_focus) = group\n\n .focused_window\n\n .map(|wid| {\n\n windows\n\n .iter()\n\n .position(|&w| w == wid)\n", "file_path": "src/lib.rs", "rank": 65, "score": 8.01559019861099 }, { "content": " MappedWindow {\n\n id: 2,\n\n vp: Viewport {\n\n x: 1708,\n\n y: 35,\n\n width: 851,\n\n height: 1405,\n\n },\n\n },\n\n ];\n\n assert_eq!(\n\n Direction::Left\n\n .next_window(&windows[2].vp, &windows)\n\n .unwrap()\n\n .id,\n\n 1\n\n );\n\n assert_eq!(\n\n Direction::Right\n\n .next_window(&windows[0].vp, &windows)\n\n .unwrap()\n\n .id,\n\n 1\n\n );\n\n}\n", "file_path": "src/navigation.rs", "rank": 66, "score": 7.856076806061335 }, { "content": " // and focused windows are mapped).\n\n self.manage_window(window_id);\n\n self.activate_current_groups(false);\n\n } else if let Some(w) =\n\n self.s.windows.iter().find(|w| w.id == window_id)\n\n {\n\n if let Some(group) = self.s.groups.get_mut(w.group) {\n\n group.focused_window = Some(w.id)\n\n }\n\n }\n\n }\n\n\n\n fn on_crtc_change(&mut self, change: &CrtcChange) {\n\n debug!(\n\n \"Crtc's Changed! 
Before: {:?}, {:?}\",\n\n &self.s.crtc, &self.s.current_crtc\n\n );\n\n if change.width > 0 && change.height > 0 {\n\n let gidx = self.find_next_unallocated_group();\n\n match self.s.crtc.entry(change.crtc) {\n", "file_path": "src/lib.rs", "rank": 67, "score": 7.807812983830502 }, { "content": " id: stack.iter().next().unwrap().clone(),\n\n vp: viewport.clone(),\n\n }],\n\n 2 => {\n\n let ungapped_width = viewport.width - self.inner_padding;\n\n let left_width = percent(ungapped_width, self.center_width);\n\n let right_width = ungapped_width - left_width;\n\n let viewports = vec![\n\n Viewport {\n\n x: viewport.x,\n\n y: viewport.y,\n\n width: left_width,\n\n height: viewport.height,\n\n },\n\n Viewport {\n\n x: viewport.x + self.inner_padding + left_width,\n\n y: viewport.y,\n\n width: right_width,\n\n height: viewport.height,\n\n },\n", "file_path": "src/layout.rs", "rank": 68, "score": 7.8048405055527335 }, { "content": " .into_iter()\n\n .map(|(crtc, cookie)| match cookie.get_reply() {\n\n Ok(info) => Ok((crtc, info.into())),\n\n Err(e) => Err(e.into()),\n\n })\n\n .collect()\n\n }\n\n\n\n /// Installs the Connection as a window manager, by registers for\n\n /// SubstructureNotify and SubstructureRedirect events on the root window.\n\n /// If there is already a window manager on the display, then this will\n\n /// fail.\n\n pub fn install_as_wm(&self, key_handlers: &KeyHandlers) -> Result<()> {\n\n let values = [(\n\n xcb::CW_EVENT_MASK,\n\n xcb::EVENT_MASK_SUBSTRUCTURE_NOTIFY\n\n | xcb::EVENT_MASK_SUBSTRUCTURE_REDIRECT\n\n | xcb::EVENT_MASK_PROPERTY_CHANGE,\n\n )];\n\n xcb::change_window_attributes_checked(\n", "file_path": "src/x.rs", "rank": 69, "score": 7.794327658874101 }, { "content": " match key_symbols.get_keycode(key.keysym).next() {\n\n Some(keycode) => {\n\n xcb::grab_key(\n\n &self.conn,\n\n false,\n\n window_id.to_x(),\n\n key.mod_mask as u16,\n\n keycode,\n\n xcb::GRAB_MODE_ASYNC as u8,\n\n xcb::GRAB_MODE_ASYNC as u8,\n\n );\n\n }\n\n None => {\n\n error!(\n\n \"Failed to get keycode for keysym {} - could not register handler on {}\",\n\n key.keysym, window_id\n\n );\n\n }\n\n }\n\n }\n", "file_path": "src/x.rs", "rank": 70, "score": 7.666504344860305 }, { "content": " if (strut.right_start_y >= top)\n\n && (strut.right_end_y <= bottom)\n\n && (actual_right < left)\n\n && (actual_right > right)\n\n {\n\n right = actual_right\n\n }\n\n if (strut.top_start_x >= left)\n\n && (strut.top_end_x <= right)\n\n && (strut.top < bottom)\n\n && (strut.top > top)\n\n {\n\n top = strut.top\n\n }\n\n if (strut.bottom_start_x >= left)\n\n && (strut.bottom_end_x <= right)\n\n && (actual_bottom < bottom)\n\n && (actual_bottom > top)\n\n {\n\n bottom = actual_bottom\n", "file_path": "src/viewport.rs", "rank": 71, "score": 7.2537060914047675 }, { "content": " .get_reply()\n\n .unwrap_or_default() as usize;\n\n desktop_names[cur_d] =\n\n Desktop::from_str(desktop_names[cur_d].as_str())\n\n .clear_cwd()\n\n .into_string();\n\n ewmh::set_desktop_names_checked(\n\n &conn,\n\n screen_idx,\n\n desktop_names.iter().map(String::as_str),\n\n )\n\n .request_check()\n\n .into_diagnostic()?;\n\n }\n\n Some(Command::ExecInCwd { cmd }) => {\n\n if cmd.len() >= 1 {\n\n let desktop_names = desktops(&conn, screen_idx);\n\n let cur_d =\n\n &desktop_names[ewmh::get_current_desktop(&conn, screen_idx)\n\n .get_reply()\n", "file_path": "src/main.rs", "rank": 72, "score": 7.207449462298856 }, { "content": " height,\n\n }\n\n }\n\n}\n\n\n\npub struct Connection {\n\n pub conn: ewmh::Connection,\n\n root: 
WindowId,\n\n pub screen_idx: i32,\n\n pub atoms: InternedAtoms,\n\n window_type_lookup: HashMap<xcb::Atom, WindowType>,\n\n window_state_lookup: HashMap<xcb::Atom, WindowState>,\n\n randr_base: u8,\n\n}\n\n\n\nimpl Connection {\n\n /// Opens a connection to the X server, returning a new Connection object.\n\n pub fn connect() -> Result<Connection> {\n\n let (conn, screen_idx) = xcb::Connection::connect(None)?;\n\n let conn = ewmh::Connection::connect(conn).map_err(|(e, _)| e)?;\n", "file_path": "src/x.rs", "rank": 73, "score": 7.153170401797894 }, { "content": " pub fn run(mut self) {\n\n info!(\"Started WM, entering event loop.\");\n\n let event_loop_connection = self.connection.clone();\n\n let event_loop = event_loop_connection.get_event_loop();\n\n for event in event_loop {\n\n match event {\n\n Event::MapRequest(window_id) => self.on_map_request(window_id),\n\n Event::UnmapNotify(window_id) => {\n\n // Turns out that we will recieve unmap notifications when\n\n // a window decides that it should be minimized. We don't\n\n // support minimized windows, so map that window immediately\n\n if self.is_window_managed(&window_id) {\n\n self.connection.map_window(&window_id);\n\n }\n\n }\n\n Event::DestroyNotify(window_id) => {\n\n self.unmanage_window(&window_id);\n\n }\n\n Event::KeyPress(key) => {\n\n if let Some(handler) = self.c.keys.get(&key) {\n", "file_path": "src/lib.rs", "rank": 74, "score": 7.082355063563777 }, { "content": " y: viewport.y + self.padding,\n\n width: viewport.width - (self.padding * 2),\n\n height: viewport.height - (self.padding * 2),\n\n };\n\n vec![MappedWindow { vp, id }]\n\n }\n\n None => Default::default(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ThreeColumn {\n\n name: String,\n\n inner_padding: u32,\n\n center_width: u32,\n\n}\n\n\n\nimpl ThreeColumn {\n\n pub fn new<S: Into<String>>(\n", "file_path": "src/layout.rs", "rank": 75, "score": 7.079705864052853 }, { "content": " _ => (),\n\n }\n\n }\n\n\n\n fn shift_group(&mut self, fun: impl FnOnce(usize, usize) -> Option<usize>) {\n\n if let Some(next_group) = self\n\n .group_idx()\n\n .and_then(|cur| fun(cur, self.s.groups.len()))\n\n {\n\n self.focus_group(next_group);\n\n self.activate_current_groups(false);\n\n }\n\n }\n\n\n\n pub fn next_group(&mut self) {\n\n self.shift_group(|cur, len| {\n\n if cur + 1 < len + 1 {\n\n Some(cur + 1)\n\n } else {\n\n None\n", "file_path": "src/lib.rs", "rank": 76, "score": 7.078216047684843 }, { "content": " /// Draw all active windows across all screens using a diffing algorithm.\n\n ///\n\n /// The algorithm works by first generating a list of all the windows that must\n\n /// be mapped, and where they will be mapped. 
Then it unmappes any windows that\n\n /// used to be mapped and no longer are, repositions windows that are in both sets\n\n /// and have different positions and finally maps windows that were not mapped\n\n /// previously.\n\n fn activate_current_groups(&mut self, warp_pointer: bool) {\n\n let vps = self.viewports();\n\n let crtc = &self.s.crtc;\n\n let mut new_mapped_windows = Vec::new();\n\n for ((_crtc_id, (_info, grp_id)), viewport) in\n\n crtc.iter().zip(vps.into_iter())\n\n {\n\n let (windows, focus, layout) = self.groupref(*grp_id);\n\n new_mapped_windows\n\n .extend(layout.layout(&viewport, &windows, focus).into_iter());\n\n }\n\n\n\n let prev_ids: HashSet<_> = self.mapped.iter().map(|w| w.id).collect();\n", "file_path": "src/lib.rs", "rank": 77, "score": 6.939365588030798 }, { "content": " }\n\n })\n\n }\n\n\n\n pub fn prev_group(&mut self) {\n\n self.shift_group(|cur, _len| cur.checked_sub(1))\n\n }\n\n\n\n fn move_focused_to_group(\n\n &mut self,\n\n fun: impl FnOnce(usize, usize) -> Option<usize>,\n\n ) {\n\n if let Some(gid) = self\n\n .group_idx()\n\n .and_then(|idx| fun(idx, self.s.groups.len()))\n\n {\n\n if let Some(id) = self.focused_window() {\n\n self.move_window_to_group(id, gid);\n\n }\n\n self.focus_group(gid);\n", "file_path": "src/lib.rs", "rank": 78, "score": 6.934605738994077 }, { "content": "use std::cmp;\n\n\n\nuse crate::viewport::{Strut, Viewport};\n\nuse crate::x::{Connection, StrutPartial, WindowId};\n\n\n\npub struct Dock {\n\n window_id: WindowId,\n\n strut_partial: Option<StrutPartial>,\n\n}\n\n\n", "file_path": "src/screen.rs", "rank": 79, "score": 6.892608057761488 }, { "content": " .unwrap_or_default()\n\n as usize];\n\n if let Some(cwd) = Desktop::from_str(cur_d.as_str()).cwd {\n\n std::env::set_current_dir(cwd).into_diagnostic()?;\n\n }\n\n Err(exec::execvp(&cmd[0], &cmd[..])).into_diagnostic()?\n\n }\n\n }\n\n Some(\n\n Command::New(StrOpt { name })\n\n | Command::NewDesktop(StrOpt { name }),\n\n ) => {\n\n if !name.is_empty() {\n\n new_desktop(&conn, screen_idx, name.join(\" \"))?;\n\n }\n\n }\n\n Some(Command::NewFromWd { cwd }) => {\n\n if cwd.is_dir() {\n\n let name =\n\n cwd.file_name().unwrap_or_default().to_string_lossy();\n", "file_path": "src/main.rs", "rank": 80, "score": 6.879088264289471 }, { "content": " padding,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Copy> Layout<T> for StackLayout {\n\n fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn layout(\n\n &self,\n\n viewport: &Viewport,\n\n stack: &[T],\n\n focus: usize,\n\n ) -> Vec<MappedWindow<T>> {\n\n match stack.get(focus) {\n\n Some(&id) => {\n\n let vp = Viewport {\n\n x: viewport.x + self.padding,\n", "file_path": "src/layout.rs", "rank": 81, "score": 6.801198378466864 }, { "content": " }\n\n\n\n pub fn manage_window(&mut self, window_id: WindowId) -> bool {\n\n debug!(\"Managing window: {}\", window_id);\n\n\n\n // If we are already managing the window, then do nothing. We do not\n\n // want the window to end up in two groups at once. 
We shouldn't\n\n // be called in such cases, so treat it as an error.\n\n if self.is_window_managed(&window_id) {\n\n error!(\n\n \"Asked to manage window that's already managed: {}\",\n\n window_id\n\n );\n\n return false;\n\n }\n\n\n\n let window_types = self.connection.get_window_types(&window_id);\n\n\n\n let dock = window_types.contains(&WindowType::Dock);\n\n self.connection\n", "file_path": "src/lib.rs", "rank": 82, "score": 6.719586989696328 }, { "content": " Entry::Vacant(v) => {\n\n v.insert((change.into(), gidx));\n\n }\n\n Entry::Occupied(ref mut o) => {\n\n o.get_mut().0 = change.into();\n\n }\n\n }\n\n if self.s.current_crtc.is_none() {\n\n self.s.current_crtc = Some(change.crtc);\n\n }\n\n } else {\n\n self.s.crtc.remove(&change.crtc);\n\n if self.s.current_crtc == Some(change.crtc) {\n\n self.s.current_crtc = self.s.crtc.keys().next().cloned();\n\n }\n\n }\n\n self.activate_current_groups(true);\n\n debug!(\n\n \"Crtc's Changed! After: {:?}, {:?}\",\n\n &self.s.crtc, &self.s.current_crtc\n\n );\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 83, "score": 6.5877066639126225 }, { "content": " CrtcInfo {\n\n x: reply.x(),\n\n y: reply.y(),\n\n width,\n\n height,\n\n }\n\n }\n\n}\n\n\n\nimpl From<&CrtcChange> for CrtcInfo {\n\n fn from(change: &CrtcChange) -> Self {\n\n let (width, height) = match change.rotation {\n\n // NOTE: this is backwards.\n\n 90 | 270 => (change.height, change.width),\n\n _ => (change.width, change.height),\n\n };\n\n CrtcInfo {\n\n x: change.x,\n\n y: change.y,\n\n width,\n", "file_path": "src/x.rs", "rank": 84, "score": 6.476093150128037 }, { "content": "use std::env;\n\nuse std::fs::create_dir_all;\n\nuse std::io::Error;\n\n\n\nuse clap::IntoApp;\n\nuse clap_generate::{\n\n generate_to,\n\n generators::{Bash, Fish},\n\n};\n\n\n\n#[path = \"src/app.rs\"]\n\nmod app;\n\n\n", "file_path": "build.rs", "rank": 85, "score": 6.368110027562155 }, { "content": " right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 0,\n\n bottom_start_x: 0,\n\n bottom_end_x: 1919,\n\n };\n\n assert_eq!(\n\n vp_up.without_strut(2560, 2880, &strut),\n\n Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1440,\n\n }\n\n );\n\n assert_eq!(\n\n vp_down.without_strut(2560, 1440 + 1080, &strut),\n\n Viewport {\n\n x: 0,\n\n y: 1440 + 38,\n\n width: 1920,\n\n height: 1080 - 38,\n\n }\n\n )\n\n}\n", "file_path": "src/viewport.rs", "rank": 86, "score": 6.27522010848042 }, { "content": " right_start_y: 0,\n\n right_end_y: 0,\n\n top_start_x: 0,\n\n top_end_x: 0,\n\n bottom_start_x: 0,\n\n bottom_end_x: 2559,\n\n };\n\n assert_eq!(\n\n vp_up.without_strut(2560, 2880, &strut),\n\n Viewport {\n\n x: 0,\n\n y: 0,\n\n width: 2560,\n\n height: 1412,\n\n }\n\n );\n\n assert_eq!(\n\n vp_down.without_strut(2560, 2880, &strut),\n\n Viewport {\n\n x: 0,\n\n y: 1440,\n\n width: 2560,\n\n height: 1440,\n\n }\n\n )\n\n}\n\n\n", "file_path": "src/viewport.rs", "rank": 87, "score": 6.27522010848042 }, { "content": " }\n\n Viewport {\n\n x: left,\n\n y: top,\n\n width: right.checked_sub(left).unwrap_or(self.width),\n\n height: bottom.checked_sub(top).unwrap_or(self.height),\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/viewport.rs", "rank": 88, "score": 6.247276052868368 }, { "content": " );\n\n } else {\n\n info!(\"Closing window {} using xcb::destroy_window()\", window_id);\n\n xcb::destroy_window(&self.conn, window_id.to_x());\n\n }\n\n }\n\n\n\n /// Sets the window's position and size.\n\n pub fn configure_window(\n\n &self,\n\n window_id: &WindowId,\n\n vp: 
Viewport,\n\n border: u32,\n\n ) {\n\n let values = [\n\n (xcb::CONFIG_WINDOW_X as u16, vp.x),\n\n (xcb::CONFIG_WINDOW_Y as u16, vp.y),\n\n (xcb::CONFIG_WINDOW_WIDTH as u16, vp.width),\n\n (xcb::CONFIG_WINDOW_HEIGHT as u16, vp.height),\n\n (xcb::CONFIG_WINDOW_BORDER_WIDTH as u16, border),\n", "file_path": "src/x.rs", "rank": 89, "score": 6.158034488488903 }, { "content": " pub fn viewports(&self, mut ports: Vec<Viewport>) -> Vec<Viewport> {\n\n let docks: Vec<Strut> =\n\n self.docks.iter().filter_map(T::get_strut).collect();\n\n let width = ports.iter().map(|v| v.x + v.width).fold(0, cmp::max);\n\n let height = ports.iter().map(|v| v.y + v.height).fold(0, cmp::max);\n\n for vp in ports.iter_mut() {\n\n *vp = docks\n\n .iter()\n\n .fold(*vp, |v, s| v.without_strut(width, height, s));\n\n }\n\n ports\n\n }\n\n}\n\n\n", "file_path": "src/screen.rs", "rank": 90, "score": 6.131728812787662 }, { "content": " }\n\n Some(_) | None => (),\n\n };\n\n if let Some((_info, idx)) =\n\n self.s.current_crtc.and_then(|c| self.s.crtc.get_mut(&c))\n\n {\n\n *idx = new_idx;\n\n }\n\n self.truncate_groups();\n\n self.update_ewmh_desktops();\n\n }\n\n\n\n fn truncate_groups(&mut self) {\n\n let min_crtc_grp = self.s.crtc.values().map(|(_info, grp)| *grp).max();\n\n let min_win_grp = self.s.windows.iter().map(|w| w.group).max();\n\n match (min_win_grp, min_crtc_grp) {\n\n (Some(w), Some(c)) => {\n\n let min_grp = std::cmp::max(w, c);\n\n self.s.groups.truncate(min_grp + 1);\n\n }\n", "file_path": "src/lib.rs", "rank": 91, "score": 5.983602598597324 }, { "content": " pub fn new(c: Config, mut s: State) -> Result<Self> {\n\n let connection = Rc::new(Connection::connect()?);\n\n connection.install_as_wm(&c.keys)?;\n\n\n\n // Ensure that we have the CRTC map correct. This will grab the\n\n // current list of CRTCs from the server, remove any saved CRTCs\n\n // that are no longer mapped, and add any new CRTCs, perfering the\n\n // saved state.\n\n let mut crtc = connection.list_crtc()?;\n\n crtc.retain(|(_, ci)| ci.width > 0 && ci.height > 0);\n\n // Keep just the prior groups from the stored state. 
Things may have\n\n // happened to the CRTC positions and sizes in the mean time.\n\n s.crtc = crtc\n\n .into_iter()\n\n .enumerate()\n\n .map(|(grp, (crtc, info))| {\n\n let group = s.crtc.get(&crtc).map(|(_, g)| *g).unwrap_or(grp);\n\n (crtc, (info, group))\n\n })\n\n .collect::<HashMap<_, _>>();\n", "file_path": "src/lib.rs", "rank": 92, "score": 5.948517442691287 }, { "content": " let args: Vec<_> = std::env::args_os().collect();\n\n // Not much we can do if this failes\n\n let _ = wm.connection.stop_being_the_wm(&wm.c.keys);\n\n let err = exec::Command::new(&args[0]).args(&args[1..]).exec();\n\n error!(\"Failed to exec: {}\", err);\n\n // Not much we can do if this failes\n\n let _ = wm.connection.install_as_wm(&wm.c.keys);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/cmd.rs", "rank": 93, "score": 5.901938770569528 }, { "content": " let root = conn\n\n .get_setup()\n\n .roots()\n\n .nth(screen_idx as usize)\n\n .ok_or_else(|| Error::InvalidScreen)?\n\n .root();\n\n let randr_base = conn\n\n .get_extension_data(&mut randr::id())\n\n .ok_or_else(|| Error::RandrUnsupported)?\n\n .first_event();\n\n\n\n let atoms = InternedAtoms::new(&conn)?;\n\n\n\n let mut types = HashMap::new();\n\n types.insert(conn.WM_WINDOW_TYPE_DESKTOP(), WindowType::Desktop);\n\n types.insert(conn.WM_WINDOW_TYPE_DOCK(), WindowType::Dock);\n\n types.insert(conn.WM_WINDOW_TYPE_TOOLBAR(), WindowType::Toolbar);\n\n types.insert(conn.WM_WINDOW_TYPE_MENU(), WindowType::Menu);\n\n types.insert(conn.WM_WINDOW_TYPE_UTILITY(), WindowType::Utility);\n\n types.insert(conn.WM_WINDOW_TYPE_SPLASH(), WindowType::Splash);\n", "file_path": "src/x.rs", "rank": 94, "score": 5.876569432881714 }, { "content": " }\n\n }\n\n }\n\n\n\n fn focus_window(&mut self, wid: &WindowId, can_warp_pointer: bool) {\n\n if let Some(w) = self.s.windows.iter().find(|w| &w.id == wid) {\n\n if let Some(g) = self.s.groups.get_mut(w.group) {\n\n g.focused_window = Some(w.id);\n\n let mut warp_pointer = false;\n\n if let Some((&crtc_id, _)) =\n\n self.s.crtc.iter().find(|(_id, (_, gid))| w.group == *gid)\n\n {\n\n warp_pointer = self.s.current_crtc != Some(crtc_id);\n\n self.s.current_crtc = Some(crtc_id);\n\n self.update_ewmh_desktops();\n\n }\n\n self.activate_current_groups(can_warp_pointer && warp_pointer);\n\n }\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 95, "score": 5.3773096719134905 }, { "content": " self.on_destroy_notify(xcb::cast_event(&event))\n\n }\n\n xcb::KEY_PRESS => {\n\n self.on_key_press(xcb::cast_event(&event))\n\n }\n\n xcb::ENTER_NOTIFY => {\n\n self.on_enter_notify(xcb::cast_event(&event))\n\n }\n\n xcb::PROPERTY_NOTIFY => {\n\n self.on_property_notify(xcb::cast_event(&event))\n\n }\n\n xcb::CLIENT_MESSAGE => {\n\n self.on_client_message(xcb::cast_event(&event))\n\n }\n\n n if n == randr_notify => {\n\n self.on_randr_notify(xcb::cast_event(&event))\n\n }\n\n _ => None,\n\n };\n\n\n", "file_path": "src/x.rs", "rank": 96, "score": 5.288988927852033 }, { "content": "impl Screen<Dock> {\n\n pub fn add_dock(&mut self, conn: &Connection, window_id: WindowId) {\n\n let strut_partial = conn.get_strut_partial(&window_id);\n\n self.add(Dock {\n\n window_id,\n\n strut_partial,\n\n });\n\n }\n\n\n\n pub fn remove_dock(&mut self, window_id: &WindowId) {\n\n self.docks.retain(|d| &d.window_id != window_id);\n\n }\n\n}\n\n\n\nimpl<T: Dockable> Screen<T> {\n\n pub fn add(&mut self, dock: T) {\n\n self.docks.push(dock)\n\n }\n\n /// Figure out the usable area of the screen based on the STRUT_PARTIAL of\n\n /// all docks.\n", "file_path": 
"src/screen.rs", "rank": 97, "score": 5.232302685472518 }, { "content": " types.insert(conn.WM_WINDOW_TYPE_DIALOG(), WindowType::Dialog);\n\n types.insert(\n\n conn.WM_WINDOW_TYPE_DROPDOWN_MENU(),\n\n WindowType::DropdownMenu,\n\n );\n\n types.insert(conn.WM_WINDOW_TYPE_POPUP_MENU(), WindowType::PopupMenu);\n\n types.insert(conn.WM_WINDOW_TYPE_TOOLTIP(), WindowType::Tooltip);\n\n types.insert(\n\n conn.WM_WINDOW_TYPE_NOTIFICATION(),\n\n WindowType::Notification,\n\n );\n\n types.insert(conn.WM_WINDOW_TYPE_COMBO(), WindowType::Combo);\n\n types.insert(conn.WM_WINDOW_TYPE_DND(), WindowType::Dnd);\n\n types.insert(conn.WM_WINDOW_TYPE_NORMAL(), WindowType::Normal);\n\n\n\n let mut state = HashMap::new();\n\n state.insert(conn.WM_STATE_MODAL(), WindowState::Modal);\n\n state.insert(conn.WM_STATE_STICKY(), WindowState::Sticky);\n\n state\n\n .insert(conn.WM_STATE_MAXIMIZED_VERT(), WindowState::MaximizedVert);\n", "file_path": "src/x.rs", "rank": 98, "score": 5.14653336792847 }, { "content": " let reply = icccm::get_wm_protocols(\n\n &self.conn,\n\n window_id.to_x(),\n\n self.atoms.WM_PROTOCOLS,\n\n )\n\n .get_reply()?;\n\n Ok(reply.atoms().to_vec())\n\n }\n\n\n\n pub fn get_window_types(&self, window_id: &WindowId) -> Vec<WindowType> {\n\n // Filter out any types we don't understand, as that's what the EWMH\n\n // spec suggests we should do. Don't error if _NET_WM_WINDOW_TYPE\n\n // is not set - lots of applications don't bother.\n\n ewmh::get_wm_window_type(&self.conn, window_id.to_x())\n\n .get_reply()\n\n .map(|reply| {\n\n reply\n\n .atoms()\n\n .iter()\n\n .filter_map(|a| self.window_type_lookup.get(a).cloned())\n", "file_path": "src/x.rs", "rank": 99, "score": 4.999853492760737 } ]
Rust
migration/src/audio.rs
NEU-DSG/dailp-encoding
3cbfca2538e65ab1b797e120781252368063a755
use dailp::{AudioSlice, DocumentAudioId};
use reqwest::Client;
use serde::{Deserialize, Serialize};
extern crate pretty_env_logger;
use itertools::Itertools;
use log::{error, info};
use serde_json::Value;
use std::collections::{HashMap, HashSet};

#[derive(Serialize, Deserialize, Clone, Debug)]
struct DrsId(String);

#[derive(Serialize, Deserialize, Clone, Debug)]
struct ComplexDrsObject(HashMap<String, Value>);

#[derive(Serialize, Deserialize, Clone, Debug)]
struct DrsRes {
    pid: DrsId,
    parent: DrsId,
    thumbnails: Vec<String>,
    canonical_object: ComplexDrsObject,
}

impl DrsRes {
    pub async fn new(client: &Client, drs_id: &str) -> Result<Self, anyhow::Error> {
        let drs = "https://repository.library.northeastern.edu/api/v1/files/";
        Ok(client
            .get(format!("{}{}", drs, drs_id))
            .send()
            .await?
            .json::<DrsRes>()
            .await?)
    }
}

#[derive(Serialize, Deserialize, Clone, Debug)]
struct AudioAnnotationRow {
    layer: Option<String>,
    start_time: f64,
    end_time: f64,
    word: String,
}

#[non_exhaustive]
struct AudioLayer;

impl AudioLayer {
    pub const UNLABELLED: &'static str = "";
    pub const DOCUMENT: &'static str = "Document";
    pub const WORD: &'static str = "Syllabary Source";
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AudioRes {
    audio_url: String,
    annotations: String,
}

impl AudioRes {
    pub async fn new(audio_drs_id: &str, annotation_drs_id: &str) -> Result<Self, anyhow::Error> {
        info!("Creating new Audio Resource");
        let client = Client::new();
        let audio_response = DrsRes::new(&client, audio_drs_id).await?;
        let annotation_response = DrsRes::new(&client, annotation_drs_id).await?;
        Ok(Self {
            audio_url: audio_response
                .canonical_object
                .0
                .keys()
                .next()
                .unwrap()
                .clone(),
            annotations: client
                .get(
                    annotation_response
                        .canonical_object
                        .0
                        .keys()
                        .next()
                        .unwrap()
                        .clone(),
                )
                .send()
                .await?
                .text()
                .await?,
        })
    }

    pub fn into_document_audio(self) -> AudioSlice {
        AudioSlice {
            resource_url: self.audio_url.clone(),
            parent_track: Some(DocumentAudioId("".to_string())),
            annotations: Some(self.into_audio_slices()),
            index: 0,
            start_time: None,
            end_time: None,
        }
    }

    pub fn into_audio_slices(self /*from_layer: String*/) -> Vec<AudioSlice> {
        let mut result: Vec<AudioSlice> = vec![];
        use csv::{Error, ReaderBuilder};
        let mut reader = ReaderBuilder::new()
            .delimiter(b'\t')
            .has_headers(false)
            .from_reader(self.annotations.as_bytes());
        for (annotation_line, i) in reader.deserialize::<AudioAnnotationRow>().zip(0..) {
            if annotation_line.is_err() {
                error!("Failed to add line {}", i);
                result.push(AudioSlice {
                    resource_url: self.audio_url.clone(),
                    parent_track: Some(DocumentAudioId("".to_string())),
                    annotations: None,
                    index: i,
                    start_time: None,
                    end_time: None,
                });
            } else {
                let annotation = annotation_line.unwrap();
                result.push(AudioSlice {
                    resource_url: self.audio_url.clone(),
                    parent_track: Some(DocumentAudioId("".to_string())),
                    annotations: None,
                    index: i,
                    start_time: Some((annotation.start_time * 1000.0) as i32),
                    end_time: Some((annotation.end_time * 1000.0) as i32),
                });
                info!(
                    "Successfully added from line {}.\nURL: {}\nStart:{}ms\nEnd:{}ms",
                    i,
                    self.audio_url.clone(),
                    annotation.start_time * 1000.0,
                    annotation.end_time * 1000.0
                );
            };
        }
        result
    }
}
use dailp::{AudioSlice, DocumentAudioId};
use reqwest::Client;
use serde::{Deserialize, Serialize};
extern crate pretty_env_logger;
use itertools::Itertools;
use log::{error, info};
use serde_json::Value;
use std::collections::{HashMap, HashSet};

#[derive(Serialize, Deserialize, Clone, Debug)]
struct DrsId(String);

#[derive(Serialize, Deserialize, Clone, Debug)]
struct ComplexDrsObject(HashMap<String, Value>);

#[derive(Serialize, Deserialize, Clone, Debug)]
struct DrsRes {
    pid: DrsId,
    parent: DrsId,
    thumbnails: Vec<String>,
    canonical_object: ComplexDrsObject,
}

impl DrsRes {
    pub async fn new(client: &Client, drs_id: &str) -> Result<Self, anyhow::Error> {
        let drs = "https://repository.library.northeastern.edu/api/v1/files/";
        Ok(client
            .get(format!("{}{}", drs, drs_id))
            .send()
            .await?
            .json::<DrsRes>()
            .await?)
    }
}

#[derive(Serialize, Deserialize, Clone, Debug)]
struct AudioAnnotationRow {
    layer: Option<String>,
    start_time: f64,
    end_time: f64,
    word: String,
}

#[non_exhaustive]
struct AudioLayer;

impl AudioLayer {
    pub const UNLABELLED: &'static str = "";
    pub const DOCUMENT: &'static str = "Document";
    pub const WORD: &'static str = "Syllabary Source";
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AudioRes {
    audio_url: String,
    annotations: String,
}

impl AudioRes {
    pub async fn new(audio_drs_id: &str, annotation_drs_id: &str) -> Result<Self, anyhow::Error> {
    pub fn into_document_audio(self) -> AudioSlice {
        AudioSlice {
            resource_url: self.audio_url.clone(),
            parent_track: Some(DocumentAudioId("".to_string())),
            annotations: Some(self.into_audio_slices()),
            index: 0,
            start_time: None,
            end_time: None,
        }
    }

    pub fn into_audio_slices(self /*from_layer: String*/) -> Vec<AudioSlice> {
        let mut result: Vec<AudioSlice> = vec![];
        use csv::{Error, ReaderBuilder};
        let mut reader = ReaderBuilder::new()
            .delimiter(b'\t')
            .has_headers(false)
            .from_reader(self.annotations.as_bytes());
        for (annotation_line, i) in reader.deserialize::<AudioAnnotationRow>().zip(0..) {
            if annotation_line.is_err() {
                error!("Failed to add line {}", i);
                result.push(AudioSlice {
                    resource_url: self.audio_url.clone(),
                    parent_track: Some(DocumentAudioId("".to_string())),
                    annotations: None,
                    index: i,
                    start_time: None,
                    end_time: None,
                });
            } else {
                let annotation = annotation_line.unwrap();
                result.push(AudioSlice {
                    resource_url: self.audio_url.clone(),
                    parent_track: Some(DocumentAudioId("".to_string())),
                    annotations: None,
                    index: i,
                    start_time: Some((annotation.start_time * 1000.0) as i32),
                    end_time: Some((annotation.end_time * 1000.0) as i32),
                });
                info!(
                    "Successfully added from line {}.\nURL: {}\nStart:{}ms\nEnd:{}ms",
                    i,
                    self.audio_url.clone(),
                    annotation.start_time * 1000.0,
                    annotation.end_time * 1000.0
                );
            };
        }
        result
    }
}
        info!("Creating new Audio Resource");
        let client = Client::new();
        let audio_response = DrsRes::new(&client, audio_drs_id).await?;
        let annotation_response = DrsRes::new(&client, annotation_drs_id).await?;
        Ok(Self {
            audio_url: audio_response
                .canonical_object
                .0
                .keys()
                .next()
                .unwrap()
                .clone(),
            annotations: client
                .get(
                    annotation_response
                        .canonical_object
                        .0
                        .keys()
                        .next()
                        .unwrap()
                        .clone(),
                )
                .send()
                .await?
                .text()
                .await?,
        })
    }
function_block-function_prefix_line
[ { "content": "pub fn simple_phonetics_to_worcester(input: &str) -> String {\n\n use {\n\n lazy_static::lazy_static,\n\n regex::{Captures, Regex},\n\n };\n\n // Convert the t/th consonants to d/t\n\n lazy_static! {\n\n static ref TTH_PATTERN: Regex = Regex::new(r\"(gw|kw|j|ʔ|:)\").unwrap();\n\n }\n\n let result = TTH_PATTERN.replace_all(input, |cap: &Captures| match &cap[0] {\n\n \"gw\" | \"kw\" => \"qu\",\n\n \"j\" => \"ts\",\n\n \"ʔ\" => \"'\",\n\n \":\" => \"\",\n\n _ => unreachable!(),\n\n });\n\n result.into_owned()\n\n}\n\n\n", "file_path": "types/src/lexical.rs", "rank": 0, "score": 165249.11529911245 }, { "content": "/// Converts a given phonemic string from the Uchihara representation to the\n\n/// DAILP representation.\n\n/// For example: \"a:!\" => \"áá\"\n\npub fn convert_udb(input: &str) -> PhonemicString {\n\n // UDB represents glottal stops with the single quote.\n\n // TODO Move this to the output conversion step.\n\n let input = input.replace(\"'\", \"ʔ\");\n\n let pat = regex::Regex::new(\"([^aeiouv]*)([aeiouv]:?)([!*`^\\\"])?\").unwrap();\n\n let mut syllables = Vec::new();\n\n for caps in pat.captures_iter(&input) {\n\n let consonant = &caps[1];\n\n syllables.push(PhonemicString::Consonant(consonant.to_owned()));\n\n let vowel = &caps[2];\n\n let is_long = vowel.ends_with(':');\n\n let accent = caps.get(3).map(|x| x.as_str()).unwrap_or(\"\");\n\n let vowel_type = match accent {\n\n \"\" => {\n\n if is_long {\n\n VowelType::LongLow\n\n } else {\n\n VowelType::ShortLow\n\n }\n\n }\n", "file_path": "types/src/lexical.rs", "rank": 1, "score": 150433.45405196413 }, { "content": "pub fn is_root_morpheme(s: &str) -> bool {\n\n s.contains(|c: char| c.is_lowercase())\n\n}\n", "file_path": "types/src/form.rs", "rank": 2, "score": 129737.03962333595 }, { "content": "/// Group all the morpheme shapes with their paired glosses, return them along\n\n/// with the phonemic shape of the whole root.\n\nfn all_tags(cols: &mut impl Iterator<Item = String>) -> (Vec<(String, String)>, Option<String>) {\n\n let mut tags = Vec::new();\n\n let mut cols = cols.by_ref().peekable();\n\n // Tags are all uppercase ascii and numbers, followed by the corresponding morpheme.\n\n while let Some(true) = cols\n\n .peek()\n\n .map(|x| x.starts_with(|c: char| c.is_ascii_uppercase() || c.is_numeric()) || x.is_empty())\n\n {\n\n if let (Some(a), Some(b)) = (cols.next(), cols.next()) {\n\n if !a.is_empty() || !b.is_empty() {\n\n tags.push((a, b));\n\n }\n\n }\n\n }\n\n (tags, cols.next())\n\n}\n\n\n", "file_path": "types/src/lexical.rs", "rank": 4, "score": 127650.3883965108 }, { "content": "pub fn parse_gloss_layers<'a>(\n\n layer_one: &'a str,\n\n layer_two: &'a str,\n\n) -> IResult<&'a [u8], Vec<MorphemeSegment>> {\n\n let (_, one) = gloss_line(layer_one.as_bytes())?;\n\n let (_, two) = gloss_line(layer_two.as_bytes())?;\n\n Ok((\n\n &[],\n\n one.into_iter()\n\n .zip(two)\n\n .map(|(morpheme, gloss)| {\n\n MorphemeSegment::new(\n\n String::from_utf8_lossy(morpheme.tag).trim().to_owned(),\n\n String::from_utf8_lossy(gloss.tag).trim().to_owned(),\n\n // The gloss line is most likely to have the correct separator.\n\n gloss.followed_by,\n\n )\n\n })\n\n .collect(),\n\n ))\n\n}\n\n\n", "file_path": "types/src/gloss.rs", "rank": 5, "score": 125137.08622154317 }, { "content": "fn tth_to_dt(input: &str, keep_glottal_stops: bool, replace_colons: Option<&str>) -> String {\n\n use {\n\n lazy_static::lazy_static,\n\n regex::{Captures, Regex},\n\n };\n\n // Convert the t/th consonants to d/t\n\n lazy_static! 
{\n\n static ref TTH_PATTERN: Regex =\n\n Regex::new(r\"(qu|ts|ks|tlh|kwh|tl|kw|kh|th|ch|k|t|c|ʔ|:)\").unwrap();\n\n }\n\n let result = TTH_PATTERN.replace_all(input, |cap: &Captures| match &cap[0] {\n\n \"tlh\" => \"tl\",\n\n \"tl\" => \"dl\",\n\n \"qu\" => \"gw\",\n\n \"kwh\" => \"kw\",\n\n \"kw\" => \"gw\",\n\n \"kh\" => \"k\",\n\n \"th\" => \"t\",\n\n \"ch\" => \"ch\", // Not sure I've ever seen this segment in data before.\n\n \"k\" => \"g\",\n", "file_path": "types/src/lexical.rs", "rank": 7, "score": 114675.57059833792 }, { "content": "fn dt_to_tth(input: &str, keep_glottal_stops: bool, replace_colons: Option<&str>) -> String {\n\n use {\n\n lazy_static::lazy_static,\n\n regex::{Captures, Regex},\n\n };\n\n // Convert the t/th consonants to d/t\n\n lazy_static! {\n\n static ref DT_PATTERN: Regex = Regex::new(r\"(ts|ks|tl|kw|gw|k|t|c|g|d|j|'|ʔ|:)\").unwrap();\n\n }\n\n let result = DT_PATTERN.replace_all(input, |cap: &Captures| match &cap[0] {\n\n \"tl\" => \"tlh\",\n\n \"kw\" => \"kwh\",\n\n \"gw\" => \"kw\",\n\n \"k\" => \"kh\",\n\n \"t\" => \"th\",\n\n \"c\" => \"ch\",\n\n \"j\" => \"c\",\n\n \"g\" => \"k\",\n\n \"d\" => \"t\",\n\n \"'\" | \"ʔ\" => {\n", "file_path": "types/src/lexical.rs", "rank": 8, "score": 114675.57059833793 }, { "content": "fn parse_tag_section(values: &mut impl Iterator<Item = String>, has_page: bool) -> Option<TagForm> {\n\n let tag = values.next()?;\n\n let title = values.next()?;\n\n let definition = values.next().unwrap_or_default();\n\n let _page_num = if has_page { values.next() } else { None };\n\n let shape = values.next().filter(|x| !x.is_empty());\n\n let details_url = values.next().filter(|x| !x.is_empty());\n\n if !tag.is_empty() {\n\n Some(TagForm {\n\n tag,\n\n title,\n\n definition,\n\n shape,\n\n details_url,\n\n })\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "migration/src/tags.rs", "rank": 9, "score": 113681.16875398697 }, { "content": "fn render_template(doc: &AnnotatedDoc) -> Result<String> {\n\n let mut tera = tera::Tera::default();\n\n tera.add_raw_template(\"macros.tera.xml\", include_str!(\"../macros.tera.xml\"))?;\n\n tera.add_raw_template(\"template.tera.xml\", include_str!(\"../template.tera.xml\"))?;\n\n tera.register_filter(\"convert_breaks\", convert_breaks);\n\n let contents = tera.render(\"template.tera.xml\", &tera::Context::from_serialize(doc)?)?;\n\n Ok(contents)\n\n}\n\n\n\n/// Result obtained directly from the raw Google sheet.\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct SheetResult {\n\n /// Each element here represents one row.\n\n /// Semantic lines in our documents are delimited by empty rows.\n\n /// The line number sits in the first cell of the first row of each semantic line.\n\n pub values: Vec<Vec<String>>,\n\n}\n\n\n\nimpl SheetResult {\n\n pub async fn from_sheet(sheet_id: &str, sheet_name: Option<&str>) -> Result<Self> {\n", "file_path": "migration/src/spreadsheets.rs", "rank": 11, "score": 108994.376277039 }, { "content": "/// Takes an unprocessed document with metadata, passing it through our TEI\n\n/// template to produce an xml document named like the given title.\n\npub fn write_to_file(doc: &AnnotatedDoc) -> Result<()> {\n\n let contents = render_template(doc)?;\n\n // Make sure the output folder exists.\n\n std::fs::create_dir_all(OUTPUT_DIR)?;\n\n let file_name = format!(\"{}/{}.xml\", OUTPUT_DIR, doc.meta.id.0);\n\n info!(\"writing to {}\", file_name);\n\n let mut f = File::create(file_name)?;\n\n f.write_all(contents.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"migration/src/spreadsheets.rs", "rank": 12, "score": 108828.84018068509 }, { "content": "/// Encode all mid-word line breaks as `lb` tags and page breaks as `pb` tags.\n\npub fn convert_breaks(\n\n value: &tera::Value,\n\n context: &HashMap<String, tera::Value>,\n\n) -> tera::Result<tera::Value> {\n\n if let tera::Value::String(s) = value {\n\n let pb_tag = context.get(\"pb\").and_then(|page_num| {\n\n if let tera::Value::Number(num) = page_num {\n\n Some(format!(\"<pb n=\\\"{}\\\" />\", num))\n\n } else {\n\n None\n\n }\n\n });\n\n let lb_tag = context.get(\"lb\").and_then(|line_num| {\n\n if let tera::Value::Number(num) = line_num {\n\n Some(format!(\"<lb n=\\\"{}\\\" />\", num))\n\n } else {\n\n None\n\n }\n\n });\n\n let mut replaced = if let Some(pb_tag) = pb_tag {\n", "file_path": "migration/src/spreadsheets.rs", "rank": 13, "score": 102531.3395253798 }, { "content": "pub fn seg_verb_surface_forms(\n\n position: &PositionInDocument,\n\n date: &Date,\n\n cols: &mut impl Iterator<Item = String>,\n\n translation_count: usize,\n\n has_numeric: bool,\n\n has_comment: bool,\n\n) -> Vec<AnnotatedForm> {\n\n let mut forms = Vec::new();\n\n while let Some(form) = seg_verb_surface_form(\n\n position.clone(),\n\n date,\n\n cols,\n\n translation_count,\n\n has_numeric,\n\n has_comment,\n\n ) {\n\n forms.push(form);\n\n }\n\n forms\n\n}\n\n\n", "file_path": "types/src/lexical.rs", "rank": 14, "score": 97891.50361122855 }, { "content": "pub fn seg_verb_surface_form(\n\n position: PositionInDocument,\n\n date: &Date,\n\n cols: &mut impl Iterator<Item = String>,\n\n translation_count: usize,\n\n has_numeric: bool,\n\n has_comment: bool,\n\n) -> Option<AnnotatedForm> {\n\n // Skip empty cells until we find a form.\n\n let mut morpheme_layer = cols.next()?;\n\n while morpheme_layer.is_empty() {\n\n morpheme_layer = cols.next()?;\n\n }\n\n let gloss_layer = cols.next().filter(|s| !s.is_empty())?;\n\n\n\n // Then, the representations of the full word.\n\n let phonemic = cols.next().filter(|s| !s.is_empty())?;\n\n let _numeric = if has_numeric { cols.next() } else { None };\n\n let phonetic = cols.next().filter(|s| !s.is_empty())?;\n\n let syllabary = cols.next().filter(|s| !s.is_empty())?;\n", "file_path": "types/src/lexical.rs", "rank": 15, "score": 97891.50361122855 }, { "content": "pub fn root_noun_surface_forms(\n\n position: &PositionInDocument,\n\n date: &Date,\n\n cols: &mut impl Iterator<Item = String>,\n\n has_comment: bool,\n\n) -> Vec<AnnotatedForm> {\n\n let mut result = Vec::new();\n\n while let Some(form) = root_noun_surface_form(position, date, cols, has_comment) {\n\n result.push(form);\n\n }\n\n result\n\n}\n\n\n", "file_path": "types/src/lexical.rs", "rank": 16, "score": 97891.50361122855 }, { "content": "/// Build a single verb surface form from the given row.\n\npub fn root_verb_surface_form(\n\n position: &PositionInDocument,\n\n date: &Date,\n\n root: &str,\n\n root_gloss: &str,\n\n cols: &mut impl Iterator<Item = String>,\n\n translation_count: usize,\n\n has_numeric: bool,\n\n has_comment: bool,\n\n has_spacer: bool,\n\n) -> Option<AnnotatedForm> {\n\n use itertools::Itertools as _;\n\n\n\n // Each form has an empty column before it.\n\n // Then follows the morphemic segmentation.\n\n // All tags except the last one come before the root.\n\n let (mut morpheme_tags, phonemic) = all_tags(&mut cols.filter(|x| !x.is_empty()));\n\n if morpheme_tags.is_empty() {\n\n return None;\n\n }\n", "file_path": "types/src/lexical.rs", "rank": 17, "score": 97891.50361122855 }, { "content": "/// 
Gather many verb surface forms from the given row.\n\npub fn root_verb_surface_forms(\n\n position: &PositionInDocument,\n\n date: &Date,\n\n root: &str,\n\n root_gloss: &str,\n\n cols: &mut impl Iterator<Item = String>,\n\n translation_count: usize,\n\n has_numeric: bool,\n\n has_comment: bool,\n\n has_spacer: bool,\n\n) -> Vec<AnnotatedForm> {\n\n let mut forms = Vec::new();\n\n while let Some(form) = root_verb_surface_form(\n\n position,\n\n date,\n\n root,\n\n root_gloss,\n\n cols,\n\n translation_count,\n\n has_numeric,\n\n has_comment,\n\n has_spacer,\n\n ) {\n\n forms.push(form);\n\n }\n\n forms\n\n}\n\n\n", "file_path": "types/src/lexical.rs", "rank": 18, "score": 97891.50361122855 }, { "content": "/// TODO Convert all phonemic representations into the TAOC/DAILP format.\n\n/// TODO Store forms in any format with a tag defining the format so that\n\n/// GraphQL can do the conversion instead of the migration process.\n\npub fn root_noun_surface_form(\n\n position: &PositionInDocument,\n\n date: &Date,\n\n cols: &mut impl Iterator<Item = String>,\n\n has_comment: bool,\n\n) -> Option<AnnotatedForm> {\n\n let mut morpheme_layer = cols.next()?;\n\n while morpheme_layer.is_empty() {\n\n morpheme_layer = cols.next()?;\n\n }\n\n let gloss_layer = cols.next()?;\n\n let phonemic = cols.next()?;\n\n let _numeric = cols.next()?;\n\n let phonetic = cols.next()?;\n\n let syllabary = cols.next()?;\n\n let mut translations = Vec::new();\n\n for _ in 0..3 {\n\n if let Some(s) = cols.next() {\n\n if !s.is_empty() {\n\n translations.push(s);\n", "file_path": "types/src/lexical.rs", "rank": 19, "score": 97891.50361122855 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\", tag = \"type\")]\n\nstruct ImageInfo {\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\n/// A creative agent, which may be publisher or editor of manifest content.\n\n#[derive(Serialize)]\n\n#[serde(rename_all = \"camelCase\", tag = \"type\")]\n\npub struct Agent {\n\n id: String,\n\n label: LanguageString,\n\n homepage: Vec<Text>,\n\n}\n\nimpl Agent {\n\n /// [`Agent`] object representing Northeastern University Library\n\n pub fn neu_library() -> Self {\n\n Self {\n\n id: \"https://library.northeastern.edu/\".to_owned(),\n\n label: LanguageString::english(\"Northeastern University Library\"),\n\n homepage: vec![Text {\n", "file_path": "types/src/iiif.rs", "rank": 20, "score": 88669.83639676098 }, { "content": "type Error = Box<dyn std::error::Error + Sync + Send + 'static>;\n\n\n\nlazy_static::lazy_static! 
{\n\n // Share database connection between executions.\n\n // This prevents each lambda invocation from creating a new connection to\n\n // the database.\n\n static ref DATABASE: dailp::Database = dailp::Database::new().unwrap();\n\n static ref SCHEMA: Schema<Query, Mutation, EmptySubscription> = {\n\n Schema::build(Query, Mutation, EmptySubscription)\n\n .data(dailp::Database::new().unwrap())\n\n .data(DataLoader::new(dailp::Database::new().unwrap()))\n\n .finish()\n\n };\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Error> {\n\n pretty_env_logger::init();\n\n lambda_runtime::run(lambda_http::handler(handler)).await?;\n\n Ok(())\n", "file_path": "graphql/src/lambda.rs", "rank": 22, "score": 71776.7704257303 }, { "content": "#[derive(async_graphql::SimpleObject)]\n\nstruct FormsInTime {\n\n start: Option<dailp::Date>,\n\n end: Option<dailp::Date>,\n\n forms: Vec<dailp::AnnotatedForm>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, async_graphql::SimpleObject)]\n\npub struct UserInfo {\n\n email: String,\n\n #[serde(\n\n default,\n\n rename = \"cognito:groups\",\n\n with = \"StringWithSeparator::<CommaSeparator>\"\n\n )]\n\n groups: Vec<UserGroup>,\n\n}\n\n\n", "file_path": "graphql/src/query.rs", "rank": 24, "score": 56228.21016038043 }, { "content": "#[derive(async_graphql::InputObject)]\n\nstruct FormQuery {\n\n id: Option<String>,\n\n source: Option<String>,\n\n normalized_source: Option<String>,\n\n simple_phonetics: Option<String>,\n\n english_gloss: Option<String>,\n\n unresolved: Option<bool>,\n\n}\n\nimpl FormQuery {\n\n fn into_bson(self) -> bson::Document {\n\n let regex_query = |q| bson::doc! { \"$regex\": q, \"$options\": \"i\" };\n\n let mut doc = bson::Document::new();\n\n if let Some(id) = self.id {\n\n doc.insert(\"id\", regex_query(id));\n\n }\n\n if let Some(source) = self.source {\n\n doc.insert(\"source\", regex_query(source));\n\n }\n\n if let Some(normalized_source) = self.normalized_source {\n\n doc.insert(\"normalizedSource\", regex_query(normalized_source));\n", "file_path": "graphql/src/query.rs", "rank": 25, "score": 56228.21016038043 }, { "content": "/// Requires that the user is authenticated and a member of the given user group.\n\nstruct GroupGuard {\n\n group: UserGroup,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Guard for GroupGuard {\n\n async fn check(&self, ctx: &async_graphql::Context<'_>) -> async_graphql::Result<()> {\n\n let user = ctx.data_opt::<UserInfo>();\n\n let has_group = user.map(|user| user.groups.iter().any(|group| group == &self.group));\n\n if has_group == Some(true) {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Forbidden, user not in group '{}'\", self.group).into())\n\n }\n\n }\n\n}\n\n\n", "file_path": "graphql/src/query.rs", "rank": 26, "score": 56224.36035611594 }, { "content": "/// Requires that the user is authenticated.\n\nstruct AuthGuard;\n\n\n\n#[async_trait::async_trait]\n\nimpl Guard for AuthGuard {\n\n async fn check(&self, ctx: &async_graphql::Context<'_>) -> async_graphql::Result<()> {\n\n let user = ctx.data_opt::<UserInfo>();\n\n if user.is_some() {\n\n Ok(())\n\n } else {\n\n Err(\"Forbidden, user not authenticated\".into())\n\n }\n\n }\n\n}\n", "file_path": "graphql/src/query.rs", "rank": 27, "score": 56224.36035611594 }, { "content": "struct ConnectedForm {\n\n form: dailp::AnnotatedForm,\n\n links: Vec<dailp::LexicalConnection>,\n\n}\n", "file_path": "migration/src/early_vocab.rs", "rank": 28, "score": 54914.91658583772 }, { "content": "fn parse_new_df1975(\n\n sheet: SheetResult,\n\n doc_id: &str,\n\n 
year: i32,\n\n translation_count: usize,\n\n has_numeric: bool,\n\n has_comment: bool,\n\n after_root: usize,\n\n translations: usize,\n\n) -> impl Iterator<Item = LexicalEntryWithForms> {\n\n let doc_id = dailp::DocumentId(doc_id.to_owned());\n\n sheet\n\n .values\n\n .into_iter()\n\n // The first two rows are simply headers.\n\n .skip(2)\n\n // The rest are relevant to the verb itself.\n\n .filter_map(move |columns| {\n\n // The columns are as follows: key, page number, root, root gloss,\n\n // translations 1, 2, 3, transitivity, UDB class, blank, surface forms.\n", "file_path": "migration/src/lexical.rs", "rank": 29, "score": 54030.94181941336 }, { "content": "type Scalar = f64;\n\nimpl Geometry {\n\n pub fn new(x_min: Scalar, y_min: Scalar, x_max: Scalar, y_max: Scalar) -> Self {\n\n Self {\n\n x_min,\n\n y_min,\n\n x_max,\n\n y_max,\n\n }\n\n }\n\n pub fn width(&self) -> Scalar {\n\n (self.x_max - self.x_min).abs()\n\n }\n\n pub fn height(&self) -> Scalar {\n\n (self.y_max - self.y_min).abs()\n\n }\n\n pub fn to_iiif_string(&self) -> String {\n\n format!(\n\n \"pct:{},{},{},{}\",\n\n self.x_min,\n", "file_path": "types/src/geometry.rs", "rank": 30, "score": 53637.71211693606 }, { "content": "struct GlossSegment<'a> {\n\n tag: &'a [u8],\n\n followed_by: Option<SegmentType>,\n\n}\n\n\n\nconst SEPARATORS: &str = \"-=~\\\\\";\n\n\n", "file_path": "types/src/gloss.rs", "rank": 31, "score": 52966.16705371336 }, { "content": "fn upsert() -> mongodb::options::UpdateOptions {\n\n mongodb::options::UpdateOptions::builder()\n\n .upsert(true)\n\n .build()\n\n}\n\n\n\n/// One particular morpheme and all the known words that contain that exact morpheme.\n\n#[derive(async_graphql::SimpleObject)]\n\npub struct MorphemeReference {\n\n /// Phonemic shape of the morpheme.\n\n pub morpheme: String,\n\n /// List of words that contain this morpheme.\n\n pub forms: Vec<AnnotatedForm>,\n\n}\n\n\n\n/// A list of words grouped by the document that contains them.\n\n#[derive(async_graphql::SimpleObject)]\n\npub struct WordsInDocument {\n\n /// Unique identifier of the containing document\n\n pub document_id: Option<String>,\n\n /// What kind of document contains these words (e.g. 
manuscript vs dictionary)\n\n pub document_type: Option<DocumentType>,\n\n /// List of annotated and potentially segmented forms\n\n pub forms: Vec<AnnotatedForm>,\n\n}\n", "file_path": "types/src/database.rs", "rank": 32, "score": 44267.1892001955 }, { "content": "fn morpheme(input: &[u8]) -> IResult<&[u8], &[u8]> {\n\n take_while1(|c| !SEPARATORS.contains(c as char))(input)\n\n}\n\n\n", "file_path": "types/src/gloss.rs", "rank": 33, "score": 40457.79373828158 }, { "content": "fn morpheme_sep(input: &[u8]) -> IResult<&[u8], SegmentType> {\n\n map(is_a(SEPARATORS), |c: &[u8]| match c[0] as char {\n\n '-' => SegmentType::Morpheme,\n\n '=' => SegmentType::Clitic,\n\n _ => todo!(\"Unrecognized morpheme separator\"),\n\n })(input)\n\n}\n", "file_path": "types/src/gloss.rs", "rank": 34, "score": 38605.19040204876 }, { "content": "fn tailed_morpheme(input: &[u8]) -> IResult<&[u8], GlossSegment> {\n\n map(pair(morpheme, opt(morpheme_sep)), |(m, sep)| GlossSegment {\n\n tag: m,\n\n followed_by: sep,\n\n })(input)\n\n}\n\n\n", "file_path": "types/src/gloss.rs", "rank": 35, "score": 38605.19040204876 }, { "content": "/// Transforms a spreadsheet of morpheme information into a list of type-safe tag objects.\n\nfn parse_tag_glossary(sheet: SheetResult) -> Result<Vec<MorphemeTag>> {\n\n Ok(sheet\n\n .values\n\n .into_iter()\n\n // The first row is headers.\n\n .skip(1)\n\n // There are a few empty spacing rows to ignore.\n\n .filter(|row| !row.is_empty())\n\n .filter_map(|row| {\n\n // Skip over allomorphs, and instead allow them to emerge from our texts.\n\n let mut cols = row.into_iter();\n\n let id = cols.next()?;\n\n let _name = cols.next()?;\n\n let morpheme_type = cols.next()?;\n\n let _dailp_form = cols.next()?;\n\n let crg = parse_tag_section(&mut cols, true);\n\n let taoc = parse_tag_section(&mut cols, true);\n\n let learner = parse_tag_section(&mut cols, false);\n\n Some(MorphemeTag {\n\n id,\n", "file_path": "migration/src/tags.rs", "rank": 36, "score": 36986.67628555633 }, { "content": "/// Parses a string following the Leipzig glossing guidelines, where morphemes\n\n/// or morpheme glosses are separated by several different delimiters, each with\n\n/// different semantics.\n\nfn gloss_line(input: &[u8]) -> IResult<&[u8], Vec<GlossSegment>> {\n\n many1(tailed_morpheme)(input)\n\n}\n\n\n", "file_path": "types/src/gloss.rs", "rank": 37, "score": 36721.21058733322 }, { "content": "use crate::{DocumentId, FormId, Geometry};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// An annotation is a piece of information that provides details about a word,\n\n/// document, image, slice of an image, or audio recording.\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Annotation {\n\n #[serde(rename = \"_id\")]\n\n pub id: AnnotationId,\n\n pub content: String,\n\n pub attached_to: AnnotationAttachment,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Clone, Hash)]\n\npub struct AnnotationId(pub String);\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(tag = \"__typename\")]\n\npub enum AnnotationAttachment {\n", "file_path": "types/src/annotation.rs", "rank": 38, "score": 35407.98613614733 }, { "content": " /// Reply to another existing annotation, referred to by ID.\n\n Reply(Reply),\n\n /// Attached to a particular word.\n\n WordAttachment(WordAttachment),\n\n /// Attached to a document or section of the document image.\n\n DocumentRegion(DocumentRegion),\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Reply {\n\n to: 
AnnotationId,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct WordAttachment {\n\n to: FormId,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct DocumentRegion {\n\n pub document: DocumentId,\n\n /// Number of the page this annotation applies to.\n\n pub page: Option<u32>,\n\n /// An image annotation without a region applies to the whole image.\n\n /// A page number is required to specify a region.\n\n pub region: Option<Geometry>,\n\n}\n", "file_path": "types/src/annotation.rs", "rank": 39, "score": 35398.538004770875 }, { "content": "impl DocumentId {\n\n /// Page slug based on this identifier\n\n pub fn slug(&self) -> String {\n\n slug::slugify(&self.0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct ImageSourceId(pub String);\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct ImageSource {\n\n #[serde(rename = \"_id\")]\n\n pub id: ImageSourceId,\n\n pub url: String,\n\n}\n\n#[async_graphql::Object]\n\nimpl ImageSource {\n\n async fn id(&self) -> &str {\n\n &self.id.0\n", "file_path": "types/src/document.rs", "rank": 40, "score": 35152.93691782238 }, { "content": " }\n\n async fn url(&self) -> &str {\n\n &self.url\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct IiifImages {\n\n pub source: ImageSourceId,\n\n pub ids: Vec<String>,\n\n}\n\nimpl IiifImages {\n\n pub fn count(&self) -> usize {\n\n self.ids.len()\n\n }\n\n}\n\n#[async_graphql::Object]\n\nimpl IiifImages {\n\n pub async fn source(\n\n &self,\n", "file_path": "types/src/document.rs", "rank": 41, "score": 35147.883983548214 }, { "content": "use crate::{\n\n AnnotatedForm, AudioSlice, Contributor, Database, Date, SourceAttribution, Translation,\n\n TranslationBlock,\n\n};\n\nuse async_graphql::{dataloader::DataLoader, FieldResult};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::borrow::Cow;\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct AnnotatedDoc {\n\n #[serde(flatten)]\n\n pub meta: DocumentMetadata,\n\n pub segments: Option<Vec<TranslatedSection>>,\n\n}\n\nimpl AnnotatedDoc {\n\n pub fn new(meta: DocumentMetadata, segments: Vec<AnnotatedSeg>) -> Self {\n\n let mut merged_segments = Vec::new();\n\n // Skip the first block of the translation, since this usually contains\n\n // the header and information for translators and editors.\n\n let mut block_index = 1;\n", "file_path": "types/src/document.rs", "rank": 42, "score": 35145.37096276257 }, { "content": " Reference,\n\n Corpus,\n\n}\n\n\n\n#[derive(async_graphql::SimpleObject, Serialize, Deserialize, Clone)]\n\npub struct TranslatedSection {\n\n /// Translation of this portion of the source text.\n\n translation: Option<TranslationBlock>,\n\n /// Source text from the original document.\n\n source: AnnotatedSeg,\n\n}\n\n\n\n// Ideal structure:\n\n// documents: [{ meta, pages: [{ lines: [{ index, words }] }] }]\n\n// Basic to start: [{meta, lines: [{ index, words }]}]\n\n\n\n#[derive(Debug, async_graphql::Union, Serialize, Deserialize, Clone)]\n\n#[serde(tag = \"type\")]\n\npub enum AnnotatedSeg {\n\n Block(AnnotatedPhrase),\n", "file_path": "types/src/document.rs", "rank": 43, "score": 35144.395324545985 }, { "content": "}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct DocumentCollection {\n\n pub name: String,\n\n}\n\n#[async_graphql::Object]\n\nimpl DocumentCollection {\n\n /// Full name of this collection\n\n async fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n /// URL-ready slug for this collection, generated from the name\n\n 
async fn slug(&self) -> String {\n\n slug::slugify(&self.name)\n\n }\n\n\n\n /// All documents that are part of this collection\n\n async fn documents(\n", "file_path": "types/src/document.rs", "rank": 44, "score": 35143.316054219315 }, { "content": " Word(AnnotatedForm),\n\n LineBreak(LineBreak),\n\n PageBreak(PageBreak),\n\n}\n\nimpl AnnotatedSeg {\n\n pub fn forms(&self) -> Vec<&AnnotatedForm> {\n\n use AnnotatedSeg::*;\n\n match self {\n\n Block(block) => block.parts.iter().flat_map(|s| s.forms()).collect(),\n\n Word(w) => vec![w],\n\n LineBreak(_) => Vec::new(),\n\n PageBreak(_) => Vec::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, async_graphql::Enum, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]\n\npub enum BlockType {\n\n Block,\n\n Phrase,\n", "file_path": "types/src/document.rs", "rank": 45, "score": 35141.96506345456 }, { "content": "}\n\n\n\n#[derive(Debug, async_graphql::SimpleObject, Serialize, Deserialize, Clone)]\n\npub struct LineBreak {\n\n pub index: i32,\n\n}\n\n\n\n#[derive(Debug, async_graphql::SimpleObject, Serialize, Deserialize, Clone)]\n\npub struct PageBreak {\n\n pub index: i32,\n\n}\n\n\n\n#[derive(async_graphql::SimpleObject, Debug, Serialize, Deserialize, Clone)]\n\npub struct AnnotatedPhrase {\n\n pub ty: BlockType,\n\n pub index: i32,\n\n pub parts: Vec<AnnotatedSeg>,\n\n}\n\n\n\n/// All the metadata associated with one particular document.\n", "file_path": "types/src/document.rs", "rank": 46, "score": 35141.049725323806 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct DocumentMetadata {\n\n /// Official short identifier.\n\n #[serde(rename = \"_id\")]\n\n pub id: DocumentId,\n\n /// Full title of the document.\n\n pub title: String,\n\n /// Further details about this particular document.\n\n // pub details: String,\n\n #[serde(default)]\n\n /// The original source(s) of this document, the most important first.\n\n pub sources: Vec<SourceAttribution>,\n\n /// Where the source document came from, maybe the name of a collection.\n\n pub collection: Option<String>,\n\n pub genre: Option<String>,\n\n #[serde(default)]\n\n /// The people involved in collecting, translating, annotating.\n\n pub contributors: Vec<Contributor>,\n\n /// Rough translation of the document, broken down by paragraph.\n", "file_path": "types/src/document.rs", "rank": 47, "score": 35140.094949444654 }, { "content": " .count_words_in_document(&self.meta.id)\n\n .await? 
as i64)\n\n }\n\n\n\n /// All words in the document that have unanalyzed or unfamiliar parts.\n\n /// These words need to be corrected or reviewed further.\n\n async fn unresolved_forms(\n\n &self,\n\n context: &async_graphql::Context<'_>,\n\n ) -> FieldResult<Vec<Cow<'_, AnnotatedForm>>> {\n\n let forms = self.forms(context).await?;\n\n Ok(forms\n\n .into_iter()\n\n .filter(|form| form.is_unresolved())\n\n .collect())\n\n }\n\n}\n\n\n\n#[derive(async_graphql::Enum, Clone, Copy, PartialEq, Eq)]\n\npub enum DocumentType {\n", "file_path": "types/src/document.rs", "rank": 48, "score": 35136.04179839533 }, { "content": " #[serde(skip)]\n\n pub translation: Option<Translation>,\n\n /// URL for an image of the original physical document.\n\n #[serde(default)]\n\n pub page_images: Option<IiifImages>,\n\n pub date: Option<Date>,\n\n /// Whether this document is a reference, therefore just a list of forms.\n\n pub is_reference: bool,\n\n /// Audio recording of this document, if one exists\n\n #[serde(default)]\n\n pub audio_recording: Option<AudioSlice>,\n\n #[serde(default)]\n\n /// Arbitrary number used for manually ordering documents in a collection.\n\n /// For collections without manual ordering, use zero here.\n\n pub order_index: i64,\n\n}\n\n\n\n#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, async_graphql::NewType)]\n\npub struct DocumentId(pub String);\n\n\n", "file_path": "types/src/document.rs", "rank": 49, "score": 35135.887884780044 }, { "content": " Self {\n\n segments: Some(merged_segments),\n\n meta,\n\n }\n\n }\n\n}\n\n\n\n#[async_graphql::Object]\n\nimpl AnnotatedDoc {\n\n /// Official short identifier for this document\n\n async fn id(&self) -> &DocumentId {\n\n &self.meta.id\n\n }\n\n\n\n /// Full title of the document\n\n async fn title(&self) -> &str {\n\n &self.meta.title\n\n }\n\n\n\n /// Date and time this document was written or created\n", "file_path": "types/src/document.rs", "rank": 50, "score": 35129.40266228367 }, { "content": " async fn genre(&self) -> &Option<String> {\n\n &self.meta.genre\n\n }\n\n\n\n /// Images of each source document page, in order\n\n async fn page_images(&self) -> &Option<IiifImages> {\n\n &self.meta.page_images\n\n }\n\n\n\n /// The people involved in producing this document, including the original\n\n /// author, translators, and annotators\n\n async fn contributors(&self) -> &Vec<Contributor> {\n\n &self.meta.contributors\n\n }\n\n\n\n /// Is this document a reference source (unstructured list of words)?\n\n /// Otherwise, it is considered a structured document with a translation.\n\n async fn is_reference(&self) -> bool {\n\n self.meta.is_reference\n\n }\n", "file_path": "types/src/document.rs", "rank": 51, "score": 35128.737100225946 }, { "content": " context: &async_graphql::Context<'_>,\n\n ) -> async_graphql::FieldResult<ImageSource> {\n\n Ok(context\n\n .data::<Database>()?\n\n .image_source(&self.source)\n\n .await?\n\n .ok_or_else(|| anyhow::format_err!(\"Image source not found\"))?)\n\n }\n\n\n\n async fn urls(\n\n &self,\n\n context: &async_graphql::Context<'_>,\n\n ) -> async_graphql::FieldResult<Vec<String>> {\n\n let source = self.source(context).await?;\n\n Ok(self\n\n .ids\n\n .iter()\n\n .map(|id| format!(\"{}/{}\", source.url, id))\n\n .collect())\n\n }\n", "file_path": "types/src/document.rs", "rank": 52, "score": 35128.2719642988 }, { "content": " if let Some(segs) = &self.segments {\n\n Ok(segs\n\n .iter()\n\n .flat_map(|s| s.source.forms())\n\n .map(Cow::Borrowed)\n\n .collect())\n\n } else {\n\n 
Ok(context\n\n .data::<Database>()?\n\n .words_in_document(&self.meta.id)\n\n .await?\n\n .into_iter()\n\n .map(Cow::Owned)\n\n .collect())\n\n }\n\n }\n\n\n\n async fn form_count(&self, context: &async_graphql::Context<'_>) -> FieldResult<i64> {\n\n Ok(context\n\n .data::<Database>()?\n", "file_path": "types/src/document.rs", "rank": 53, "score": 35128.26014033694 }, { "content": " async fn date(&self) -> &Option<Date> {\n\n &self.meta.date\n\n }\n\n\n\n /// The original source(s) of this document, the most important first.\n\n async fn sources(&self) -> &Vec<SourceAttribution> {\n\n &self.meta.sources\n\n }\n\n\n\n /// Where the source document came from, maybe the name of a collection\n\n async fn collection(&self) -> Option<DocumentCollection> {\n\n self.meta\n\n .collection\n\n .as_ref()\n\n .map(|name| DocumentCollection {\n\n name: name.to_owned(),\n\n })\n\n }\n\n\n\n /// The genre of the document, used to group similar ones\n", "file_path": "types/src/document.rs", "rank": 54, "score": 35128.03722644294 }, { "content": " &self,\n\n context: &async_graphql::Context<'_>,\n\n ) -> async_graphql::FieldResult<Vec<AnnotatedDoc>> {\n\n Ok(context\n\n .data::<Database>()?\n\n .all_documents(Some(&*self.name))\n\n .await?)\n\n }\n\n}\n", "file_path": "types/src/document.rs", "rank": 55, "score": 35127.46585082651 }, { "content": " context: &async_graphql::Context<'_>,\n\n ) -> FieldResult<Option<Cow<'_, Vec<TranslatedSection>>>> {\n\n // We may not have complete data.\n\n if self.segments.is_some() {\n\n Ok(self.segments.as_ref().map(|s| Cow::Borrowed(s)))\n\n } else {\n\n let db_doc = context\n\n .data::<DataLoader<Database>>()?\n\n .load_one(self.meta.id.clone())\n\n .await?;\n\n Ok(db_doc.and_then(|d| d.segments).map(Cow::Owned))\n\n }\n\n }\n\n\n\n /// All the words contained in this document, dropping structural formatting\n\n /// like line and page breaks.\n\n async fn forms(\n\n &self,\n\n context: &async_graphql::Context<'_>,\n\n ) -> FieldResult<Vec<Cow<'_, AnnotatedForm>>> {\n", "file_path": "types/src/document.rs", "rank": 56, "score": 35126.576916941805 }, { "content": "\n\n /// The audio recording resource for this entire document\n\n async fn audio_recording(&self) -> &Option<AudioSlice> {\n\n // TODO: Allow for multiple audio sources\n\n &self.meta.audio_recording\n\n }\n\n /// Arbitrary number used for manually ordering documents in a collection.\n\n /// For collections without manual ordering, use zero here.\n\n async fn order_index(&self) -> i64 {\n\n self.meta.order_index\n\n }\n\n\n\n /// URL-ready slug for this document, generated from the title\n\n async fn slug(&self) -> String {\n\n self.meta.id.slug()\n\n }\n\n\n\n /// Segments of the document paired with their respective rough translations\n\n async fn translated_segments(\n\n &self,\n", "file_path": "types/src/document.rs", "rank": 57, "score": 35125.497011149724 }, { "content": " let blocks = &meta\n\n .translation\n\n .as_ref()\n\n .expect(&format!(\"Missing translation for {}\", meta.id.0))\n\n .blocks;\n\n for seg in segments {\n\n // Only blocks have an associated translation.\n\n let trans = if let AnnotatedSeg::Block(_) = &seg {\n\n let t = blocks.get(block_index);\n\n block_index += 1;\n\n t.cloned()\n\n } else {\n\n None\n\n };\n\n merged_segments.push(TranslatedSection {\n\n translation: trans,\n\n source: seg,\n\n });\n\n }\n\n\n", "file_path": "types/src/document.rs", "rank": 58, "score": 35121.800083924165 }, { "content": "#[derive(Hash, PartialEq, Eq, Clone, Copy)]\n\nenum 
CherokeeSyllabaryVisualGroups {\n\n Lightning,\n\n Fork,\n\n Hills,\n\n Angles,\n\n Head,\n\n LargeHook,\n\n Post,\n\n Hook,\n\n Lip,\n\n Belly,\n\n Chair,\n\n Trough,\n\n StormCloud,\n\n Bicycle,\n\n Swirl,\n\n Horns,\n\n Scythe,\n\n LetterBigR,\n\n LetterBigB,\n", "file_path": "types/src/orthography.rs", "rank": 59, "score": 29916.472169822573 }, { "content": "export const ClientOnly = (props: { children: any }) => {\n\n const hasMounted = useHasMounted()\n\n if (hasMounted) {\n\n return createElement(Fragment, props)\n\n } else {\n\n return null\n\n }\n", "file_path": "website/src/cms/routes.ts", "rank": 60, "score": 26768.356402400343 }, { "content": "export const apolloClient = (token: string) =>\n\n new ApolloClient({\n\n ssrMode: isSSR(),\n\n cache: new InMemoryCache(),\n\n link: authLink(token).concat(httpLink(token)),\n", "file_path": "website/src/apollo.ts", "rank": 61, "score": 26768.356402400343 }, { "content": "const excludedDocuments = [\"DF1975\", \"AC1995\", \"PF1975\"]\n", "file_path": "website/gatsby-node.js", "rank": 62, "score": 26535.85216854716 }, { "content": "export const documentRoute = (slug: string) => `/documents/${slug}`\n", "file_path": "website/src/routes.ts", "rank": 63, "score": 26535.85216854716 }, { "content": "export const sourceCitationRoute = (key: string) => `/sources#source-${key}`\n", "file_path": "website/src/routes.ts", "rank": 64, "score": 25865.738765672017 }, { "content": "export const sourceCitationId = (key: string) => `source-${key}`\n", "file_path": "website/src/routes.ts", "rank": 65, "score": 25865.738765672017 }, { "content": "export const documentDetailsRoute = (slug: string) =>\n", "file_path": "website/src/routes.ts", "rank": 66, "score": 25641.074476077454 }, { "content": "export const useScrollableTabState = (initialState?: TabInitialState) => {\n\n const tabs = useTabState(initialState)\n\n\n\n const tabScrollPos = useRef<TabScrollPositions>({})\n\n useEffect(() => {\n\n // Restore the scroll position for the new tab.\n\n const newScroll = tabScrollPos.current[tabs.selectedId!]\n\n if (newScroll) {\n\n window.scrollTo({ top: newScroll })\n\n }\n\n\n\n function listener() {\n\n // Save scroll position for last tab.\n\n const lastTabId = tabs.selectedId!\n\n if (lastTabId) {\n\n tabScrollPos.current[lastTabId] = window.scrollY\n\n }\n\n }\n\n window.addEventListener(\"scroll\", listener, { passive: true })\n\n\n\n return () => window.removeEventListener(\"scroll\", listener)\n\n }, [tabs.selectedId])\n\n\n\n return tabs\n", "file_path": "website/src/scrollable-tabs.ts", "rank": 67, "score": 24231.581445741547 }, { "content": "export const useGraphQLForm = (\n\n initialData: any,\n\n query: any,\n\n mutation: any,\n\n config: {\n\n label: string\n\n id: any\n\n variables: Record<string, any>\n\n fields: CustomField[]\n\n transformIn?: (input: any) => any\n\n transformOut?: (input: any) => any\n\n }\n\n) => {\n\n const cms = useCMS()\n\n return useForm({\n\n loadInitialValues: async () => {\n\n if (cms.api.graphql) {\n\n const { data } = await cms.api.graphql.query({\n\n query,\n\n variables: config.variables,\n\n })\n\n return config.transformIn ? config.transformIn(data) : data\n\n } else {\n\n return initialData\n\n }\n\n },\n\n onSubmit: async (formData) => {\n\n const finalData = config.transformOut\n\n ? 
config.transformOut(formData)\n\n : formData\n\n const { data, errors } = await cms.api.graphql.mutate({\n\n mutation,\n\n variables: { ...config.variables, data: finalData },\n\n })\n\n if (errors) {\n\n console.error(errors)\n\n alert(errors[0].message)\n\n throw errors\n\n } else {\n\n return data\n\n }\n\n },\n\n id: config.id,\n\n label: config.label,\n\n fields: config.fields,\n\n })\n", "file_path": "website/src/cms/graphql-form.ts", "rank": 68, "score": 23489.564297376175 }, { "content": "use crate::{\n\n AnnotatedDoc, AudioSlice, Database, Date, MorphemeId, MorphemeSegment, PositionInDocument,\n\n};\n\nuse async_graphql::{dataloader::DataLoader, FieldResult};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, async_graphql::NewType)]\n\npub struct FormId(pub String);\n\n\n\n/// A single word in an annotated document.\n\n/// One word contains several layers of interpretation, including the original\n\n/// source text, multiple layers of linguistic annotation, and annotator notes.\n\n#[derive(Clone, Serialize, Deserialize, Debug, async_graphql::SimpleObject)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[graphql(complex)]\n\npub struct AnnotatedForm {\n\n #[serde(rename = \"_id\")]\n\n /// Unique identifier of this form\n\n pub id: String,\n\n /// Original source text\n", "file_path": "types/src/form.rs", "rank": 69, "score": 44.16630756817247 }, { "content": " .await?\n\n .map::<Result<_>, _>(|d| Ok(bson::from_document(d?)?))\n\n .collect()\n\n .await\n\n }\n\n\n\n pub async fn potential_syllabary_matches(&self, syllabary: &str) -> Result<Vec<AnnotatedForm>> {\n\n let alternate_spellings = CherokeeOrthography::similar_syllabary_strings(syllabary);\n\n let spelling_queries: Vec<_> = alternate_spellings\n\n .into_iter()\n\n .map(|s| bson::doc! { \"source\": s })\n\n .collect();\n\n self.word_search(bson::doc! 
{ \"$or\": spelling_queries })\n\n .await\n\n }\n\n\n\n pub async fn lexical_entry(&self, id: &str) -> Result<Option<AnnotatedForm>> {\n\n Ok(self\n\n .client\n\n .collection(Self::WORDS)\n", "file_path": "types/src/database.rs", "rank": 71, "score": 35.69795658203903 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// A unique identifier for audio slices\n\n#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, async_graphql::NewType)]\n\npub struct DocumentAudioId(pub String);\n\n\n\n/// A segment of audio representing a document, word, phrase,\n\n/// or other audio unit\n\n#[derive(Serialize, Deserialize, Clone, Debug, async_graphql::SimpleObject)]\n\npub struct AudioSlice {\n\n /// The audio resource this audio slice is taken from, generally pulled from the DRS API\n\n pub resource_url: String,\n\n /// An audio slice this slice is a subunit of, if there is one\n\n pub parent_track: Option<DocumentAudioId>,\n\n /// The annotations for subunits of this slice, if there are any\n\n #[graphql(skip = true)]\n\n pub annotations: Option<Vec<AudioSlice>>,\n\n /// This slice's relative position to other slices within an audio resource\n\n pub index: i32,\n\n /// The time (in seconds) in the parent track where this slice begins.\n\n pub start_time: Option<i32>,\n\n /// The time (in seconds) in the parent track where this slice ends.\n\n pub end_time: Option<i32>,\n\n}\n", "file_path": "types/src/audio.rs", "rank": 72, "score": 34.68648666150887 }, { "content": " const DOCUMENTS: &'static str = \"annotated-documents\";\n\n /// TODO Rename to \"forms\"\n\n const WORDS: &'static str = \"sea-of-words\";\n\n const TAGS: &'static str = \"tags\";\n\n const CONNECTIONS: &'static str = \"lexical-connections\";\n\n /// TODO Rename to \"contributors\"\n\n const PEOPLE: &'static str = \"people\";\n\n const IMAGE_SOURCES: &'static str = \"image-sources\";\n\n const PAGES: &'static str = \"pages\";\n\n const ANNOTATIONS: &'static str = \"annotations\";\n\n}\n\n\n\nimpl Database {\n\n pub fn new() -> Result<Self> {\n\n use mongodb::{options::ClientOptions, Client};\n\n\n\n let db_url = std::env::var(\"MONGODB_URI\")?;\n\n let opts = executor::block_on(ClientOptions::parse(&db_url))?;\n\n let client = Client::with_options(opts)?;\n\n let db = client.database(\"dailp-encoding\");\n", "file_path": "types/src/database.rs", "rank": 73, "score": 33.07733810880692 }, { "content": "use crate::*;\n\nuse async_graphql::FieldResult;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::borrow::Cow;\n\n\n\n/// A single unit of meaning and its corresponding English gloss.\n\n#[derive(Serialize, Clone, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct MorphemeSegment {\n\n /// Source language representation of this segment.\n\n pub morpheme: String,\n\n /// Target language representation of this segment.\n\n pub gloss: String,\n\n /// What kind of thing is the next segment?\n\n ///\n\n /// This field determines what character should separate this segment from\n\n /// the next one when reconstituting the full segmentation string.\n\n pub followed_by: Option<SegmentType>,\n\n}\n\n\n", "file_path": "types/src/morpheme.rs", "rank": 74, "score": 30.825333675863 }, { "content": "/// They may have transcribed a handwritten manuscript, translated it into\n\n/// English, or analyzed it for linguistic information.\n\n/// This information can be used to track who contributed to the development of\n\n/// each individual document, and track contributions to the archive as a 
whole.\n\n#[derive(async_graphql::SimpleObject, Clone, Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ContributorDetails {\n\n /// Full name of this person, this exact string must be used to identify\n\n /// them elsewhere, like in the attribution for a particular document.\n\n #[serde(rename = \"_id\")]\n\n pub full_name: String,\n\n /// Alternate name of this person, may be in a different language or writing\n\n /// system. Used only for descriptive purposes.\n\n pub alternate_name: Option<String>,\n\n /// The optional date that this contributor was born on.\n\n pub birth_date: Option<crate::Date>,\n\n}\n\n\n\n/// Attribution for a particular source, whether an institution or an individual.\n\n/// Most commonly, this will represent the details of a library or archive that\n\n/// houses documents used elsewhere.\n\n#[derive(async_graphql::SimpleObject, Clone, Debug, Serialize, Deserialize)]\n\npub struct SourceAttribution {\n\n /// Name of the source, i.e. \"The Newberry Library\"\n\n pub name: String,\n\n /// URL of this source's homepage, i.e. \"https://www.newberry.org/\"\n\n pub link: String,\n\n}\n", "file_path": "types/src/person.rs", "rank": 75, "score": 30.053783557380328 }, { "content": "use crate::{AnnotatedForm, Date, DocumentId, Geometry, MorphemeSegment};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// The reference position within a document of one specific form\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, async_graphql::SimpleObject)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[graphql(complex)]\n\npub struct PositionInDocument {\n\n /// What document is this item within?\n\n pub document_id: DocumentId,\n\n /// What page is it on (starting from 1)? May be a single page or range of pages.\n\n pub page_number: String,\n\n /// How many items come before this one in the whole document?\n\n ///\n\n /// 1-indexed position indicating where the form sits in the ordering of all\n\n /// forms in the document. Used for relative ordering of forms from the\n\n /// same document.\n\n pub index: i32,\n\n /// What section of the document image corresponds to this item?\n\n pub geometry: Option<Geometry>,\n", "file_path": "types/src/lexical.rs", "rank": 76, "score": 29.883318723740288 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// An individual or organization that contributed to the creation or analysis\n\n/// of a particular document or source. 
Each contributor has a name and a role\n\n/// that specifies the type of their contributions.\n\n#[derive(Clone, Debug, Serialize, Deserialize, async_graphql::SimpleObject)]\n\n#[graphql(complex)]\n\npub struct Contributor {\n\n /// Full name of the contributor\n\n pub name: String,\n\n /// The role that defines most of their contributions to the associated item\n\n pub role: String,\n\n}\n\nimpl Contributor {\n\n /// Create new contributor with the role \"Author\"\n\n pub fn new_author(name: String) -> Self {\n\n Self {\n\n name,\n\n role: \"Author\".to_owned(),\n\n }\n", "file_path": "types/src/person.rs", "rank": 77, "score": 29.8456969513139 }, { "content": " }\n\n // Remove trailing empty lines.\n\n let last_best = all_lines.iter().rposition(|l| !l.is_empty()).unwrap_or(0);\n\n all_lines.truncate(last_best + 1);\n\n all_lines\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct DocumentIndex {\n\n pub sheet_ids: Vec<String>,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct AnnotationRow {\n\n pub title: String,\n\n pub items: Vec<String>,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n", "file_path": "migration/src/spreadsheets.rs", "rank": 78, "score": 29.790785603548922 }, { "content": " }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Loader<DocumentId> for Database {\n\n type Value = AnnotatedDoc;\n\n type Error = mongodb::error::Error;\n\n async fn load(\n\n &self,\n\n keys: &[DocumentId],\n\n ) -> Result<HashMap<DocumentId, Self::Value>, Self::Error> {\n\n // Turn keys into strings for database request.\n\n let keys: Vec<_> = keys.iter().map(|x| &x.0 as &str).collect();\n\n let items: Vec<Self::Value> =\n\n find_all_keys(self.client.collection(Self::DOCUMENTS), keys).await?;\n\n Ok(items\n\n .into_iter()\n\n .map(|tag| (tag.meta.id.clone(), tag))\n\n .collect())\n\n }\n", "file_path": "types/src/database.rs", "rank": 80, "score": 28.808369998745654 }, { "content": "use crate::{Database, MorphemeId};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Represents a morphological gloss tag without committing to a single representation.\n\n///\n\n/// - TODO: Use a more generic representation than fields for learner, TAOC, and CRG.\n\n#[derive(Serialize, Deserialize, Debug, Clone, async_graphql::SimpleObject)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[graphql(complex)]\n\npub struct MorphemeTag {\n\n /// Unique identifier for this morpheme which should be used in raw\n\n /// interlinear glosses of a word containing this morpheme.\n\n /// Standard annotation tag for this morpheme, defined by DAILP.\n\n #[serde(rename = \"_id\")]\n\n pub id: String,\n\n /// The \"learner\" representation of this morpheme, a compromise between no\n\n /// interlinear glossing and standard linguistic terms.\n\n pub learner: Option<TagForm>,\n\n /// Representation of this morpheme that closely aligns with _Tone and\n\n /// Accent in Oklahoma Cherokee_.\n", "file_path": "types/src/tag.rs", "rank": 81, "score": 28.554551939008178 }, { "content": "pub struct SemanticLine {\n\n pub number: String,\n\n pub rows: Vec<AnnotationRow>,\n\n pub ends_page: bool,\n\n}\n\n\n\nimpl SemanticLine {\n\n /// Is this line devoid of any source or annotation information?\n\n /// Usually indicates that this is an extra line at the end of a document.\n\n fn is_empty(&self) -> bool {\n\n self.rows.iter().all(|r| r.items.is_empty())\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct AnnotatedLine {\n\n pub words: Vec<AnnotatedForm>,\n\n ends_page: 
bool,\n\n}\n\n\n", "file_path": "migration/src/spreadsheets.rs", "rank": 82, "score": 28.443186406407875 }, { "content": " pub async fn image_source(&self, id: &ImageSourceId) -> Result<Option<ImageSource>> {\n\n Ok(self\n\n .client\n\n .collection(Self::IMAGE_SOURCES)\n\n .find_one(bson::doc! { \"_id\": &id.0 }, None)\n\n .await?\n\n .and_then(|doc| bson::from_document(doc).ok()))\n\n }\n\n\n\n pub async fn words_in_document(&self, doc_id: &DocumentId) -> Result<Vec<AnnotatedForm>> {\n\n let mut forms: Vec<AnnotatedForm> = self\n\n .client\n\n .collection(Self::WORDS)\n\n .find(\n\n bson::doc! { \"position.documentId\": bson::to_bson(doc_id)? },\n\n None,\n\n )\n\n .await?\n\n .map::<Result<_>, _>(|d| Ok(bson::from_document(d?)?))\n\n .collect::<Result<Vec<_>>>()\n", "file_path": "types/src/database.rs", "rank": 83, "score": 28.193748102409803 }, { "content": "\n\n#[derive(Clone, Eq, PartialEq, Hash)]\n\npub struct PageId(pub String);\n\n\n\n#[async_trait::async_trait]\n\nimpl Loader<PageId> for Database {\n\n type Value = crate::page::Page;\n\n type Error = mongodb::error::Error;\n\n async fn load(&self, keys: &[PageId]) -> Result<HashMap<PageId, Self::Value>, Self::Error> {\n\n // Turn keys into strings for database request.\n\n let keys: Vec<_> = keys.iter().map(|x| &x.0 as &str).collect();\n\n let items: Vec<Self::Value> =\n\n find_all_keys(self.client.collection(Self::PAGES), keys).await?;\n\n Ok(items\n\n .into_iter()\n\n .map(|tag| (PageId(tag.id.clone()), tag))\n\n .collect())\n\n }\n\n}\n\n\n", "file_path": "types/src/database.rs", "rank": 84, "score": 28.16746283302294 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]\n\npub struct Date(pub chrono::NaiveDate);\n\n\n\nimpl Date {\n\n pub fn new(internal: chrono::NaiveDate) -> Self {\n\n Self(internal)\n\n }\n\n pub fn from_ymd(year: i32, month: u32, day: u32) -> Self {\n\n Self::new(chrono::NaiveDate::from_ymd(year, month, day))\n\n }\n\n pub fn parse(s: &str) -> Result<Self, chrono::ParseError> {\n\n chrono::NaiveDate::parse_from_str(s, \"%Y-%m-%d\").map(Self)\n\n }\n\n}\n\n\n\n#[async_graphql::Object]\n\nimpl Date {\n\n async fn year(&self) -> i32 {\n", "file_path": "types/src/date.rs", "rank": 85, "score": 27.548103443084084 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// A rectangle slice of something, usually a large document image.\n\n///\n\n/// Units are a percentage of the containing document.\n\n/// This is more useful than pixels because we can more easily compare\n\n/// geometries between images of different resolutions. For example, we could identify\n\n/// all items in any bottom-right corner with Geometry(90%, 90%, 100%, 100%).\n\n/// Physical units would be better, but IIIF only allows pixels and percentages.\n\n///\n\n/// Potential use case:\n\n/// Each document is represented by an ordered list of [AnnotatedForm]s. Each\n\n/// form has some geometry on the source image. There are a bunch of other\n\n/// annotations on the source image that are unordered. These may be specific\n\n/// syllabary characters, notes about the handwriting, etc. 
Using MongoDB\n\n/// comparison queries, we can request a list of all spatial annotations\n\n/// on the same document that lie within or around the geometry of this specific word.\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, async_graphql::SimpleObject)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Geometry {\n\n x_min: Scalar,\n\n y_min: Scalar,\n\n x_max: Scalar,\n\n y_max: Scalar,\n\n}\n\n\n", "file_path": "types/src/geometry.rs", "rank": 86, "score": 27.45609203920419 }, { "content": " .find_one(bson::doc! { \"_id\": bson::to_bson(document_id)? }, None)\n\n .await?\n\n .and_then(|doc| bson::from_document(doc).ok())\n\n .unwrap();\n\n // Build a IIIF manifest for this document.\n\n Ok(iiif::Manifest::from_document(self, doc, url).await)\n\n }\n\n\n\n /// USE WITH CAUTION! Clears the entire database of words, documents, etc.\n\n pub async fn clear_all(&self) -> Result<()> {\n\n Ok(self.client.drop(None).await?)\n\n }\n\n}\n\n\n\npub struct PagesDb {\n\n conn: mongodb::Collection<crate::page::Page>,\n\n}\n\nimpl PagesDb {\n\n pub async fn update(&self, page: crate::page::Page) -> Result<()> {\n\n upsert_one(&self.conn, &page.id, &page).await\n", "file_path": "types/src/database.rs", "rank": 87, "score": 27.417203124767013 }, { "content": " pub async fn update_image_source(&self, source: ImageSource) -> Result<()> {\n\n upsert_one(\n\n &self.client.collection(Self::IMAGE_SOURCES),\n\n &source.id.0,\n\n &source,\n\n )\n\n .await\n\n }\n\n\n\n pub async fn all_documents(&self, collection: Option<&str>) -> Result<Vec<AnnotatedDoc>> {\n\n self.client\n\n .collection(Self::DOCUMENTS)\n\n .find(\n\n collection.map(|collection| {\n\n bson::doc! { \"collection\": collection }\n\n }),\n\n mongodb::options::FindOptions::builder()\n\n .projection(bson::doc! 
{ \"segments\": 0 })\n\n .build(),\n\n )\n", "file_path": "types/src/database.rs", "rank": 88, "score": 27.003356732325116 }, { "content": " }\n\n\n\n pub async fn all(&self) -> Result<Vec<crate::page::Page>> {\n\n self.conn\n\n .find(None, None)\n\n .await?\n\n .map(|doc| Ok(doc?))\n\n .collect()\n\n .await\n\n }\n\n}\n\n\n\npub struct AnnotationsDb {\n\n conn: mongodb::Collection<annotation::Annotation>,\n\n}\n\nimpl AnnotationsDb {\n\n pub async fn on_document(\n\n &self,\n\n document_id: &DocumentId,\n\n ) -> Result<Vec<annotation::Annotation>> {\n", "file_path": "types/src/database.rs", "rank": 89, "score": 26.148630620093414 }, { "content": " annot_db.on_document(&doc.meta.id),\n\n db.words_in_document(&doc.meta.id)\n\n )\n\n };\n\n let annotations = &annotations.unwrap();\n\n let words = &words.unwrap();\n\n let image_source = &image_source.unwrap().unwrap();\n\n let manifest_uri = &manifest_uri;\n\n Self::new(\n\n manifest_uri.clone(),\n\n doc.meta.title,\n\n \"The Newberry Library\".to_owned(),\n\n stream::iter(page_images.ids.into_iter().enumerate())\n\n .then(|(index, id)| async move {\n\n let image_url = format!(\"{}/{}\", image_source.url, id);\n\n let info_url = format!(\"{}/info.json\", image_url);\n\n let info = reqwest::get(info_url)\n\n .await\n\n .unwrap()\n\n .json::<ImageInfo>()\n", "file_path": "types/src/iiif.rs", "rank": 90, "score": 25.898608133386638 }, { "content": "}\n\n\n\n#[derive(Clone, Eq, PartialEq, Hash)]\n\npub struct PersonId(pub String);\n\n\n\n#[async_trait::async_trait]\n\nimpl Loader<PersonId> for Database {\n\n type Value = ContributorDetails;\n\n type Error = mongodb::error::Error;\n\n async fn load(&self, keys: &[PersonId]) -> Result<HashMap<PersonId, Self::Value>, Self::Error> {\n\n // Turn keys into strings for Mongo request.\n\n let keys: Vec<_> = keys.iter().map(|x| &x.0 as &str).collect();\n\n let items: Vec<Self::Value> =\n\n find_all_keys(self.client.collection(Self::PEOPLE), keys).await?;\n\n Ok(items\n\n .into_iter()\n\n .map(|tag| (PersonId(tag.full_name.clone()), tag))\n\n .collect())\n\n }\n\n}\n", "file_path": "types/src/database.rs", "rank": 91, "score": 25.862196866521423 }, { "content": "//! This piece of the project exposes a GraphQL endpoint that allows one to access DAILP data in a federated manner with specific queries.\n\n\n\nuse {\n\n dailp::async_graphql::{self, dataloader::DataLoader, guard::Guard, Context, FieldResult},\n\n dailp::{\n\n AnnotatedDoc, CherokeeOrthography, Database, MorphemeId, MorphemeReference, MorphemeTag,\n\n WordsInDocument,\n\n },\n\n mongodb::bson,\n\n serde::{Deserialize, Serialize},\n\n serde_with::{rust::StringWithSeparator, CommaSeparator},\n\n};\n\n\n\nlazy_static::lazy_static! {\n\n static ref MONGODB_PASSWORD: String = std::env::var(\"MONGODB_PASSWORD\").unwrap();\n\n}\n\n\n\n/// Home for all read-only queries\n\npub struct Query;\n\n\n", "file_path": "graphql/src/query.rs", "rank": 92, "score": 25.73998512927917 }, { "content": " let annotated = AnnotatedLine::many_from_semantic(&all_lines, &meta);\n\n let segments = AnnotatedLine::lines_into_segments(annotated, &meta.id, &meta.date);\n\n let doc = dailp::AnnotatedDoc::new(meta, segments);\n\n\n\n Ok(Some((doc, refs)))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\nasync fn graphql_mutate(\n\n method: &str,\n\n content_list: impl IntoIterator<Item = String>,\n\n) -> Result<()> {\n\n use itertools::Itertools as _;\n\n lazy_static::lazy_static! 
{\n\n static ref CLIENT: reqwest::Client = reqwest::Client::new();\n\n static ref ENDPOINT: String = format!(\n\n \"{}/graphql\",\n\n std::env::var(\"DAILP_API_URL\")\n", "file_path": "migration/src/main.rs", "rank": 93, "score": 25.628116443488686 }, { "content": " let from = MorphemeId::parse(from)?;\n\n let to = MorphemeId::parse(to)?;\n\n Some(Self::new(from, to))\n\n }\n\n}\n\n\n\n/// Uniquely identifies a particular form based on its parent [`DocumentId`],\n\n/// gloss, and index within that document.\n\n#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct MorphemeId {\n\n pub document_id: Option<DocumentId>,\n\n pub gloss: String,\n\n pub index: Option<i32>,\n\n}\n\nimpl MorphemeId {\n\n /// Make a new [`MorphemeId`]\n\n pub fn new(document_id: Option<DocumentId>, index: Option<i32>, gloss: String) -> Self {\n\n Self {\n\n document_id,\n", "file_path": "types/src/lexical.rs", "rank": 94, "score": 25.55216151744662 }, { "content": " .await?;\n\n forms.sort_by_key(|f| f.position.index);\n\n Ok(forms)\n\n }\n\n\n\n /// The number of words that belong to the given document ID.\n\n pub async fn count_words_in_document(&self, doc_id: &DocumentId) -> Result<u64> {\n\n let coll = self.client.collection::<AnnotatedForm>(Self::WORDS);\n\n Ok(coll\n\n .count_documents(\n\n bson::doc! { \"position.documentId\": bson::to_bson(doc_id)? },\n\n None,\n\n )\n\n .await?)\n\n }\n\n\n\n pub async fn word_search(&self, query: bson::Document) -> Result<Vec<AnnotatedForm>> {\n\n self.client\n\n .collection(Self::WORDS)\n\n .find(query, None)\n", "file_path": "types/src/database.rs", "rank": 95, "score": 25.46861533022224 }, { "content": " pub position: PositionInDocument,\n\n /// The date and time this form was recorded\n\n pub date_recorded: Option<Date>,\n\n /// A slice of audio associated with this word in the context of a document\n\n pub audio_track: Option<AudioSlice>,\n\n}\n\n\n\n#[async_graphql::ComplexObject]\n\nimpl AnnotatedForm {\n\n /// The root morpheme of the word.\n\n /// For example, a verb form glossed as \"he catches\" might have a root morpheme\n\n /// corresponding to \"catch.\"\n\n async fn root(&self) -> Option<&MorphemeSegment> {\n\n self.find_root()\n\n }\n\n\n\n async fn romanized_source(&self) -> Option<String> {\n\n self.simple_phonetics\n\n .as_ref()\n\n .map(|phonetic| crate::lexical::simple_phonetics_to_worcester(phonetic))\n", "file_path": "types/src/form.rs", "rank": 96, "score": 25.45901769997833 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// One full translation broken into several [`TranslationBlock`](#struct.TranslationBlock)s.\n\n#[derive(async_graphql::SimpleObject, Clone, Debug, Serialize, Deserialize, Default)]\n\npub struct Translation {\n\n /// List of blocks or paragraphs that, in this order, constitute the full\n\n /// translation.\n\n pub blocks: Vec<TranslationBlock>,\n\n}\n\n\n\n/// One block or paragraph of a translation document that should correspond to a\n\n/// block of original text. One block may contain several segments (or lines).\n\n#[derive(Clone, Debug, Serialize, Deserialize, Default)]\n\npub struct TranslationBlock {\n\n /// 0-based index of this block within the full translation.\n\n pub index: i32,\n\n /// Each segment represents a sentence or line in the translation.\n\n pub segments: Vec<String>,\n\n}\n\n\n", "file_path": "types/src/translation.rs", "rank": 97, "score": 24.594197566256195 }, { "content": "//! 
This module handles the retrieval of data from Google Drive Spreadsheets and\n\n//! transforming that data into a usable format based on the data types\n\n//! specified in modules under `dailp`.\n\n\n\nuse crate::audio::AudioRes;\n\nuse crate::translations::DocResult;\n\nuse anyhow::Result;\n\nuse dailp::{\n\n convert_udb, root_noun_surface_forms, root_verb_surface_forms, AnnotatedDoc, AnnotatedForm,\n\n AnnotatedPhrase, AnnotatedSeg, AudioSlice, BlockType, Contributor, Date, DocumentMetadata,\n\n LexicalConnection, LineBreak, MorphemeId, MorphemeSegment, PageBreak,\n\n};\n\nuse dailp::{PositionInDocument, SourceAttribution};\n\nuse log::{error, info, warn};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashMap, fs::File, io::Write, time::Duration};\n\n\n\n// Define the delimiters used in spreadsheets for marking phrases, blocks,\n\n// lines, and pages.\n\nconst PHRASE_START: &str = \"[\";\n", "file_path": "migration/src/spreadsheets.rs", "rank": 98, "score": 23.91304557830628 }, { "content": " #[serde(rename = \"@context\")]\n\n context: String,\n\n id: String,\n\n label: LanguageString,\n\n summary: LanguageString,\n\n metadata: Vec<MetadataEntry>,\n\n required_statement: MetadataEntry,\n\n behavior: Vec<String>,\n\n provider: Vec<Agent>,\n\n homepage: Vec<Text>,\n\n items: Vec<Canvas>,\n\n}\n\nimpl Manifest {\n\n /// Make a IIIF manifest from the given document\n\n pub async fn from_document(db: &Database, doc: AnnotatedDoc, manifest_uri: String) -> Self {\n\n let page_images = doc.meta.page_images.unwrap();\n\n let (image_source, annotations, words) = {\n\n let annot_db = db.annotations();\n\n join!(\n\n db.image_source(&page_images.source),\n", "file_path": "types/src/iiif.rs", "rank": 99, "score": 23.6277805623118 } ]
Rust
src/lib.rs
SolarLiner/buffers
43483f3e5401648babc85faf734907835280e7d3
use std::{fs, io}; use std::io::{Cursor, Error, Read, Write}; pub enum Input { Standard(io::Stdin), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum Output { Standard(io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum InputOutput { Standard(io::Stdin, io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } impl Input { pub fn stdin() -> Self { Input::Standard(io::stdin()) } pub fn memory() -> Self { Input::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .read(true) .open(path) .map(Input::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdin()), Some(fname) => Self::file(fname), } } } impl Read for Input { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { Input::Standard(ref mut s) => s.read(buf), Input::Memory(ref mut m) => m.read(buf), Input::File(ref mut f) => f.read(buf), } } } impl Output { pub fn stdout() -> Self { Output::Standard(io::stdout()) } pub fn memory() -> Self { Output::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .write(true) .create(true) .open(path) .map(Output::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdout()), Some(fname) => Self::file(fname), } } } impl Write for Output { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { match self { Output::Standard(ref mut s) => s.write(buf), Output::Memory(ref mut m) => m.write(buf), Output::File(ref mut f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { Output::Standard(ref mut s) => s.flush(), Output::Memory(ref mut m) => m.flush(), Output::File(ref mut f) => f.flush(), } } } impl InputOutput { pub fn stdio() -> InputOutput { InputOutput::Standard(io::stdin(), io::stdout()) } pub fn memory() -> InputOutput { InputOutput::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<InputOutput> { fs::OpenOptions::new().read(true).write(true).open(path).map(InputOutput::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<InputOutput> { match arg { None | Some("-") => Ok(Self::stdio()), Some(path) => Self::file(path), } } } impl Read for InputOutput { fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> { match self { InputOutput::Standard(stdin, _) => stdin.read(buf), InputOutput::Memory(c) => c.read(buf), InputOutput::File(f) => f.read(buf) } } } impl Write for InputOutput { fn write(&mut self, buf: &[u8]) -> Result<usize, Error> { match self { InputOutput::Standard(_, stdout) => stdout.write(buf), InputOutput::Memory(c) => c.write(buf), InputOutput::File(f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { InputOutput::Standard(_, stdout) => stdout.flush(), InputOutput::Memory(m) => m.flush(), InputOutput::File(f) => f.flush() } } }
use std::{fs, io}; use std::io::{Cursor, Error, Read, Write}; pub enum Input { Standard(io::Stdin), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum Output { Standard(io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } pub enum InputOutput { Standard(io::Stdin, io::Stdout), Memory(io::Cursor<Vec<u8>>), File(fs::File), } impl Input { pub fn stdin() -> Self { Input::Standard(io::stdin()) } pub fn memory() -> Self { Input::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .read(true) .open(path) .map(Input::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdin()), Some(fname) => Self::file(fname), } } } impl Read for Input { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { Input::Standard(ref mut s) => s.read(buf), Input::Memory(ref mut m) => m.read(buf), Input::File(ref mut f) => f.read(buf), } } } impl Output { pub fn stdout() -> Self { Output::Standard(io::stdout()) } pub fn memory() -> Self { Output::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<Self> { fs::OpenOptions::new() .write(true) .create(true) .open(path) .map(Output::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<Self> { match arg { None | Some("-") => Ok(Self::stdout()), Some(fname) => Self::file(fname), } } } impl Write for Output { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { match self { Output::Standard(ref mut s) => s.write(buf), Output::Memory(ref mut m) => m.write(buf), Output::File(ref mut f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> { match self { Output::Standard(ref mut s) => s.flush(), Output::Memory(ref mut m) => m.flush(), Output::File(ref mut f) => f.flush(), } } } impl InputOutput { pub fn stdio() -> InputOutput { InputOutput::Standard(io::stdin(), io::stdout()) } pub fn memory() -> InputOutput { InputOutput::Memory(Cursor::new(vec![])) } pub fn file(path: &str) -> io::Result<InputOutput> { fs::OpenOptions::new().read(true).write(true).open(path).map(InputOutput::File) } pub fn from_arg(arg: Option<&str>) -> io::Result<InputOutput> { match arg { None | Some("-") => Ok(Self::stdio()), Some(path) => Self::file(path), } } } impl Read for InputOutput { fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> { match self { InputOutput::Standard(stdin, _) => stdin.read(buf), InputOutput::Memory(c) => c.read(buf), InputOutput::File(f) => f.read(buf) } } } impl Write for InputOutput { fn write(&mut self, buf: &[u8]) -> Result<usize, Error> { match self { InputOutput::Standard(_, stdout) => stdout.write(buf), InputOutput::Memory(c) => c.write(buf), InputOutput::File(f) => f.write(buf), } } fn flush(&mut self) -> Result<(), Error> {
} }
match self { InputOutput::Standard(_, stdout) => stdout.flush(), InputOutput::Memory(m) => m.flush(), InputOutput::File(f) => f.flush() }
if_condition
[ { "content": "# buffers\n\nCollection of unified buffers from stdio, file and memory buffers.\n\n\n\nThe `buffers` crate unifies standard IO, memory and file buffers into a unified type, allowing\n\nto effectively leave the type of buffer used to the user.\n\n\n\n## How to use\n\n\n\nThe `buffers` crate exposes three types; one for input, one for output, and one for duplex in/out\n\noperations. For convenience, each type has a `from_arg` constructor that takes in the output of\n\na commandline parser (such as `clap`) and returns the buffer of the appropriate type (see the\n\nfunction docs for more details).\n\n\n\nIO Read/Write traits are implemented for the types meaning you can use those wrapper types as a\n\ndrop-in replacement of \"regular\" buffers.\n\n\n\n## Example\n\n\n\n```rust\n\nuse clap::{App, Arg};\n\nuse buffers::{Input, Output};\n\n\n\nlet matches = App::new(\"app\")\n\n .arg(Arg::with_name(\"input\").index(1))\n\n .arg(Arg::with_name(\"output\").index(2))\n\n .get_matches();\n\nlet mut input_buf = Input::from_arg(matches.value_of(\"input\"));\n\nlet mut output_buf = Output::from_arg(matches.value_of(\"output\"));\n\nparse_input(&mut input_buf).and_then(|ast| transpile(ast, &mut output_buf));\n", "file_path": "README.md", "rank": 8, "score": 9.916948952271477 } ]
Rust
src/locustdb.rs
virattara/LocustDB
42945df6f4313b9dbded35bcd2d3018a003af003
use std::str; use std::sync::Arc; use std::error::Error; use std::path::{Path, PathBuf}; use futures::channel::oneshot; use num_cpus; use crate::QueryError; use crate::QueryResult; use crate::disk_store::interface::*; use crate::disk_store::noop_storage::NoopStorage; use crate::engine::query_task::QueryTask; use crate::ingest::colgen::GenTable; use crate::ingest::csv_loader::{CSVIngestionTask, Options as LoadOptions}; use crate::mem_store::*; use crate::scheduler::*; use crate::syntax::parser; pub struct LocustDB { inner_locustdb: Arc<InnerLocustDB> } impl LocustDB { pub fn memory_only() -> LocustDB { LocustDB::new(&Options::default()) } pub fn new(opts: &Options) -> LocustDB { let disk_store = opts.db_path.as_ref() .map(|path| LocustDB::persistent_storage(path)) .unwrap_or_else(|| Arc::new(NoopStorage)); let locustdb = Arc::new(InnerLocustDB::new(disk_store, opts)); InnerLocustDB::start_worker_threads(&locustdb); LocustDB { inner_locustdb: locustdb } } pub async fn run_query(&self, query: &str, explain: bool, show: Vec<usize>) -> Result<QueryResult, oneshot::Canceled> { let (sender, receiver) = oneshot::channel(); let query = match parser::parse_query(query) { Ok(query) => query, Err(err) => return Ok(Err(err)), }; let mut data = match self.inner_locustdb.snapshot(&query.table) { Some(data) => data, None => return Ok(Err( QueryError::NotImplemented(format!("Table {} does not exist!", &query.table)))), }; if self.inner_locustdb.opts().seq_disk_read { self.inner_locustdb.disk_read_scheduler() .schedule_sequential_read(&mut data, &query.find_referenced_cols(), self.inner_locustdb.opts().readahead); let ldb = self.inner_locustdb.clone(); let (read_data, _) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); } let query_task = QueryTask::new( query, explain, show, data, self.inner_locustdb.disk_read_scheduler().clone(), SharedSender::new(sender) ); match query_task { Ok(task) => { self.schedule(task); Ok(receiver.await?) } Err(err) => Ok(Err(err)), } } pub async fn load_csv(&self, options: LoadOptions) -> Result<(), Box<dyn Error>> { let (sender, receiver) = oneshot::channel(); let task = CSVIngestionTask::new( options, self.inner_locustdb.clone(), SharedSender::new(sender)); let _ = self.schedule(task); Ok(receiver.await??) 
} pub async fn gen_table(&self, opts: GenTable) -> Result<(), oneshot::Canceled> { let mut receivers = Vec::new(); let opts = Arc::new(opts); for partition in 0..opts.partitions { let opts = opts.clone(); let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.gen_partition(&opts, partition as u64)); let _ = self.schedule(task); receivers.push(receiver); } for receiver in receivers { receiver.await?; } Ok(()) } pub fn ast(&self, query: &str) -> String { match parser::parse_query(query) { Ok(query) => format!("{:#?}", query), Err(err) => format!("{:?}", err), } } pub async fn bulk_load(&self) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { for table in self.inner_locustdb.full_snapshot() { self.inner_locustdb.disk_read_scheduler() .schedule_bulk_load(table, self.inner_locustdb.opts().readahead); } let mut receivers = Vec::new(); for _ in 0..self.inner_locustdb.opts().read_threads { let ldb = self.inner_locustdb.clone(); let (read_data, receiver) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); receivers.push(receiver); } for receiver in receivers { receiver.await?; } self.mem_tree(2).await } pub fn recover(&self) { self.inner_locustdb.drop_pending_tasks(); InnerLocustDB::start_worker_threads(&self.inner_locustdb); } pub async fn mem_tree(&self, depth: usize) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.mem_tree(depth)); let _ = self.schedule(task); receiver.await } pub async fn table_stats(&self) -> Result<Vec<TableStats>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.stats()); let _ = self.schedule(task); receiver.await } pub fn schedule<T: Task + 'static>(&self, task: T) { self.inner_locustdb.schedule(task) } #[cfg(feature = "enable_rocksdb")] pub fn persistent_storage<P: AsRef<Path>>(db_path: P) -> Arc<dyn DiskStore> { use crate::disk_store::rocksdb; Arc::new(rocksdb::RocksDB::new(db_path)) } #[cfg(not(feature = "enable_rocksdb"))] pub fn persistent_storage<P: AsRef<Path>>(_: P) -> Arc<dyn DiskStore> { panic!("RocksDB storage backend is not enabled in this build of LocustDB. Create db with `memory_only`, or set the `enable_rocksdb` feature.") } } impl Drop for LocustDB { fn drop(&mut self) { self.inner_locustdb.stop(); } } #[derive(Clone)] pub struct Options { pub threads: usize, pub read_threads: usize, pub db_path: Option<PathBuf>, pub mem_size_limit_tables: usize, pub mem_lz4: bool, pub readahead: usize, pub seq_disk_read: bool, } impl Default for Options { fn default() -> Options { Options { threads: num_cpus::get(), read_threads: num_cpus::get(), db_path: None, mem_size_limit_tables: 8 * 1024 * 1024 * 1024, mem_lz4: true, readahead: 256 * 1024 * 1024, seq_disk_read: false, } } }
use std::str; use std::sync::Arc; use std::error::Error; use std::path::{Path, PathBuf}; use futures::channel::oneshot; use num_cpus; use crate::QueryError; use crate::QueryResult; use crate::disk_store::interface::*; use crate::disk_store::noop_storage::NoopStorage; use crate::engine::query_task::QueryTask; use crate::ingest::colgen::GenTable; use crate::ingest::csv_loader::{CSVIngestionTask, Options as LoadOptions}; use crate::mem_store::*; use crate::scheduler::*; use crate::syntax::parser; pub struct LocustDB { inner_locustdb: Arc<InnerLocustDB> } impl LocustDB { pub fn memory_only() -> LocustDB { LocustDB::new(&Options::default()) } pub fn new(opts: &Options) -> LocustDB { let disk_store = opts.db_path.as_ref() .map(|path| LocustDB::persistent_storage(path)) .
pub async fn run_query(&self, query: &str, explain: bool, show: Vec<usize>) -> Result<QueryResult, oneshot::Canceled> { let (sender, receiver) = oneshot::channel(); let query = match parser::parse_query(query) { Ok(query) => query, Err(err) => return Ok(Err(err)), }; let mut data = match self.inner_locustdb.snapshot(&query.table) { Some(data) => data, None => return Ok(Err( QueryError::NotImplemented(format!("Table {} does not exist!", &query.table)))), }; if self.inner_locustdb.opts().seq_disk_read { self.inner_locustdb.disk_read_scheduler() .schedule_sequential_read(&mut data, &query.find_referenced_cols(), self.inner_locustdb.opts().readahead); let ldb = self.inner_locustdb.clone(); let (read_data, _) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); } let query_task = QueryTask::new( query, explain, show, data, self.inner_locustdb.disk_read_scheduler().clone(), SharedSender::new(sender) ); match query_task { Ok(task) => { self.schedule(task); Ok(receiver.await?) } Err(err) => Ok(Err(err)), } } pub async fn load_csv(&self, options: LoadOptions) -> Result<(), Box<dyn Error>> { let (sender, receiver) = oneshot::channel(); let task = CSVIngestionTask::new( options, self.inner_locustdb.clone(), SharedSender::new(sender)); let _ = self.schedule(task); Ok(receiver.await??) } pub async fn gen_table(&self, opts: GenTable) -> Result<(), oneshot::Canceled> { let mut receivers = Vec::new(); let opts = Arc::new(opts); for partition in 0..opts.partitions { let opts = opts.clone(); let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.gen_partition(&opts, partition as u64)); let _ = self.schedule(task); receivers.push(receiver); } for receiver in receivers { receiver.await?; } Ok(()) } pub fn ast(&self, query: &str) -> String { match parser::parse_query(query) { Ok(query) => format!("{:#?}", query), Err(err) => format!("{:?}", err), } } pub async fn bulk_load(&self) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { for table in self.inner_locustdb.full_snapshot() { self.inner_locustdb.disk_read_scheduler() .schedule_bulk_load(table, self.inner_locustdb.opts().readahead); } let mut receivers = Vec::new(); for _ in 0..self.inner_locustdb.opts().read_threads { let ldb = self.inner_locustdb.clone(); let (read_data, receiver) = Task::from_fn(move || ldb.disk_read_scheduler().service_reads(&ldb)); let _ = self.inner_locustdb.schedule(read_data); receivers.push(receiver); } for receiver in receivers { receiver.await?; } self.mem_tree(2).await } pub fn recover(&self) { self.inner_locustdb.drop_pending_tasks(); InnerLocustDB::start_worker_threads(&self.inner_locustdb); } pub async fn mem_tree(&self, depth: usize) -> Result<Vec<MemTreeTable>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.mem_tree(depth)); let _ = self.schedule(task); receiver.await } pub async fn table_stats(&self) -> Result<Vec<TableStats>, oneshot::Canceled> { let inner = self.inner_locustdb.clone(); let (task, receiver) = Task::from_fn(move || inner.stats()); let _ = self.schedule(task); receiver.await } pub fn schedule<T: Task + 'static>(&self, task: T) { self.inner_locustdb.schedule(task) } #[cfg(feature = "enable_rocksdb")] pub fn persistent_storage<P: AsRef<Path>>(db_path: P) -> Arc<dyn DiskStore> { use crate::disk_store::rocksdb; Arc::new(rocksdb::RocksDB::new(db_path)) } #[cfg(not(feature = "enable_rocksdb"))] pub fn persistent_storage<P: AsRef<Path>>(_: P) -> 
Arc<dyn DiskStore> { panic!("RocksDB storage backend is not enabled in this build of LocustDB. Create db with `memory_only`, or set the `enable_rocksdb` feature.") } } impl Drop for LocustDB { fn drop(&mut self) { self.inner_locustdb.stop(); } } #[derive(Clone)] pub struct Options { pub threads: usize, pub read_threads: usize, pub db_path: Option<PathBuf>, pub mem_size_limit_tables: usize, pub mem_lz4: bool, pub readahead: usize, pub seq_disk_read: bool, } impl Default for Options { fn default() -> Options { Options { threads: num_cpus::get(), read_threads: num_cpus::get(), db_path: None, mem_size_limit_tables: 8 * 1024 * 1024 * 1024, mem_lz4: true, readahead: 256 * 1024 * 1024, seq_disk_read: false, } } }
unwrap_or_else(|| Arc::new(NoopStorage)); let locustdb = Arc::new(InnerLocustDB::new(disk_store, opts)); InnerLocustDB::start_worker_threads(&locustdb); LocustDB { inner_locustdb: locustdb } }
function_block-function_prefix_line
[ { "content": "fn parse_type(field_ident: &Ident, type_def: String) -> Option<(Expr, Option<FnArg>)> {\n\n lazy_static! {\n\n // E.g. `data` in `( t = \"data.nullable\" )`\n\n static ref T: Regex = Regex::new(r#\"t = \"(.*)\"\"#).unwrap();\n\n static ref BASE: Regex = Regex::new(r#\"base=([^;]*)\"#).unwrap();\n\n static ref NULL: Regex = Regex::new(r#\"null=([^;]*)\"#).unwrap();\n\n\n\n }\n\n\n\n if let Some(t) = T.captures(&type_def) {\n\n let t = t.get(1).unwrap().as_str();\n\n\n\n let base = BASE.captures(t)\n\n .expect(&format!(\"No `base` specified for {}\", field_ident))\n\n .get(1).unwrap().as_str();\n\n let mut fn_input = None;\n\n let base_type: Expr = if base == \"provided\" {\n\n let provided_type_ident = Ident::new(&format!(\"{}_type\", field_ident), Span::call_site());\n\n fn_input = Some(parse_quote!(#provided_type_ident: EncodingType));\n\n parse_quote!(#provided_type_ident)\n", "file_path": "locustdb-derive/src/ast_builder.rs", "rank": 0, "score": 185581.85983993846 }, { "content": "fn types(t: &Ident) -> Option<Vec<Type>> {\n\n match t.to_string().as_ref() {\n\n \"Str\" => Some(vec![Type::Str]),\n\n \"IntegerNoU64\" => Some(vec![Type::U8, Type::U16, Type::U32, Type::I64]),\n\n \"Integer\" => Some(vec![Type::U8, Type::U16, Type::U32, Type::U64, Type::I64]),\n\n \"NullableInteger\" => Some(vec![Type::NullableU8, Type::NullableU16, Type::NullableU32, Type::NullableI64]),\n\n \"Primitive\" => Some(vec![Type::U8, Type::U16, Type::U32, Type::U64, Type::I64, Type::Str, Type::OptStr]),\n\n \"NullablePrimitive\" => Some(vec![Type::NullableU8, Type::NullableU16, Type::NullableU32, Type::NullableI64, Type::NullableStr]),\n\n \"PrimitiveUSize\" => Some(vec![Type::U8, Type::U16, Type::U32, Type::U64, Type::I64, Type::Str, Type::USize]),\n\n \"PrimitiveNoU64\" => Some(vec![Type::U8, Type::U16, Type::U32, Type::I64, Type::Str]),\n\n \"Const\" => Some(vec![Type::ScalarI64, Type::ScalarStr]),\n\n \"ScalarI64\" => Some(vec![Type::ScalarI64]),\n\n \"ScalarStr\" => Some(vec![Type::ScalarStr]),\n\n \"Aggregator\" => Some(vec![Type::AggregatorCount, Type::AggregatorSum, Type::AggregatorMax, Type::AggregatorMin]),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "locustdb-derive/src/reify_types.rs", "rank": 1, "score": 175918.54714114152 }, { "content": "#[proc_macro]\n\npub fn reify_types(input: TokenStream) -> TokenStream {\n\n reify_types::reify_types(input)\n\n}\n\n\n", "file_path": "locustdb-derive/src/lib.rs", "rank": 2, "score": 171964.34798863527 }, { "content": "#[proc_macro_derive(EnumSyntax)]\n\npub fn enum_syntax(input: TokenStream) -> TokenStream {\n\n enum_syntax::enum_syntax(input)\n\n}\n\n\n", "file_path": "locustdb-derive/src/lib.rs", "rank": 3, "score": 171964.34798863527 }, { "content": "pub fn enum_syntax(input: TokenStream) -> TokenStream {\n\n // Parse the input tokens into a syntax tree\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n\n\n if let Data::Enum(DataEnum { variants, .. 
}) = input.data {\n\n let enum_ident = input.ident;\n\n let mut constructors = Vec::<Item>::new();\n\n let boxed: Type = parse_quote!(Box<#enum_ident>);\n\n let string: Type = parse_quote!(String);\n\n for variant in variants.into_iter() {\n\n if let Fields::Named(fields) = variant.fields {\n\n let variant_ident = variant.ident.clone();\n\n let ident_snake = studley_to_snake(variant.ident);\n\n let mut fn_inputs = Vec::<FnArg>::new();\n\n let mut struct_args = Vec::<FieldValue>::new();\n\n let mut fn_generics = Vec::<GenericArgument>::new();\n\n for field in fields.named.into_iter() {\n\n let field_ident = field.ident.clone().unwrap();\n\n if field.ty == boxed {\n", "file_path": "locustdb-derive/src/enum_syntax.rs", "rank": 4, "score": 169870.0450962492 }, { "content": "pub fn reify_types(input: TokenStream) -> TokenStream {\n\n let TypeExpand {\n\n name,\n\n productions,\n\n } = parse_macro_input!(input as TypeExpand);\n\n\n\n let mut all_match_arms = Vec::new();\n\n let mut unified_variable_groups = Vec::new();\n\n let mut type_equalities = Vec::<Stmt>::new();\n\n\n\n for Production { specs, expr } in productions {\n\n let mut type_domains = Vec::with_capacity(specs.len());\n\n let mut variable_groups = Vec::with_capacity(specs.len());\n\n for Declaration { variables, t } in specs {\n\n if variables.len() > 1 {\n\n let v0 = variables[0].clone();\n\n for v in &variables[1..] {\n\n let name0 = LitStr::new(&format!(\"{}\", &v0), v0.span());\n\n let name1 = LitStr::new(&format!(\"{}\", &v), v.span());\n\n type_equalities.push(parse_quote! {\n", "file_path": "locustdb-derive/src/reify_types.rs", "rank": 5, "score": 169870.0450962492 }, { "content": "pub fn ast_builder(input: TokenStream) -> TokenStream {\n\n // Parse the input tokens into a syntax tree\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n if let Data::Enum(DataEnum { variants, .. 
}) = input.data {\n\n let enum_ident = input.ident;\n\n let mut productions = Vec::<Item>::new();\n\n let string: Type = parse_quote!(String);\n\n for (index, variant) in variants.into_iter().enumerate() {\n\n if let Fields::Named(fields) = variant.fields {\n\n let variant_ident = variant.ident.clone();\n\n let ident_snake = studley_to_snake(variant.ident);\n\n let mut fn_inputs = Vec::<FnArg>::new();\n\n let mut new_buffers = Vec::<Stmt>::new();\n\n let mut struct_args = Vec::<FieldValue>::new();\n\n let mut result = Vec::<Ident>::new();\n\n let mut cache_retrieve = Vec::<Expr>::new();\n\n let mut result_type = Vec::<Type>::new();\n\n let mut output: Expr = parse_quote!(None);\n\n let mut hashes = Vec::<Stmt>::new();\n", "file_path": "locustdb-derive/src/ast_builder.rs", "rank": 6, "score": 169870.0450962492 }, { "content": "pub fn ingest_file(ldb: &InnerLocustDB, opts: &Options) -> Result<(), String> {\n\n // Can't combine these two branches because csv::Reader takes a type param which differs for creating from Reader/File\n\n if opts.unzip {\n\n let f = File::open(&opts.filename).map_err(|x| x.to_string())?;\n\n let decoded = GzDecoder::new(f);\n\n let mut reader = csv::ReaderBuilder::new()\n\n .has_headers(opts.colnames.is_none())\n\n .from_reader(decoded);\n\n let headers = match opts.colnames {\n\n Some(ref colnames) => colnames.clone(),\n\n None => reader.headers().unwrap().iter().map(str::to_owned).collect()\n\n };\n\n auto_ingest(ldb, reader.records().map(|r| r.unwrap()), &headers, opts)\n\n } else {\n\n let mut reader = csv::ReaderBuilder::new()\n\n .has_headers(opts.colnames.is_none())\n\n .from_path(&opts.filename)\n\n .map_err(|x| x.to_string())?;\n\n let headers = match opts.colnames {\n\n Some(ref colnames) => colnames.clone(),\n\n None => reader.headers().unwrap().iter().map(str::to_owned).collect()\n\n };\n\n auto_ingest(ldb, reader.records().map(|r| r.unwrap()), &headers, opts)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/csv_loader.rs", "rank": 7, "score": 159498.57466341925 }, { "content": "#[proc_macro_derive(ASTBuilder, attributes(newstyle, input, internal, output, nohash))]\n\npub fn ast_builder(input: TokenStream) -> TokenStream { ast_builder::ast_builder(input) }\n", "file_path": "locustdb-derive/src/lib.rs", "rank": 8, "score": 155500.88127446073 }, { "content": "pub fn ingest_file<P: AsRef<Path>>(file_path: P, tablename: &str) -> Options {\n\n Options::new(file_path, tablename)\n\n .with_schema(&nyc_schema())\n\n}\n\n\n", "file_path": "src/ingest/nyc_taxi_data.rs", "rank": 9, "score": 152167.13823963286 }, { "content": "pub fn partition_sparse(\n\n null_probability: f64,\n\n generator: Box<dyn ColumnGenerator>) -> Box<dyn ColumnGenerator> {\n\n Box::new(PartitionSparse {\n\n null_probability,\n\n generator,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 10, "score": 150883.61238723737 }, { "content": "pub fn ingest_reduced_file<P: AsRef<Path>>(file_path: P, tablename: &str) -> Options {\n\n Options::new(file_path, tablename)\n\n .with_schema(&reduced_nyc_schema())\n\n}\n\n\n", "file_path": "src/ingest/nyc_taxi_data.rs", "rank": 11, "score": 150399.26843190685 }, { "content": "pub fn string_markov_chain(\n\n elements: Vec<String>,\n\n transition_probabilities: Vec<Vec<f64>>) -> Box<dyn ColumnGenerator> {\n\n Box::new(MarkovChain {\n\n elem: elements,\n\n p_transition: transition_probabilities,\n\n s: PhantomData::<StringColBuilder>,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 12, "score": 149135.89244379898 }, { "content": 
"pub fn int_markov_chain(\n\n elements: Vec<i64>,\n\n transition_probabilities: Vec<Vec<f64>>) -> Box<dyn ColumnGenerator> {\n\n Box::new(MarkovChain {\n\n elem: elements.into_iter().map(Some).collect(),\n\n p_transition: transition_probabilities,\n\n s: PhantomData::<IntColBuilder>,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 13, "score": 149135.89244379898 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn compile_grouping_key(\n\n exprs: &Vec<Expr>,\n\n filter: Filter,\n\n columns: &HashMap<String, Arc<dyn DataSource>>,\n\n partition_len: usize,\n\n planner: &mut QueryPlanner)\n\n -> Result<((TypedBufferRef, bool), i64, Vec<(TypedBufferRef, Type)>, TypedBufferRef), QueryError> {\n\n if exprs.is_empty() {\n\n let mut plan = planner.constant_expand(0, partition_len, EncodingType::U8);\n\n plan = match filter {\n\n Filter::U8(filter) => planner.filter(plan, filter),\n\n Filter::NullableU8(filter) => planner.nullable_filter(plan, filter),\n\n Filter::Indices(indices) => planner.select(plan, indices),\n\n Filter::None => plan,\n\n };\n\n Ok((\n\n (plan, true),\n\n 1,\n\n vec![],\n\n planner.buffer_provider.named_buffer(\"empty_group_by\", EncodingType::Null)\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 14, "score": 145841.46289689053 }, { "content": "pub fn nyc_schema() -> String {\n\n \"trip_id:i,\\\n\n vendor_id:s,\\\n\n pickup_datetime:i.date,\\\n\n dropoff_datetime:i.date,\\\n\n store_and_fwd_flag:s,\\\n\n rate_code_id:s,\\\n\n pickup_longitude:s,\\\n\n pickup_latitude:s,\\\n\n dropoff_longitude:s,\\\n\n dropoff_latitude:s,\\\n\n passenger_count:i,\\\n\n trip_distance:i.1000,\\\n\n fare_amount:i.100,\\\n\n extra:i.100,\\\n\n mta_tax:i.100,\\\n\n tip_amount:i.100,\\\n\n tolls_amount:i.100,\\\n\n ehail_fee:i.100,\\\n\n improvement_surcharge:i.100,\\\n", "file_path": "src/ingest/nyc_taxi_data.rs", "rank": 15, "score": 142578.61468889713 }, { "content": "pub fn reduced_nyc_schema() -> String {\n\n \"trip_id:i,\\\n\n vendor_id:s,\\\n\n pickup_datetime:i.date,\\\n\n dropoff_datetime:,\\\n\n store_and_fwd_flag:s,\\\n\n rate_code_id:s,\\\n\n pickup_longitude:,\\\n\n pickup_latitude:,\\\n\n dropoff_longitude:,\\\n\n dropoff_latitude:,\\\n\n passenger_count:i,\\\n\n trip_distance:i.1000,\\\n\n fare_amount:,\\\n\n extra:,\\\n\n mta_tax:i.100,\\\n\n tip_amount:,\\\n\n tolls_amount:,\\\n\n ehail_fee:i.100,\\\n\n improvement_surcharge:i.100,\\\n", "file_path": "src/ingest/nyc_taxi_data.rs", "rank": 16, "score": 141024.3437662254 }, { "content": "pub fn multiply_by_1000(field: &str) -> i64 {\n\n if let Ok(int) = field.parse::<i64>() {\n\n int * 1000\n\n } else if let Ok(float) = field.parse::<f64>() {\n\n (float * 1000.0) as i64\n\n } else if field == \"\" {\n\n 0\n\n } else {\n\n panic!(\"invalid field {}\", &field)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/extractor.rs", "rank": 17, "score": 138584.00917606498 }, { "content": "pub fn multiply_by_100(field: &str) -> i64 {\n\n if let Ok(int) = field.parse::<i64>() {\n\n int * 100\n\n } else if let Ok(float) = field.parse::<f64>() {\n\n (float * 100.0) as i64\n\n } else if field == \"\" {\n\n 0\n\n } else {\n\n panic!(\"invalid field {}\", &field)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/extractor.rs", "rank": 18, "score": 138584.00917606498 }, { "content": "pub fn int(field: &str) -> i64 {\n\n if let Ok(int) = field.parse::<i64>() {\n\n int\n\n } else if field == \"\" {\n\n 0\n\n } else {\n\n panic!(\"can't parse {} as integer\", &field)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/extractor.rs", 
"rank": 19, "score": 138584.00917606498 }, { "content": "pub fn date_time(field: &str) -> i64 {\n\n Utc.datetime_from_str(field, \"%Y-%m-%d %H:%M:%S\")\n\n .unwrap_or_else(|_| panic!(\"Failed to parse {} as date time\", &field))\n\n .timestamp()\n\n}", "file_path": "src/ingest/extractor.rs", "rank": 20, "score": 136968.98664800564 }, { "content": "pub fn ns(quantity: usize) -> UnitFormatter {\n\n UnitFormatter::new(quantity as f64, 1000, 5, SI_PREFIXES, \"s\".to_string())\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 21, "score": 136968.98664800564 }, { "content": "pub fn second(quantity: usize) -> UnitFormatter {\n\n UnitFormatter::new(quantity as f64, 1000, 8, SI_PREFIXES, \"s\".to_string())\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 22, "score": 136968.98664800564 }, { "content": "pub fn bite(quantity: usize) -> UnitFormatter {\n\n UnitFormatter::new(quantity as f64, 1024, 0, IEC_PREFIXES, \"B\".to_string())\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 23, "score": 136968.98664800564 }, { "content": "pub fn billion(quantity: f64) -> UnitFormatter {\n\n short_scale(quantity * 1_000_000_000.0)\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 24, "score": 136968.98664800564 }, { "content": "pub fn percent(quantity: f64) -> UnitFormatter {\n\n UnitFormatter::new(quantity, 2, 0, NO_PREFIX, \"%\".to_string())\n\n}\n\n\n\n\n\npub struct UnitFormatter {\n\n quantity: f64,\n\n power: usize,\n\n ratio: usize,\n\n prefixes: &'static [&'static str],\n\n suffix: String,\n\n}\n\n\n\nimpl UnitFormatter {\n\n pub fn new(quantity: f64,\n\n ratio: usize,\n\n power: usize,\n\n prefixes: &'static [&'static str],\n\n suffix: String) -> UnitFormatter {\n\n UnitFormatter { quantity, power, ratio, prefixes, suffix }\n", "file_path": "src/unit_fmt.rs", "rank": 25, "score": 136968.98664800564 }, { "content": "pub fn byte(quantity: f64) -> UnitFormatter {\n\n UnitFormatter::new(quantity, 1000, 8, SI_PREFIXES, \"B\".to_string())\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 26, "score": 136968.98664800564 }, { "content": "pub fn string_pack_codec() -> Vec<CodecOp> {\n\n vec![CodecOp::UnpackStrings]\n\n}", "file_path": "src/mem_store/strings.rs", "rank": 27, "score": 136584.95766181755 }, { "content": "pub fn short_scale(quantity: f64) -> UnitFormatter {\n\n UnitFormatter::new(quantity, 1000, 0, SHORT_SCALE, \"\".to_string())\n\n}\n\n\n", "file_path": "src/unit_fmt.rs", "rank": 28, "score": 135414.7157253339 }, { "content": "pub fn incrementing_int() -> Box<dyn ColumnGenerator> {\n\n Box::new(IncrementingInteger)\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 29, "score": 135414.7157253339 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn prepare_hashmap_grouping(raw_grouping_key: TypedBufferRef,\n\n columns: usize,\n\n max_cardinality: usize,\n\n planner: &mut QueryPlanner)\n\n -> Result<(Option<TypedBufferRef>,\n\n TypedBufferRef,\n\n bool,\n\n BufferRef<Scalar<i64>>), QueryError> {\n\n let (unique_out, grouping_key_out, cardinality_out) =\n\n if raw_grouping_key.tag == EncodingType::ValRows {\n\n let (u, g, c) = planner.hash_map_grouping_val_rows(raw_grouping_key.val_rows()?, columns, max_cardinality);\n\n (u.into(), g, c)\n\n } else {\n\n planner.hash_map_grouping(raw_grouping_key, max_cardinality)\n\n };\n\n Ok((Some(unique_out),\n\n grouping_key_out.into(),\n\n false,\n\n cardinality_out))\n\n}\n\n\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 30, "score": 131112.0974711199 }, { "content": "pub fn prepare_aggregation(mut plan: 
TypedBufferRef,\n\n plan_type: Type,\n\n grouping_key: TypedBufferRef,\n\n max_index: BufferRef<Scalar<i64>>,\n\n aggregator: Aggregator,\n\n planner: &mut QueryPlanner)\n\n -> Result<(TypedBufferRef, Type), QueryError> {\n\n Ok(match aggregator {\n\n Aggregator::Count => {\n\n let plan = if plan.tag == EncodingType::ScalarI64 { grouping_key } else { plan };\n\n (planner.aggregate(plan, grouping_key, max_index, Aggregator::Count, EncodingType::U32),\n\n Type::encoded(Codec::integer_cast(EncodingType::U32)))\n\n }\n\n Aggregator::Sum => {\n\n if !plan_type.is_summation_preserving() {\n\n plan = plan_type.codec.unwrap().decode(plan, planner);\n\n }\n\n // PERF: determine dense groupings\n\n (planner.checked_aggregate(plan, grouping_key, max_index, Aggregator::Sum, EncodingType::I64),\n\n Type::unencoded(BasicType::Integer))\n\n }\n\n Aggregator::Max | Aggregator::Min => {\n\n // PERF: don't always have to decode before taking max/min, and after is more efficient (e.g. dict encoded strings)\n\n plan = plan_type.codec.unwrap().decode(plan, planner);\n\n (planner.aggregate(plan, grouping_key, max_index, aggregator, EncodingType::I64),\n\n Type::unencoded(BasicType::Integer))\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 31, "score": 131083.9644664256 }, { "content": "pub fn fast_build_string_column<'a, T>(name: &str,\n\n strings: T,\n\n len: usize,\n\n lhex: bool,\n\n uhex: bool,\n\n total_bytes: usize,\n\n present: Option<Vec<u8>>)\n\n -> Arc<Column> where T: Iterator<Item=&'a str> + Clone {\n\n let mut unique_values = HashSetSea::default();\n\n for s in strings.clone() {\n\n unique_values.insert(s);\n\n // PERF: is 2 the right constant? and should probably also depend on the length of the strings\n\n // TODO(#103): len > 1000 || name == \"string_packed\" is a hack to make tests use dictionary encoding. 
Remove once we are able to group by string packed columns.\n\n if unique_values.len() == len / DICTIONARY_RATIO {\n\n let (mut codec, data) = if (lhex || uhex) && total_bytes / len > 5 {\n\n let packed = PackedBytes::from_iterator(strings.map(|s| hex::decode(s).unwrap()));\n\n (vec![CodecOp::UnhexpackStrings(uhex, total_bytes)], DataSection::U8(packed.into_vec()))\n\n } else {\n\n let packed = PackedStrings::from_iterator(strings);\n\n (string_pack_codec(), DataSection::U8(packed.into_vec()))\n", "file_path": "src/mem_store/strings.rs", "rank": 32, "score": 130046.53566761845 }, { "content": "fn get_offset(offset: Option<Offset>) -> Result<u64, QueryError> {\n\n match offset {\n\n None => Ok(0),\n\n Some(offset) => match offset.value {\n\n ASTNode::Value(Value::Number(rows)) => Ok(rows.parse::<u64>().unwrap()),\n\n expr => Err(QueryError::ParseError(format!(\n\n \"Invalid expression in offset clause: Expected constant integer, got {:?}\",\n\n expr,\n\n ))),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/syntax/parser.rs", "rank": 33, "score": 129702.40120661489 }, { "content": "pub fn decoder(data: &[u8]) -> lz4::Decoder<&[u8]> {\n\n lz4::Decoder::new(data).unwrap()\n\n}\n\n\n", "file_path": "src/mem_store/lz4.rs", "rank": 34, "score": 129268.31372163372 }, { "content": "pub fn order_preserving((plan, t): (TypedBufferRef, Type),\n\n planner: &mut QueryPlanner) -> (TypedBufferRef, Type) {\n\n if t.is_order_preserving() {\n\n (plan, t)\n\n } else {\n\n let new_type = t.decoded();\n\n (t.codec.unwrap().decode(plan, planner), new_type)\n\n }\n\n}\n\n\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 35, "score": 128655.28595463326 }, { "content": "fn get_limit(limit: Option<ASTNode>) -> Result<u64, QueryError> {\n\n match limit {\n\n Some(ASTNode::Value(Value::Number(int))) => Ok(int.parse::<u64>().unwrap()),\n\n None => Ok(100),\n\n _ => Err(QueryError::NotImplemented(format!(\n\n \"Invalid expression in limit clause: {:?}\",\n\n limit\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/syntax/parser.rs", "rank": 36, "score": 128304.45276886382 }, { "content": "// Convert sqlparser-rs `ASTNode` to LocustDB's `Query`\n\npub fn parse_query(query: &str) -> Result<Query, QueryError> {\n\n let dialect = GenericDialect {};\n\n let mut ast = Parser::parse_sql(&dialect, query).map_err(|e| match e {\n\n ParserError::ParserError(e_str) => QueryError::ParseError(e_str),\n\n _ => fatal!(\"{:?}\", e),\n\n })?;\n\n if ast.len() > 1 {\n\n return Err(QueryError::ParseError(format!(\n\n \"Expected a single query statement, but there are {}\",\n\n ast.len()\n\n )));\n\n }\n\n\n\n let query = match ast.pop().unwrap() {\n\n Statement::Query(query) => query,\n\n _ => {\n\n return Err(QueryError::ParseError(\n\n \"Only SELECT queries are supported.\".to_string(),\n\n ))\n\n }\n", "file_path": "src/syntax/parser.rs", "rank": 37, "score": 127825.69611068779 }, { "content": "pub fn dict_codec(index_type: EncodingType) -> Vec<CodecOp> {\n\n vec![\n\n CodecOp::PushDataSection(1),\n\n CodecOp::PushDataSection(2),\n\n CodecOp::DictLookup(index_type),\n\n ]\n\n}\n\n\n", "file_path": "src/mem_store/strings.rs", "rank": 38, "score": 127312.7085139149 }, { "content": "fn get_table_name(relation: Option<TableFactor>) -> Result<String, QueryError> {\n\n match relation {\n\n // TODO: error message if any unused fields are set\n\n Some(TableFactor::Table { name, .. 
}) => Ok(format!(\"{}\", name)),\n\n Some(s) => Err(QueryError::ParseError(format!(\n\n \"Invalid expression for table name: {:?}\",\n\n s\n\n ))),\n\n None => Err(QueryError::ParseError(\"Table name missing.\".to_string())),\n\n }\n\n}\n\n\n", "file_path": "src/syntax/parser.rs", "rank": 39, "score": 126955.41095538167 }, { "content": "#[derive(Clone)]\n\nstruct Production {\n\n specs: Vec<Declaration>,\n\n expr: Expr,\n\n}\n\n\n", "file_path": "locustdb-derive/src/reify_types.rs", "rank": 40, "score": 126744.35467396688 }, { "content": "#[derive(Clone)]\n\nstruct Declaration {\n\n variables: Vec<Ident>,\n\n t: Ident,\n\n}\n\n\n\nimpl Parse for TypeExpand {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n let name: LitStr = input.parse()?;\n\n input.parse::<Token![;]>()?;\n\n\n\n let productions = Punctuated::<Production, Token![;]>::parse_separated_nonempty(input)?;\n\n\n\n Ok(TypeExpand {\n\n name,\n\n productions: productions.into_iter().collect(),\n\n })\n\n }\n\n}\n\n\n\nimpl Parse for Production {\n", "file_path": "locustdb-derive/src/reify_types.rs", "rank": 41, "score": 126744.35467396688 }, { "content": "pub fn random_hex_string(length: usize) -> Box<dyn ColumnGenerator> {\n\n Box::new(HexString { length })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 42, "score": 126434.44639770259 }, { "content": "pub fn error_buffer_ref(name: &'static str) -> BufferRef<Any> {\n\n BufferRef {\n\n i: 0xdead_beef,\n\n name,\n\n t: PhantomData,\n\n }\n\n}\n\n\n\nimpl BufferRef<Any> {\n\n pub fn merge_op(self) -> BufferRef<MergeOp> { self.transmute() }\n\n pub fn premerge(self) -> BufferRef<Premerge> { self.transmute() }\n\n pub fn raw_val(self) -> BufferRef<RawVal> { self.transmute() }\n\n pub fn i64(self) -> BufferRef<i64> { self.transmute() }\n\n pub fn u64(self) -> BufferRef<u64> { self.transmute() }\n\n pub fn u32(self) -> BufferRef<u32> { self.transmute() }\n\n pub fn u16(self) -> BufferRef<u16> { self.transmute() }\n\n pub fn u8(self) -> BufferRef<u8> { self.transmute() }\n\n\n\n pub fn cast_nullable_any(&self) -> BufferRef<Nullable<Any>> { self.transmute() }\n\n\n", "file_path": "src/engine/execution/buffer.rs", "rank": 43, "score": 126434.44639770259 }, { "content": "pub fn encode<T: Debug>(data: &[T]) -> Vec<u8> {\n\n let ptr_t = data.as_ptr();\n\n // Endianness? 
Never heard of it...\n\n let data_u8: &[u8] = unsafe {\n\n let ptr_u8 = ptr_t as *const u8;\n\n from_raw_parts(ptr_u8, data.len() * mem::size_of::<T>())\n\n };\n\n\n\n let mut result = Vec::new();\n\n {\n\n let mut encoder = lz4::EncoderBuilder::new().build(&mut result).unwrap();\n\n encoder.write_all(data_u8).unwrap();\n\n encoder.finish().1.unwrap();\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/mem_store/lz4.rs", "rank": 44, "score": 125787.11089273711 }, { "content": "struct TypeExpand {\n\n name: LitStr,\n\n productions: Vec<Production>,\n\n}\n\n\n", "file_path": "locustdb-derive/src/reify_types.rs", "rank": 45, "score": 124971.90668189511 }, { "content": "pub fn display_vals(vals: &[Val], max_chars: usize) -> String {\n\n let mut length = vals.len();\n\n loop {\n\n let result = _display_vals(vals, length);\n\n if result.len() < max_chars { break; }\n\n length = min(length - 1, max_chars * length / result.len());\n\n if length < 3 {\n\n return _display_vals(vals, 2);\n\n }\n\n }\n\n if length == vals.len() {\n\n return _display_vals(vals, vals.len());\n\n }\n\n for l in length..max_chars {\n\n if _display_vals(vals, l).len() > max_chars {\n\n return _display_vals(vals, l - 1);\n\n }\n\n }\n\n \"display_vals error!\".to_owned()\n\n}\n\n\n", "file_path": "src/engine/data_types/val_rows.rs", "rank": 46, "score": 123795.45353093608 }, { "content": "pub fn display_byte_slices(slice: &[&[u8]], max_chars: usize) -> String {\n\n let mut length = slice.len();\n\n loop {\n\n let result = _display_slice(slice, length);\n\n if result.len() < max_chars { break; }\n\n length = min(length - 1, max_chars * length / result.len());\n\n if length < 3 {\n\n return _display_slice(slice, 2);\n\n }\n\n }\n\n if length == slice.len() {\n\n return _display_slice(slice, slice.len());\n\n }\n\n for l in length..max_chars {\n\n if _display_slice(slice, l).len() > max_chars {\n\n return _display_slice(slice, l - 1);\n\n }\n\n }\n\n \"display_slice error!\".to_owned()\n\n}\n\n\n", "file_path": "src/engine/data_types/byte_slices.rs", "rank": 47, "score": 122542.85956551829 }, { "content": "pub fn splayed(offset: i64, coefficient: i64) -> Box<dyn ColumnGenerator> {\n\n Box::new(Splayed { offset, coefficient })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 48, "score": 122517.97939974697 }, { "content": "/// Creates a new AliasTable struct.\n\npub fn new_alias_table(weights: &[f64]) -> Result<AliasTable, AliasMethodError> {\n\n let n = weights.len() as i32;\n\n\n\n let sum = weights.iter().fold(0.0, |acc, x| acc + x);\n\n if sum == 0.0 {\n\n return Err(AliasMethodError::ZeroTotalWeights);\n\n }\n\n\n\n let mut prob = weights.iter().map(|w| w * f64::from(n) / sum).collect::<Vec<f64>>();\n\n let mut h = 0;\n\n let mut l = n - 1;\n\n let mut hl: Vec<usize> = vec![0; n as usize];\n\n\n\n for (i, p) in prob.iter().enumerate() {\n\n if *p < 1.0 {\n\n hl[l as usize] = i;\n\n l -= 1;\n\n }\n\n if 1.0 < *p {\n\n hl[h as usize] = i;\n", "file_path": "src/ingest/alias_method_fork.rs", "rank": 49, "score": 121336.95305355739 }, { "content": "pub fn int_uniform(low: i64, high: i64) -> Box<dyn ColumnGenerator> {\n\n Box::new(UniformInteger { low, high })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 50, "score": 121175.40195902862 }, { "content": "fn encoding_range(plan: &TypedBufferRef, qp: &QueryPlanner) -> Option<(i64, i64)> {\n\n // This would benefit from more principled approach - it currently doesn't work for all partially decodings\n\n // Example: [LZ4, Add, Delta] will have as bottom decoding 
range the range after indices, max_index Delta, but without the Add :/\n\n // This works in this case because we always have to decode the Delta, but is hard to reason about and has caused bugs\n\n use self::QueryPlan::*;\n\n match *qp.resolve(plan) {\n\n ColumnSection { range, .. } => range,\n\n ToYear { timestamp, .. } => encoding_range(&timestamp, qp).map(|(min, max)|\n\n (i64::from(NaiveDateTime::from_timestamp(min, 0).year()),\n\n i64::from(NaiveDateTime::from_timestamp(max, 0).year()))\n\n ),\n\n Filter { ref plan, .. } => encoding_range(plan, qp),\n\n Divide { ref lhs, ref rhs, .. } => if let ScalarI64 { value: c, .. } = qp.resolve(rhs) {\n\n encoding_range(lhs, qp).map(|(min, max)|\n\n if *c > 0 { (min / *c, max / *c) } else { (max / *c, min / *c) })\n\n } else {\n\n None\n\n },\n\n CheckedDivide { ref lhs, ref rhs, .. } => if let ScalarI64 { value: c, .. } = qp.resolve(rhs) {\n\n encoding_range(lhs, qp).map(|(min, max)|\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 51, "score": 121077.18740392092 }, { "content": "pub fn random_string(min_length: usize, max_length: usize) -> Box<dyn ColumnGenerator> {\n\n Box::new(RandomString {\n\n min_length,\n\n max_length,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 52, "score": 118626.39256756267 }, { "content": "pub fn merge_deduplicate_partitioned<'a, T: PartialOrd + Copy + 'a>(partitioning: &[Premerge],\n\n left: &[T],\n\n right: &[T]) -> (Vec<T>, Vec<MergeOp>) {\n\n let output_len_estimate = max(left.len(), right.len()) + min(left.len(), right.len()) / 2;\n\n let mut result = Vec::with_capacity(output_len_estimate);\n\n let mut ops = Vec::<MergeOp>::with_capacity(output_len_estimate);\n\n\n\n let mut i = 0;\n\n let mut j = 0;\n\n for group in partitioning {\n\n let mut last = None;\n\n let i_max = i + group.left as usize;\n\n let j_max = j + group.right as usize;\n\n // println!(\"i_max = {}, j_max = {}\", i_max, j_max);\n\n for _ in 0..(group.left + group.right) {\n\n // println!(\"i = {}, j = {}, last = {:?}\", i, j, last);\n\n // println!(\"{:?} {:?}\", left.get(i), right.get(j));\n\n if j < j_max && last == Some(right[j]) {\n\n ops.push(MergeOp::MergeRight);\n\n j += 1;\n", "file_path": "src/engine/operators/merge_deduplicate_partitioned.rs", "rank": 53, "score": 118626.39256756267 }, { "content": "fn get_projection(projection: Vec<SelectItem>) -> Result<Vec<(Expr, Option<String>)>, QueryError> {\n\n let mut result = Vec::<(Expr, Option<String>)>::new();\n\n for elem in &projection {\n\n match elem {\n\n SelectItem::UnnamedExpr(e) => result.push( (*convert_to_expr(&e)?, None) ),\n\n SelectItem::Wildcard => result.push( (Expr::ColName('*'.to_string()), None) ),\n\n SelectItem::ExprWithAlias { expr, alias } => result.push( (*convert_to_expr(&expr)?, Some(alias.to_string())) ),\n\n _ => {\n\n return Err(QueryError::NotImplemented(format!(\n\n \"Unsupported projection in SELECT: {}\",\n\n elem\n\n )))\n\n }\n\n }\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/syntax/parser.rs", "rank": 54, "score": 118571.50627417998 }, { "content": "fn get_order_by(order_by: Option<Vec<OrderByExpr>>) -> Result<Vec<(Expr, bool)>, QueryError> {\n\n let mut order = Vec::new();\n\n if let Some(sql_order_by_exprs) = order_by {\n\n for e in sql_order_by_exprs {\n\n order.push((*(convert_to_expr(&e.expr))?, !e.asc.unwrap_or(true)));\n\n }\n\n }\n\n Ok(order)\n\n}\n\n\n", "file_path": "src/syntax/parser.rs", "rank": 55, "score": 118571.50627417998 }, { "content": "struct FnTask<F, T> where\n\n F: Fn() -> T + Sync + 
Send + 'static,\n\n T: Send {\n\n fun: F,\n\n sender: SharedSender<T>,\n\n}\n\n\n\nimpl<F, T> Task for FnTask<F, T> where\n\n F: Fn() -> T + Sync + Send + 'static,\n\n T: Send {\n\n fn execute(&self) {\n\n let result = self.fun.call(());\n\n self.sender.send(result);\n\n }\n\n\n\n fn completed(&self) -> bool { false }\n\n fn multithreaded(&self) -> bool { false }\n\n}\n\n\n\nimpl dyn Task {\n\n pub fn from_fn<F, T>(fun: F) -> (impl Task, oneshot::Receiver<T>) where\n\n F: Fn() -> T + Sync + Send + 'static,\n\n T: Send {\n\n let (sender, receiver) = oneshot::channel();\n\n (FnTask { fun, sender: SharedSender::new(sender) }, receiver)\n\n }\n\n}\n", "file_path": "src/scheduler/task.rs", "rank": 56, "score": 118562.34782749512 }, { "content": "pub fn display_slice<T: Debug>(slice: &[T], max_chars: usize) -> String {\n\n let mut length = slice.len();\n\n loop {\n\n let result = _display_slice(slice, length);\n\n if result.len() < max_chars { break; }\n\n length = min(length - 1, max_chars * length / result.len());\n\n if length < 3 {\n\n return _display_slice(slice, 2);\n\n }\n\n }\n\n if length == slice.len() {\n\n return _display_slice(slice, slice.len());\n\n }\n\n for l in length..max_chars {\n\n if _display_slice(slice, l).len() > max_chars {\n\n return _display_slice(slice, l - 1);\n\n }\n\n }\n\n \"display_slice error!\".to_owned()\n\n}\n\n\n", "file_path": "src/engine/data_types/vec_data.rs", "rank": 57, "score": 117415.43510983036 }, { "content": "pub fn decode<T>(src: &mut dyn Read, dst: &mut [T]) -> usize {\n\n let ptr_t = dst.as_ptr();\n\n let dst_u8: &mut [u8] = unsafe {\n\n let ptr_u8 = ptr_t as *mut u8;\n\n from_raw_parts_mut(ptr_u8, dst.len() * mem::size_of::<T>())\n\n };\n\n\n\n let mut read = 0;\n\n // LZ4 decodes in blocks of at most 65536 elements, so might have to call multiple times to fill buffer\n\n while read < dst_u8.len() && 0 != {\n\n let len = src.read(&mut dst_u8[read..]).unwrap();\n\n read += len;\n\n len\n\n } {}\n\n if read % mem::size_of::<T>() != 0 {\n\n println!(\"{} {} {} {}\", dst.len(), dst_u8.len(), read, mem::size_of::<T>());\n\n }\n\n assert_eq!(read % mem::size_of::<T>(), 0);\n\n read / mem::size_of::<T>()\n\n}\n", "file_path": "src/mem_store/lz4.rs", "rank": 58, "score": 116534.87706479713 }, { "content": "pub fn string_weighted(values: Vec<String>, weights: Vec<f64>) -> Box<dyn ColumnGenerator> {\n\n Box::new(Weighted {\n\n elem: values,\n\n weights,\n\n s: PhantomData::<StringColBuilder>,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 59, "score": 115282.28309937933 }, { "content": "pub fn int_weighted(values: Vec<i64>, weights: Vec<f64>) -> Box<dyn ColumnGenerator> {\n\n Box::new(Weighted {\n\n elem: values.into_iter().map(Some).collect(),\n\n weights,\n\n s: PhantomData::<IntColBuilder>,\n\n })\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 60, "score": 115282.28309937933 }, { "content": "pub fn partition<'a, T, C>(left: &[T], right: &[T], limit: usize) -> Vec<Premerge>\n\n where T: VecData<T> + 'a, C: Comparator<T> {\n\n let mut result = Vec::new();\n\n let mut i = 0;\n\n let mut j = 0;\n\n let limit = if limit > u32::MAX as usize { u32::MAX } else { limit as u32 };\n\n let mut min_elems = 0u32;\n\n while i < left.len() && j < right.len() && min_elems < limit {\n\n let mut partition = Premerge { left: 0, right: 0 };\n\n let elem = if C::cmp_eq(left[i], right[j]) { left[i] } else { right[j] };\n\n while i < left.len() && elem == left[i] {\n\n partition.left += 1;\n\n i += 1;\n\n }\n\n while j < right.len() && elem == 
right[j] {\n\n partition.right += 1;\n\n j += 1;\n\n }\n\n min_elems += cmp::max(partition.left, partition.right);\n\n result.push(partition);\n", "file_path": "src/engine/operators/partition.rs", "rank": 61, "score": 112393.60726574608 }, { "content": "pub fn merge_partitioned<'a, T, C>(partitioning: &[Premerge], left: &[T], right: &[T], limit: usize)\n\n -> (Vec<T>, Vec<u8>) where T: PartialOrd + Debug + Copy + 'a, C: Comparator<T> {\n\n let len = cmp::min(left.len() + right.len(), limit);\n\n let mut result = Vec::with_capacity(len);\n\n let mut take_left = Vec::<u8>::with_capacity(len);\n\n\n\n let mut i = 0;\n\n let mut j = 0;\n\n 'outer: for group in partitioning {\n\n let i_max = i + group.left as usize;\n\n let j_max = j + group.right as usize;\n\n for _ in 0..(group.left + group.right) {\n\n if j == j_max || (i < i_max && C::cmp_eq(left[i], right[j])) {\n\n take_left.push(1);\n\n result.push(left[i]);\n\n i += 1;\n\n } else {\n\n take_left.push(0);\n\n result.push(right[j]);\n\n j += 1;\n\n }\n\n if i + j == limit {\n\n break 'outer;\n\n }\n\n }\n\n }\n\n (result, take_left)\n\n}\n\n\n", "file_path": "src/engine/operators/merge_partitioned.rs", "rank": 62, "score": 110011.28678697138 }, { "content": "fn studley_to_snake(ident: Ident) -> Ident {\n\n let mut snake_case = String::new();\n\n let mut previous_lowercase = false;\n\n for c in format!(\"{}\", ident).chars() {\n\n if c.is_uppercase() {\n\n if previous_lowercase {\n\n snake_case.push('_');\n\n }\n\n previous_lowercase = false;\n\n for l in c.to_lowercase() {\n\n snake_case.push(l);\n\n }\n\n } else {\n\n previous_lowercase = true;\n\n snake_case.push(c);\n\n }\n\n }\n\n Ident::new(&snake_case, ident.span())\n\n}\n", "file_path": "locustdb-derive/src/ast_builder.rs", "rank": 63, "score": 107402.98248352479 }, { "content": "fn studley_to_snake(ident: Ident) -> Ident {\n\n let mut snake_case = String::new();\n\n let mut previous_lowercase = false;\n\n for c in format!(\"{}\", ident).chars() {\n\n if c.is_uppercase() {\n\n if previous_lowercase {\n\n snake_case.push('_');\n\n }\n\n previous_lowercase = false;\n\n for l in c.to_lowercase() {\n\n snake_case.push(l);\n\n }\n\n } else {\n\n previous_lowercase = true;\n\n snake_case.push(c);\n\n }\n\n }\n\n Ident::new(&snake_case, ident.span())\n\n}", "file_path": "locustdb-derive/src/enum_syntax.rs", "rank": 64, "score": 107402.98248352479 }, { "content": "pub fn display_nullable_slice<T: fmt::Debug>(slice: &[T], present: &[u8], max_chars: usize) -> String {\n\n let mut length = slice.len();\n\n loop {\n\n let result = _display_nullable_slice(slice, present, length);\n\n if result.len() < max_chars { break; }\n\n length = min(length - 1, max_chars * length / result.len());\n\n if length < 3 {\n\n return _display_nullable_slice(slice, present, 2);\n\n }\n\n }\n\n if length == slice.len() {\n\n return _display_nullable_slice(slice, present, slice.len());\n\n }\n\n for l in length..max_chars {\n\n if _display_nullable_slice(slice, present, l).len() > max_chars {\n\n return _display_nullable_slice(slice, present, l - 1);\n\n }\n\n }\n\n \"display_slice error!\".to_owned()\n\n}\n\n\n", "file_path": "src/engine/data_types/nullable_vec_data.rs", "rank": 65, "score": 107078.75316981514 }, { "content": "fn db() -> &'static LocustDB {\n\n unsafe {\n\n // Prints each argument on a separate line\n\n let thread_count = env::var_os(\"LOCUSTDB_THREADS\")\n\n .map(|x| x.to_str().unwrap().parse::<usize>().unwrap());\n\n match DB {\n\n Some(ref locustdb) => locustdb,\n\n None => {\n\n let mut opts = 
Options::default();\n\n opts.threads = thread_count.unwrap_or(opts.threads);\n\n let locustdb = LocustDB::new(&opts);\n\n\n\n eprintln!(\"Synthesizing tables\");\n\n gen_table(&locustdb, \"trips_e8\", 100, 1 << 20);\n\n gen_table(&locustdb, \"trips_e7\", 80, 1 << 17);\n\n gen_table(&locustdb, \"trips_e6\", 64, 1 << 14);\n\n eprintln!(\"Done\");\n\n\n\n DB = Some(locustdb);\n\n DB.as_ref().unwrap()\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "benches/basic.rs", "rank": 66, "score": 105340.81269506147 }, { "content": "pub fn combine<'a>(batch1: BatchResult<'a>, batch2: BatchResult<'a>, limit: usize) -> Result<BatchResult<'a>, QueryError> {\n\n ensure!(\n\n batch1.projection.len() == batch2.projection.len(),\n\n \"Unequal number of projections in left ({}) and right ({}) batch result.\",\n\n batch1.projection.len(), batch2.projection.len(),\n\n );\n\n ensure!(\n\n batch1.order_by.len() == batch2.order_by.len(),\n\n \"Unequal number of order by in left ({}) and right ({}) batch result.\",\n\n batch1.order_by.len(), batch2.order_by.len(),\n\n );\n\n ensure!(\n\n batch1.aggregations.len() == batch2.aggregations.len(),\n\n \"Unequal number of aggregations in left ({:?}) and right ({:?}) batch result.\",\n\n batch1.aggregations.len(), batch2.aggregations.len(),\n\n );\n\n\n\n\n\n let mut qp = QueryPlanner::default();\n\n let mut data = Vec::new();\n", "file_path": "src/engine/execution/batch_merging.rs", "rank": 67, "score": 105259.47350971907 }, { "content": "fn convert(expr: Expr, field_type: &Type) -> Expr {\n\n if *field_type == parse_quote!(BufferRef<u8>) {\n\n parse_quote!(#expr.u8().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<&'static str>) {\n\n parse_quote!(#expr.str().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<Val<'static>>) {\n\n parse_quote!(#expr.val().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<ValRows<'static>>) {\n\n parse_quote!(#expr.val_rows().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<usize>) {\n\n parse_quote!(#expr.usize().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<i64>) {\n\n parse_quote!(#expr.i64().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<u32>) {\n\n parse_quote!(#expr.u32().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<Nullable<i64>>) {\n\n parse_quote!(#expr.nullable_i64().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<MergeOp>) {\n\n parse_quote!(#expr.merge_op().unwrap())\n\n } else if *field_type == parse_quote!(BufferRef<Premerge>) {\n", "file_path": "locustdb-derive/src/ast_builder.rs", "rank": 68, "score": 102114.37202006913 }, { "content": "fn hash(field_ident: &Ident, field_type: &Type) -> Stmt {\n\n if *field_type == parse_quote!(String) {\n\n parse_quote!(hasher.input_str(#field_ident);)\n\n } else if *field_type == parse_quote!(usize) || *field_type == parse_quote!(i64) {\n\n parse_quote!(hasher.input(&#field_ident.to_ne_bytes());)\n\n } else if *field_type == parse_quote!(u8) {\n\n parse_quote!(hasher.input(&[#field_ident]);)\n\n } else if *field_type == parse_quote!(bool) {\n\n parse_quote!(hasher.input(&[#field_ident as u8]);)\n\n } else if *field_type == parse_quote!(Aggregator) {\n\n parse_quote!(hasher.input(&[#field_ident as u8]);)\n\n } else if *field_type == parse_quote!(TypedBufferRef) {\n\n parse_quote!(hasher.input(&#field_ident.buffer.i.to_ne_bytes());)\n\n } else {\n\n parse_quote!(hasher.input(&#field_ident.i.to_ne_bytes());)\n\n }\n\n}\n\n\n", "file_path": "locustdb-derive/src/ast_builder.rs", 
"rank": 69, "score": 100721.85264186378 }, { "content": "fn create_buffer(field_ident: &Ident, field_type: &Type) -> Stmt {\n\n let field_name = LitStr::new(&format!(\"{}\", field_ident), Span::call_site());\n\n if *field_type == parse_quote!(BufferRef<u8>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_u8(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<&'static str>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_str(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<Val<'static>>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_val(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<ValRows<'static>>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_val_rows(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<usize>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_usize(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<i64>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_i64(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<u32>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_u32(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<Nullable<i64>>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.nullable_buffer_i64(#field_name);)\n\n } else if *field_type == parse_quote!(BufferRef<MergeOp>) {\n\n parse_quote!(let #field_ident = self.buffer_provider.buffer_merge_op(#field_name);)\n", "file_path": "locustdb-derive/src/ast_builder.rs", "rank": 70, "score": 99378.04995461635 }, { "content": "fn auto_ingest<T>(ldb: &InnerLocustDB, records: T, colnames: &[String], opts: &Options) -> Result<(), String>\n\n where T: Iterator<Item=csv::StringRecord> {\n\n let ignore = (0..colnames.len()).map(|x| opts.ignore_cols.contains(&x)).collect::<Vec<_>>();\n\n let string = (0..colnames.len()).map(|x| opts.always_string.contains(&x)).collect::<Vec<_>>();\n\n let mut raw_cols = (0..colnames.len()).map(|x|\n\n RawCol::new(opts.allow_nulls_all_columns || opts.allow_nulls.contains(&x))).collect::<Vec<_>>();\n\n let mut row_num = 0usize;\n\n for row in records {\n\n for (i, val) in row.iter().enumerate() {\n\n if !ignore[i] {\n\n raw_cols[i].push(val);\n\n }\n\n }\n\n\n\n if row_num % opts.partition_size == opts.partition_size - 1 {\n\n let partition = create_batch(&mut raw_cols, colnames, &opts.extractors, &ignore, &string);\n\n ldb.store_partition(&opts.tablename, partition);\n\n }\n\n row_num += 1;\n\n }\n\n\n\n if row_num % opts.partition_size != 0 {\n\n let partition = create_batch(&mut raw_cols, colnames, &opts.extractors, &ignore, &string);\n\n ldb.store_partition(&opts.tablename, partition);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ingest/csv_loader.rs", "rank": 71, "score": 88523.76927874569 }, { "content": "fn gen_table(db: &LocustDB, name: &str, partitions: usize, partition_size: usize) {\n\n let _ = block_on(db.gen_table(\n\n locustdb::colgen::GenTable {\n\n name: name.to_string(),\n\n partitions,\n\n partition_size,\n\n columns: vec![\n\n (\"total_amount\".to_string(),\n\n locustdb::colgen::int_uniform(-1000, 50_000)),\n\n (\"pickup_datetime\".to_string(),\n\n locustdb::colgen::splayed(1_200_000_000, 3)),\n\n (\"uniform_u32\".to_string(),\n\n locustdb::colgen::int_uniform(0, u32::MAX.into())),\n\n (\"trip_id\".to_string(),\n\n locustdb::colgen::incrementing_int()),\n\n (\"passenger_count\".to_string(),\n\n locustdb::colgen::int_weighted(\n\n 
vec![0, 1, 2, 4, 5, 6, 7, 8, 9, 208],\n\n vec![4.0, 1000.0, 200.0, 60.0, 30.0, 95.0, 34.0, 1.0, 1.0, 0.001],\n\n )),\n", "file_path": "benches/basic.rs", "rank": 72, "score": 84623.94226396453 }, { "content": "struct Splayed {\n\n offset: i64,\n\n coefficient: i64,\n\n}\n\n\n\nimpl ColumnGenerator for Splayed {\n\n fn generate(&self, length: usize, name: &str, partition: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(partition);\n\n let mut builder = IntColBuilder::default();\n\n for _ in 0..length {\n\n builder.push(&Some(rng.gen_range::<i64>(\n\n self.offset + self.coefficient * length as i64 * partition as i64,\n\n self.offset + self.coefficient * length as i64 * (partition as i64 + 1),\n\n )));\n\n }\n\n ColumnBuilder::<Option<i64>>::finalize(builder, name, None)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 73, "score": 84327.97496771577 }, { "content": "struct UniformInteger {\n\n low: i64,\n\n high: i64,\n\n}\n\n\n\nimpl ColumnGenerator for UniformInteger {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(seed);\n\n let mut builder = IntColBuilder::default();\n\n for _ in 0..length {\n\n builder.push(&Some(rng.gen_range::<i64>(self.low, self.high)));\n\n }\n\n ColumnBuilder::<Option<i64>>::finalize(builder, name, None)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 74, "score": 83362.52403928404 }, { "content": "struct RandomString {\n\n min_length: usize,\n\n max_length: usize,\n\n}\n\n\n\nimpl ColumnGenerator for RandomString {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(seed);\n\n let mut builder = StringColBuilder::default();\n\n for _ in 0..length {\n\n let len = rng.gen_range(self.min_length, self.max_length + 1);\n\n let string: String = rng.sample_iter::<char, _>(&Alphanumeric).take(len).collect();\n\n builder.push(&string);\n\n }\n\n ColumnBuilder::<&str>::finalize(builder, name, None)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 75, "score": 83362.52403928404 }, { "content": "struct IncrementingInteger;\n\n\n\nimpl ColumnGenerator for IncrementingInteger {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut builder = IntColBuilder::default();\n\n for i in seed as i64 * length as i64..length as i64 * (seed as i64 + 1) {\n\n builder.push(&Some(i));\n\n }\n\n builder.finalize(name, None)\n\n }\n\n}\n\n\n\npub struct GenTable {\n\n pub name: String,\n\n pub partitions: usize,\n\n pub partition_size: usize,\n\n pub columns: Vec<(String, Box<dyn ColumnGenerator>)>,\n\n}\n\n\n\nimpl GenTable {\n\n pub fn gen(&self, db: &InnerLocustDB, partition_number: u64) {\n\n let partition = self.columns\n\n .iter()\n\n .map(|(name, c)| c.generate(self.partition_size, &name, partition_number))\n\n .collect();\n\n db.store_partition(&self.name, partition);\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 76, "score": 83362.52403928404 }, { "content": "struct HexString {\n\n length: usize,\n\n}\n\n\n\nimpl ColumnGenerator for HexString {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(seed);\n\n let mut builder = StringColBuilder::default();\n\n for _ in 0..length {\n\n let bytes: Vec<u8> = rng.sample_iter(&Standard).take(self.length).collect();\n\n builder.push(&hex::encode(&bytes));\n\n }\n\n ColumnBuilder::<&str>::finalize(builder, name, None)\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 
77, "score": 83362.52403928404 }, { "content": "struct PartitionSparse {\n\n null_probability: f64,\n\n generator: Box<dyn ColumnGenerator>,\n\n}\n\n\n\nimpl ColumnGenerator for PartitionSparse {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(seed);\n\n if rng.gen::<f64>() < self.null_probability {\n\n Arc::new(Column::null(name, length))\n\n } else {\n\n self.generator.generate(length, name, seed)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 78, "score": 83362.52403928404 }, { "content": "#[derive(Default, Clone)]\n\nstruct ExecutorStage {\n\n // Vec<(index to op, streamable output)>\n\n ops: Vec<(usize, bool)>,\n\n stream: bool,\n\n}\n\n\n\nimpl<'a> QueryExecutor<'a> {\n\n pub fn set_buffer_count(&mut self, count: usize) { self.count = count }\n\n\n\n pub fn named_buffer(&mut self, name: &'static str, tag: EncodingType) -> TypedBufferRef {\n\n let buffer = TypedBufferRef::new(BufferRef { i: self.count, name, t: PhantomData }, tag);\n\n self.count += 1;\n\n self.last_buffer = buffer;\n\n buffer\n\n }\n\n\n\n pub fn buffer_merge_op(&mut self, name: &'static str) -> BufferRef<MergeOp> {\n\n self.named_buffer(name, EncodingType::MergeOp).merge_op().unwrap()\n\n }\n\n\n", "file_path": "src/engine/execution/executor.rs", "rank": 79, "score": 82436.34457734515 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ColType {\n\n contains_string: bool,\n\n contains_int: bool,\n\n contains_null: bool,\n\n}\n\n\n\nimpl ColType {\n\n fn new(string: bool, int: bool, null: bool) -> ColType {\n\n ColType { contains_string: string, contains_int: int, contains_null: null }\n\n }\n\n\n\n fn string() -> ColType {\n\n ColType::new(true, false, false)\n\n }\n\n\n\n fn int() -> ColType {\n\n ColType::new(false, true, false)\n\n }\n\n\n\n fn null() -> ColType {\n", "file_path": "src/ingest/csv_loader.rs", "rank": 80, "score": 82436.34457734515 }, { "content": "struct Function2 {\n\n pub factory: Factory,\n\n pub type_rhs: BasicType,\n\n pub type_lhs: BasicType,\n\n pub type_out: Type,\n\n pub encoding_invariance: bool,\n\n}\n\n\n\nimpl Function2 {\n\n pub fn integer_op(factory: Factory) -> Function2 {\n\n Function2 {\n\n factory,\n\n type_lhs: BasicType::Integer,\n\n type_rhs: BasicType::Integer,\n\n type_out: Type::unencoded(BasicType::Integer).mutable(),\n\n encoding_invariance: false,\n\n }\n\n }\n\n\n\n pub fn comparison_op(factory: Factory,\n", "file_path": "src/engine/planning/query_plan.rs", "rank": 81, "score": 82436.34457734515 }, { "content": "struct RawCol {\n\n types: ColType,\n\n values: IndexedPackedStrings,\n\n lhex: bool,\n\n uhex: bool,\n\n string_bytes: usize,\n\n allow_null: bool,\n\n present: Vec<u8>,\n\n any_null: bool,\n\n}\n\n\n\nimpl RawCol {\n\n fn new(allow_null: bool) -> RawCol {\n\n RawCol {\n\n types: ColType::nothing(),\n\n values: IndexedPackedStrings::default(),\n\n lhex: true,\n\n uhex: true,\n\n string_bytes: 0,\n\n allow_null,\n", "file_path": "src/ingest/csv_loader.rs", "rank": 82, "score": 82436.34457734515 }, { "content": "#[derive(Default, Debug)]\n\nstruct DiskRun {\n\n start: PartitionID,\n\n end: PartitionID,\n\n columns: HashSet<String>,\n\n bytes: usize,\n\n}\n\n\n\nimpl DiskReadScheduler {\n\n pub fn new(disk_store: Arc<dyn DiskStore>, lru: LRU, max_readers: usize, lz4_decode: bool) -> DiskReadScheduler {\n\n DiskReadScheduler {\n\n disk_store,\n\n task_queue: Mutex::default(),\n\n reader_semaphore: Semaphore::new(max_readers as isize),\n\n lru,\n\n lz4_decode,\n\n 
background_load_wait_queue: Condvar::default(),\n\n background_load_in_progress: Mutex::default(),\n\n }\n\n }\n\n\n", "file_path": "src/scheduler/disk_read_scheduler.rs", "rank": 83, "score": 81547.08818737637 }, { "content": "#[derive(PartialEq, Debug, Copy, Clone)]\n\nstruct ColType {\n\n contains_string: bool,\n\n contains_int: bool,\n\n contains_null: bool,\n\n}\n\n\n\nimpl ColType {\n\n fn new(string: bool, int: bool, null: bool) -> ColType {\n\n ColType { contains_string: string, contains_int: int, contains_null: null }\n\n }\n\n\n\n fn string() -> ColType {\n\n ColType::new(true, false, false)\n\n }\n\n\n\n fn int() -> ColType {\n\n ColType::new(false, true, false)\n\n }\n\n\n\n fn null() -> ColType {\n", "file_path": "src/mem_store/raw_col.rs", "rank": 84, "score": 81547.08818737637 }, { "content": "pub trait BitVec {\n\n fn is_set(&self, index: usize) -> bool;\n\n}\n\n\n\nimpl BitVecMut for Vec<u8> {\n\n fn set(&mut self, index: usize) {\n\n let slot = index >> 3;\n\n while slot >= self.len() {\n\n self.push(0);\n\n }\n\n self[slot] |= 1 << (index as u8 & 7)\n\n }\n\n}\n\n\n\nimpl BitVec for Vec<u8> {\n\n fn is_set(&self, index: usize) -> bool {\n\n let slot = index >> 3;\n\n slot < self.len() && self[slot] & (1 << (index as u8 & 7)) > 0\n\n }\n\n}\n\n\n\nimpl<'a> BitVec for &'a [u8] {\n\n fn is_set(&self, index: usize) -> bool {\n\n let slot = index >> 3;\n\n slot < self.len() && self[slot] & (1 << (index as u8 & 7)) > 0\n\n }\n\n}\n", "file_path": "src/bitvec.rs", "rank": 85, "score": 79247.07077613461 }, { "content": "#[derive(Clone)]\n\nstruct Weighted<T, S> {\n\n elem: Vec<T>,\n\n weights: Vec<f64>,\n\n s: PhantomData<S>,\n\n}\n\n\n\nunsafe impl<T: Send, S: ColumnBuilder<T>> Send for Weighted<T, S> {}\n\n\n\nunsafe impl<T: Sync, S: ColumnBuilder<T>> Sync for Weighted<T, S> {}\n\n\n\nimpl<T: Sync + Send, S: ColumnBuilder<T>> ColumnGenerator for Weighted<T, S> {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let rng = seeded_rng(seed);\n\n let mut builder = S::default();\n\n let p = new_alias_table(&self.weights).unwrap();\n\n let mut alias_method = AliasMethod::new(rng);\n\n for _ in 0..length {\n\n let i = alias_method.random(&p);\n\n builder.push(&self.elem[i]);\n\n }\n\n builder.finalize(name, None)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 86, "score": 78531.07183088534 }, { "content": "pub trait BitVecMut {\n\n fn set(&mut self, index: usize);\n\n}\n\n\n", "file_path": "src/bitvec.rs", "rank": 87, "score": 78328.8691283148 }, { "content": "fn main() {\n\n #[cfg(feature = \"enable_rocksdb\")]\n\n ::capnpc::CompilerCommand::new()\n\n .src_prefix(\"src\")\n\n .file(\"src/storage_format.capnp\")\n\n .run()\n\n .unwrap();\n\n}\n", "file_path": "build.rs", "rank": 88, "score": 78078.14925949808 }, { "content": "#[derive(Clone)]\n\nstruct MarkovChain<T, S> {\n\n elem: Vec<T>,\n\n p_transition: Vec<Vec<f64>>,\n\n s: PhantomData<S>,\n\n}\n\n\n\nunsafe impl<T: Send, S: ColumnBuilder<T>> Send for MarkovChain<T, S> {}\n\n\n\nunsafe impl<T: Sync, S: ColumnBuilder<T>> Sync for MarkovChain<T, S> {}\n\n\n\nimpl<T: Sync + Send, S: ColumnBuilder<T>> ColumnGenerator for MarkovChain<T, S> {\n\n fn generate(&self, length: usize, name: &str, seed: u64) -> Arc<Column> {\n\n let mut rng = seeded_rng(seed);\n\n let mut builder = S::default();\n\n let mut state = rng.gen_range(0, self.elem.len());\n\n let p = self.p_transition.iter()\n\n .map(|p| new_alias_table(p).unwrap())\n\n .collect::<Vec<_>>();\n\n let mut alias_method = 
AliasMethod::new(rng);\n\n for _ in 0..length {\n\n state = alias_method.random(&p[state]);\n\n builder.push(&self.elem[state]);\n\n }\n\n builder.finalize(name, None)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/ingest/colgen.rs", "rank": 89, "score": 77641.81544091654 }, { "content": "pub trait BooleanOp {\n\n fn evaluate(lhs: &mut [u8], rhs: &[u8]);\n\n fn name() -> &'static str;\n\n fn symbol() -> &'static str;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BooleanOr;\n\n\n\nimpl BooleanOp for BooleanOr {\n\n fn evaluate(lhs: &mut [u8], rhs: &[u8]) {\n\n for (l, r) in lhs.iter_mut().zip(rhs) {\n\n *l |= r;\n\n }\n\n }\n\n\n\n fn name() -> &'static str { \"bit_vec_or\" }\n\n fn symbol() -> &'static str { \"|\" }\n\n}\n\n\n", "file_path": "src/engine/operators/bool_op.rs", "rank": 90, "score": 76600.13475324928 }, { "content": "#[test]\n\nfn test_and_or() {\n\n test_query(\n\n \"select first_name, last_name from default where ((first_name = 'Adam') OR (first_name = 'Catherine')) AND (num = 3);\",\n\n &[vec![\"Adam\".into(), \"Crawford\".into()]],\n\n )\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 91, "score": 76072.56618218278 }, { "content": "pub trait CastUsize {\n\n fn cast_usize(&self) -> usize;\n\n}\n\n\n\nimpl CastUsize for u8 {\n\n fn cast_usize(&self) -> usize { *self as usize }\n\n}\n\n\n\nimpl CastUsize for u16 {\n\n fn cast_usize(&self) -> usize { *self as usize }\n\n}\n\n\n\nimpl CastUsize for u32 {\n\n fn cast_usize(&self) -> usize { *self as usize }\n\n}\n\n\n\nimpl CastUsize for i64 {\n\n fn cast_usize(&self) -> usize { *self as usize }\n\n}\n\n\n", "file_path": "src/engine/data_types/vec_data.rs", "rank": 92, "score": 75785.47425438705 }, { "content": "pub trait Comparator<T> {\n\n fn cmp(left: T, right: T) -> bool;\n\n fn cmp_eq(left: T, right: T) -> bool;\n\n fn is_less_than() -> bool;\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub struct CmpLessThan;\n\n\n\n// Blanket implementation of Asc for PrimInt doesn't work because of trait coherence rules (upstream could implement `PrimInt` for &str).\n\nimpl Comparator<u8> for CmpLessThan {\n\n fn cmp(left: u8, right: u8) -> bool { left < right }\n\n fn cmp_eq(left: u8, right: u8) -> bool { left <= right }\n\n fn is_less_than() -> bool { true }\n\n}\n\n\n\nimpl Comparator<u16> for CmpLessThan {\n\n fn cmp(left: u16, right: u16) -> bool { left < right }\n\n fn cmp_eq(left: u16, right: u16) -> bool { left <= right }\n", "file_path": "src/engine/operators/comparator.rs", "rank": 93, "score": 75686.8432108879 }, { "content": "pub trait Aggregator<T> {\n\n fn unit() -> T;\n\n fn accumulate(accumulator: T, value: i64) -> T;\n\n fn combine(accumulator1: i64, accumulator2: i64) -> i64;\n\n}\n\n\n", "file_path": "src/engine/operators/aggregate.rs", "rank": 94, "score": 75686.8432108879 }, { "content": "#[test]\n\nfn test_regex() {\n\n test_query(\n\n \"SELECT first_name FROM default WHERE regex(first_name, '^C.+h.a');\",\n\n &[vec![Str(\"Cynthia\")]],\n\n );\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 95, "score": 75132.23044013878 }, { "content": "#[test]\n\nfn test_sum() {\n\n test_query(\n\n \"select tld, sum(num) from default where (tld = 'name');\",\n\n &[vec![\"name\".into(), 26.into()]],\n\n )\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 96, "score": 75132.23044013878 }, { "content": "#[test]\n\nfn test_division() {\n\n test_query(\n\n \"select num / 10, count(1) from default;\",\n\n &[vec![0.into(), 100.into()]],\n\n )\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 97, "score": 
75132.23044013878 }, { "content": "#[test]\n\nfn test_sum_2() {\n\n test_query_ec(\n\n \"select non_dense_ints, sum(u8_offset_encoded) from default;\",\n\n &[\n\n vec![0.into(), 756.into()],\n\n vec![1.into(), 689.into()],\n\n vec![2.into(), 1112.into()],\n\n vec![3.into(), 759.into()],\n\n vec![4.into(), 275.into()],\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 98, "score": 75132.23044013878 }, { "content": "#[test]\n\nfn test_not_regex() {\n\n test_query(\n\n \"SELECT first_name FROM default WHERE not(regex(first_name, '^C.*h.a')) ORDER BY ts LIMIT 1;\",\n\n &[vec![Str(\"Charles\")]],\n\n );\n\n}\n\n\n", "file_path": "tests/query_tests.rs", "rank": 99, "score": 75132.23044013878 } ]
Rust
demo/src/components/physics.rs
aclysma/atelier-legion-demo
658d1f6471cd41d48b13f7fc2db2f5ebabdb9429
use serde::{Deserialize, Serialize}; use serde_diff::SerdeDiff; use type_uuid::TypeUuid; use nphysics2d::object::DefaultBodyHandle; use legion_transaction::SpawnFrom; use crate::math::Vec2; use crate::resources::{PhysicsResource, OpenedPrefabState}; use legion::prelude::*; use std::ops::Range; use legion::storage::ComponentStorage; use skulpin_plugin_imgui::imgui; use imgui_inspect_derive::Inspect; use ncollide2d::shape::ShapeHandle; use ncollide2d::shape::{Ball, Cuboid}; use ncollide2d::pipeline::{CollisionGroups, GeometricQueryType}; use legion::index::ComponentIndex; use legion_transaction::iter_components_in_storage; use crate::components::{ Position2DComponent, UniformScale2DComponent, NonUniformScale2DComponent, Rotation2DComponent, }; use ncollide2d::world::CollisionWorld; #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "fa518c0a-a65a-44c8-9d35-3f4f336b4de4"] pub struct RigidBodyBallComponentDef { pub radius: f32, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBallComponentDef); #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "36df3006-a5ad-4997-9ccc-0860f49195ad"] pub struct RigidBodyBoxComponentDef { #[serde_diff(opaque)] pub half_extents: Vec2, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBoxComponentDef); pub struct RigidBodyComponent { pub handle: DefaultBodyHandle, delete_body_tx: crossbeam_channel::Sender<DefaultBodyHandle>, } impl Drop for RigidBodyComponent { fn drop(&mut self) { self.delete_body_tx.send(self.handle); } } fn transform_shape_to_rigid_body( physics: &mut PhysicsResource, into: &mut std::mem::MaybeUninit<RigidBodyComponent>, src_position: Option<&Position2DComponent>, src_rotation: Option<&Rotation2DComponent>, shape_handle: ShapeHandle<f32>, is_static: bool, ) { let position = if let Some(position) = src_position { position.position } else { Vec2::zero() }; let mut collider_offset = Vec2::zero(); let rigid_body_handle = if is_static { *collider_offset += *position; physics.bodies.insert(nphysics2d::object::Ground::new()) } else { physics.bodies.insert( nphysics2d::object::RigidBodyDesc::new() .translation(position.into()) .build(), ) }; let collider = nphysics2d::object::ColliderDesc::new(shape_handle.clone()) .density(1.0) .translation(collider_offset.into()) .build(nphysics2d::object::BodyPartHandle(rigid_body_handle, 0)); physics.colliders.insert(collider); *into = std::mem::MaybeUninit::new(RigidBodyComponent { handle: rigid_body_handle, delete_body_tx: physics.delete_body_tx().clone(), }) } impl SpawnFrom<RigidBodyBallComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBallComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_rotation, from, into) 
in izip!( position_components, uniform_scale_components, rotation_components, from, into ) { let mut radius = from.radius; if let Some(src_uniform_scale) = src_uniform_scale { radius *= src_uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBallComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut radius = self.radius; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { radius *= uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), 0.0), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } } impl SpawnFrom<RigidBodyBoxComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBoxComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let non_uniform_scale_components = iter_components_in_storage::<NonUniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_non_uniform_scale, src_rotation, from, into) in izip!( position_components, uniform_scale_components, non_uniform_scale_components, rotation_components, from, into ) { let mut half_extents = *from.half_extents; if let Some(src_uniform_scale) = src_uniform_scale { half_extents *= glam::Vec2::splat(src_uniform_scale.uniform_scale); } if let Some(src_non_uniform_scale) = src_non_uniform_scale { half_extents *= *src_non_uniform_scale.non_uniform_scale; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBoxComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut half_extents = 
*self.half_extents; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { half_extents *= uniform_scale.uniform_scale; } if let Some(non_uniform_scale) = prefab_world.get_component::<NonUniformScale2DComponent>(prefab_entity) { half_extents *= *non_uniform_scale.non_uniform_scale; } let mut rotation = 0.0; if let Some(rotation_component) = prefab_world.get_component::<Rotation2DComponent>(prefab_entity) { rotation = rotation_component.rotation; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), rotation), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } }
use serde::{Deserialize, Serialize}; use serde_diff::SerdeDiff; use type_uuid::TypeUuid; use nphysics2d::object::DefaultBodyHandle; use legion_transaction::SpawnFrom; use crate::math::Vec2; use crate::resources::{PhysicsResource, OpenedPrefabState}; use legion::prelude::*; use std::ops::Range; use legion::storage::ComponentStorage; use skulpin_plugin_imgui::imgui; use imgui_inspect_derive::Inspect; use ncollide2d::shape::ShapeHandle; use ncollide2d::shape::{Ball, Cuboid}; use ncollide2d::pipeline::{CollisionGroups, GeometricQueryType}; use legion::index::ComponentIndex; use legion_transaction::iter_components_in_storage; use crate::components::{ Position2DComponent, UniformScale2DComponent, NonUniformScale2DComponent, Rotation2DComponent, }; use ncollide2d::world::CollisionWorld; #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "fa518c0a-a65a-44c8-9d35-3f4f336b4de4"] pub struct RigidBodyBallComponentDef { pub radius: f32, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBallComponentDef); #[derive(TypeUuid, Serialize, Deserialize, SerdeDiff, Debug, PartialEq, Clone, Inspect, Default)] #[uuid = "36df3006-a5ad-4997-9ccc-0860f49195ad"] pub struct RigidBodyBoxComponentDef { #[serde_diff(opaque)] pub half_extents: Vec2, pub is_static: bool, } legion_prefab::register_component_type!(RigidBodyBoxComponentDef); pub struct RigidBodyComponent { pub handle: DefaultBodyHandle, delete_body_tx: crossbeam_channel::Sender<DefaultBodyHandle>, } impl Drop for RigidBodyComponent { fn drop(&mut self) { self.delete_body_tx.send(self.handle); } } fn transform_shape_to_rigid_body( physics: &mut PhysicsResource, into: &mut std::mem::MaybeUninit<RigidBodyComponent>, src_position: Option<&Position2DComponent>, src_rotation: Option<&Rotation2DComponent>, shape_handle: ShapeHandle<f32>, is_static: bool, ) { let position = if let Some(position) = src_position { position.position } else { Vec2::zero() }; let mut collider_offset = Vec2::zero(); let rigid_body_handle = if is_static { *collider_offset += *position; physics.bodies.insert(nphysics2d::object::Ground::new()) } else { physics.bodies.insert( nphysics2d::object::RigidBodyDesc::new() .translation(position.into()) .build(), ) }; let collider = nphysics2d::object::ColliderDesc::new(shape_handle.clone()) .density(1.0) .translation(collider_offset.into()) .build(nphysics2d::object::BodyPartHandle(rigid_body_handle, 0)); physics.colliders.insert(collider); *into = std::mem::MaybeUninit::new(RigidBodyComponent { handle: rigid_body_handle, delete_body_tx: physics.delete_body_tx().clone(), }) } impl SpawnFrom<RigidBodyBallComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBallComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_rotation, from, into) 
in izip!( position_components, uniform_scale_components, rotation_components, from, into ) { let mut radius = from.radius; if let Some(src_uniform_scale) = src_uniform_scale { radius *= src_uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBallComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut radius = self.radius; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { radius *= uniform_scale.uniform_scale; } let shape_handle = ShapeHandle::new(Ball::new(radius.max(0.01))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), 0.0), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } } } impl SpawnFrom<RigidBodyBoxComponentDef> for RigidBodyComponent { fn spawn_from( _src_world: &World, src_component_storage: &ComponentStorage, src_component_storage_indexes: Range<ComponentIndex>, resources: &Resources, _src_entities: &[Entity], _dst_entities: &[Entity], from: &[RigidBodyBoxComponentDef], into: &mut [std::mem::MaybeUninit<Self>], ) { let mut physics = resources.get_mut::<PhysicsResource>().unwrap(); let position_components = iter_components_in_storage::<Position2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let uniform_scale_components = iter_components_in_storage::<UniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let non_uniform_scale_components = iter_components_in_storage::<NonUniformScale2DComponent>( src_component_storage, src_component_storage_indexes.clone(), ); let rotation_components = iter_components_in_storage::<Rotation2DComponent>( src_component_storage, src_component_storage_indexes, ); for (src_position, src_uniform_scale, src_non_uniform_scale, src_rotation, from, into) in izip!( position_components, uniform_scale_components, non_uniform_scale_components, rotation_components, from, into ) { let mut half_extents = *from.half_extents; if let Some(src_uniform_scale) = src_uniform_scale { half_extents *= glam::Vec2::splat(src_uniform_scale.uniform_scale); } if let Some(src_non_uniform_scale) = src_non_uniform_scale { half_extents *= *src_non_uniform_scale.non_uniform_scale; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); transform_shape_to_rigid_body( &mut physics, into, src_position, src_rotation, shape_handle, from.is_static, ); } } } impl crate::selection::EditorSelectableTransformed<RigidBodyComponent> for RigidBodyBoxComponentDef { fn create_editor_selection_world( &self, collision_world: &mut CollisionWorld<f32, Entity>, resources: &Resources, opened_prefab: &OpenedPrefabState, prefab_world: &World, prefab_entity: Entity, transformed_world: &World, transformed_entity: Entity, transformed_component: &RigidBodyComponent, ) { if let Some(position) = prefab_world.get_component::<Position2DComponent>(prefab_entity) { let mut half_extents = 
*self.half_extents; if let Some(uniform_scale) = prefab_world.get_component::<UniformScale2DComponent>(prefab_entity) { half_extents *= uniform_scale.uniform_scale; }
}
if let Some(non_uniform_scale) = prefab_world.get_component::<NonUniformScale2DComponent>(prefab_entity) { half_extents *= *non_uniform_scale.non_uniform_scale; } let mut rotation = 0.0; if let Some(rotation_component) = prefab_world.get_component::<Rotation2DComponent>(prefab_entity) { rotation = rotation_component.rotation; } let shape_handle = ShapeHandle::new(Cuboid::new(crate::math::vec2_glam_to_glm(half_extents))); collision_world.add( ncollide2d::math::Isometry::new(position.position.into(), rotation), shape_handle, CollisionGroups::new(), GeometricQueryType::Proximity(0.001), transformed_entity, ); } }
function_block-function_prefix_line
[ { "content": "pub fn winit_position_to_glam(position: PhysicalPosition<f64>) -> glam::Vec2 {\n\n glam::Vec2::new(position.x as f32, position.y as f32)\n\n}\n\n\n\n#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, Default)]\n\n#[repr(transparent)]\n\n#[serde(transparent)]\n\npub struct Vec2 {\n\n value: glam::Vec2,\n\n}\n\n\n\nimpl Vec2 {\n\n pub fn zero() -> Self {\n\n Vec2 {\n\n value: glam::Vec2::zero(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<glam::Vec2> for Vec2 {\n", "file_path": "demo/src/math.rs", "rank": 0, "score": 211573.0934757984 }, { "content": "pub fn create_spawn_clone_impl_handler_set(\n\n) -> SpawnCloneImplHandlerSet {\n\n let mut handler_set = SpawnCloneImplHandlerSet::new();\n\n handler_set.add_mapping_into::<DrawSkiaCircleComponentDef, DrawSkiaCircleComponent>();\n\n handler_set.add_mapping_into::<DrawSkiaBoxComponentDef, DrawSkiaBoxComponent>();\n\n handler_set.add_mapping::<RigidBodyBallComponentDef, RigidBodyComponent>();\n\n handler_set.add_mapping::<RigidBodyBoxComponentDef, RigidBodyComponent>();\n\n handler_set\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 1, "score": 169029.81429225364 }, { "content": "pub fn create_spawn_clone_impl<'a, 'b, 'c>(\n\n handler_set: &'a SpawnCloneImplHandlerSet,\n\n component_registry: &'b HashMap<ComponentTypeId, ComponentRegistration>,\n\n resources: &'c Resources\n\n) -> SpawnCloneImpl<'a, 'b, 'c> {\n\n SpawnCloneImpl::new(handler_set, component_registry, resources)\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 2, "score": 168027.44579415 }, { "content": "pub fn vec2_glm_to_glam(value: glm::Vec2) -> glam::Vec2 {\n\n glam::Vec2::new(value.x, value.y)\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 3, "score": 162452.80717130902 }, { "content": "pub fn vec2_glam_to_glm(value: glam::Vec2) -> glm::Vec2 {\n\n glm::Vec2::new(value.x(), value.y())\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 4, "score": 162452.80717130902 }, { "content": "fn sign_aware_magnitude(v: glam::Vec2) -> f32 {\n\n let mut total = 0.0;\n\n total += if v.x() > 0.0 {\n\n v.x() * v.x()\n\n } else {\n\n v.x() * v.x() * -1.0\n\n };\n\n\n\n total += if v.y() > 0.0 {\n\n v.y() * v.y()\n\n } else {\n\n v.y() * v.y() * -1.0\n\n };\n\n\n\n if total >= 0.0 {\n\n total.sqrt()\n\n } else {\n\n (total * -1.0).sqrt() * -1.0\n\n }\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 5, "score": 149607.97264084104 }, { "content": "pub fn read_from_physics() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"read physics data\")\n\n .read_resource::<PhysicsResource>()\n\n .with_query(<(Write<Position2DComponent>, Read<RigidBodyComponent>)>::query())\n\n .build(|_, mut world, physics, query| {\n\n for (mut pos, body) in query.iter_mut(&mut world) {\n\n if let Some(rigid_body) = physics.bodies.rigid_body(body.handle) {\n\n pos.position = rigid_body.position().translation.vector.into()\n\n }\n\n }\n\n })\n\n}\n", "file_path": "demo/src/systems/physics_systems.rs", "rank": 6, "score": 136303.62969842923 }, { "content": "pub fn update_physics() -> Box<dyn Schedulable> {\n\n // Do a physics simulation timestep\n\n SystemBuilder::new(\"update physics\")\n\n .write_resource::<PhysicsResource>()\n\n .read_resource::<TimeResource>()\n\n .build(|_, _, (physics, time), _| {\n\n if time.is_simulation_paused() {\n\n physics.maintain()\n\n } else {\n\n physics.step();\n\n }\n\n })\n\n}\n\n\n", "file_path": "demo/src/systems/physics_systems.rs", "rank": 7, "score": 136303.62969842923 }, { "content": "pub fn 
editor_refresh_selection_world(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n let mut selection_world =\n\n EditorSelectionResource::create_editor_selection_world(resources, world);\n\n selection_world.update();\n\n resources\n\n .get_mut::<EditorSelectionResource>()\n\n .unwrap()\n\n .set_editor_selection_world(selection_world);\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 8, "score": 134682.5688161808 }, { "content": "/// Create the asset manager that has all the required types registered\n\npub fn create_asset_manager() -> AssetResource {\n\n let mut asset_manager = AssetResource::default();\n\n asset_manager.add_storage::<PrefabAsset>();\n\n asset_manager\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 9, "score": 134187.1246584907 }, { "content": "pub fn create_editor_inspector_registry() -> EditorInspectRegistry {\n\n let mut registry = EditorInspectRegistry::default();\n\n registry.register::<DrawSkiaCircleComponentDef>();\n\n registry.register::<DrawSkiaBoxComponentDef>();\n\n registry.register::<Position2DComponent>();\n\n registry.register::<UniformScale2DComponent>();\n\n registry.register::<NonUniformScale2DComponent>();\n\n registry.register::<Rotation2DComponent>();\n\n registry.register::<RigidBodyBallComponentDef>();\n\n registry.register::<RigidBodyBoxComponentDef>();\n\n registry\n\n}\n\n\n\npub struct DemoApp {\n\n update_schedules: HashMap<ScheduleCriteria, Schedule>,\n\n draw_schedules: HashMap<ScheduleCriteria, Schedule>,\n\n}\n\n\n\nimpl DemoApp {\n\n #[allow(clippy::new_without_default)]\n", "file_path": "demo/src/lib.rs", "rank": 10, "score": 131478.19149638433 }, { "content": "pub fn create_component_registry_by_uuid() -> HashMap<ComponentTypeUuid, ComponentRegistration> {\n\n let comp_registrations = legion_prefab::iter_component_registrations();\n\n use std::iter::FromIterator;\n\n let component_types: HashMap<ComponentTypeUuid, ComponentRegistration> =\n\n HashMap::from_iter(comp_registrations.map(|reg| (reg.uuid().clone(), reg.clone())));\n\n\n\n component_types\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 11, "score": 128320.32266158232 }, { "content": "pub fn editor_entity_list_window() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_entity_list_window\")\n\n .write_resource::<ImguiResource>()\n\n .write_resource::<EditorStateResource>()\n\n .write_resource::<EditorSelectionResource>()\n\n .read_resource::<InputResource>()\n\n .read_resource::<UniverseResource>()\n\n .with_query(<(TryRead<()>)>::query())\n\n .build(\n\n |_,\n\n world,\n\n (imgui_manager, editor_ui_state, editor_selection, input, universe_resource),\n\n all_query| {\n\n imgui_manager.with_ui(|ui: &mut imgui::Ui| {\n\n use imgui::im_str;\n\n\n\n let window_options = editor_ui_state.window_options();\n\n\n\n if window_options.show_entity_list {\n\n imgui::Window::new(im_str!(\"Entity List\"))\n", "file_path": "demo/src/systems/editor_systems/entity_list_window.rs", "rank": 12, "score": 126376.08559455108 }, { "content": "pub fn editor_handle_selection() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_input\")\n\n .write_resource::<EditorStateResource>()\n\n .read_resource::<InputResource>()\n\n .read_resource::<ViewportResource>()\n\n .write_resource::<EditorSelectionResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .write_resource::<EditorDrawResource>()\n\n .read_resource::<UniverseResource>()\n\n .with_query(<(Read<Position2DComponent>)>::query())\n\n .build(\n\n |command_buffer,\n\n subworld,\n\n 
(\n\n editor_state,\n\n input_state,\n\n viewport,\n\n editor_selection,\n\n debug_draw,\n\n editor_draw,\n", "file_path": "demo/src/systems/editor_systems/selection.rs", "rank": 13, "score": 122034.74815180551 }, { "content": "// Call this to process input state\n\npub fn update_input_resource() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"input end frame\")\n\n .write_resource::<InputResource>()\n\n .read_resource::<ViewportResource>()\n\n .build(|_, _, (input, viewport), _| {\n\n input.update(&*viewport);\n\n })\n\n}\n\n\n", "file_path": "demo/src/systems/input_systems.rs", "rank": 14, "score": 122029.71741115747 }, { "content": "// this is a virtual coordinate system where h = 600 and w = 600 * aspect_ratio where\n\n// aspect_ratio is window_width / window_height\n\n// top-left: (-w/2, -h/2)\n\n// bottom-right: (w/2, h/2)\n\nfn calculate_world_space_matrix(\n\n logical_size: PhysicalSize<u32>,\n\n position: glam::Vec3,\n\n view_half_extents: glam::Vec2,\n\n) -> glam::Mat4 {\n\n let view = glam::Mat4::look_at_rh(\n\n glam::Vec3::from([0.0, 0.0, 5.0]),\n\n glam::Vec3::from([0.0, 0.0, 0.0]),\n\n glam::Vec3::from([0.0, 1.0, 0.0]),\n\n );\n\n\n\n let projection = glam::Mat4::orthographic_rh(\n\n position.x() - view_half_extents.x(),\n\n position.x() + view_half_extents.x(),\n\n position.y() + view_half_extents.y(),\n\n position.y() - view_half_extents.y(),\n\n -100.0,\n\n 100.0,\n\n );\n\n\n", "file_path": "demo/src/resources/viewport.rs", "rank": 15, "score": 119503.71003490241 }, { "content": "pub fn run() {\n\n init_modules();\n\n\n\n log::info!(\n\n \"registered importers for {}\",\n\n atelier_importer::get_source_importers()\n\n .map(|(ext, _)| ext)\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n );\n\n\n\n let opt = AssetDaemonOpt::from_args();\n\n\n\n AssetDaemon::default()\n\n .with_importers(atelier_importer::get_source_importers())\n\n .with_db_path(opt.db_dir)\n\n .with_address(opt.address)\n\n .with_asset_dirs(opt.asset_dirs)\n\n .run();\n\n}\n", "file_path": "demo/src/daemon.rs", "rank": 16, "score": 114788.96627161751 }, { "content": "pub fn cook_prefab(\n\n universe: &Universe,\n\n asset_manager: &mut AssetResource,\n\n registered_components: &HashMap<ComponentTypeId, ComponentRegistration>,\n\n registered_components_by_uuid: &HashMap<ComponentTypeUuid, ComponentRegistration>,\n\n prefab_uuid: AssetUuid,\n\n) -> CookedPrefab {\n\n let resources = Resources::default();\n\n\n\n // Create the clone_merge impl. For prefab cooking, we will clone everything so we don't need to\n\n // set up any transformations\n\n let clone_merge_impl = CopyCloneImpl::new(registered_components);\n\n\n\n // This will allow us to look up prefabs by AssetUuid\n\n let mut prefab_lookup = HashMap::new();\n\n\n\n // This will allow us to look up the cooked entity ID by the entity's original UUID\n\n let mut entity_lookup = HashMap::new();\n\n\n\n // This will hold the asset IDs sorted with dependencies first. 
This ensures that\n", "file_path": "demo/src/prefab_cooking.rs", "rank": 17, "score": 110629.86966478647 }, { "content": "pub fn editor_process_edit_diffs(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n EditorStateResource::process_diffs(world, resources);\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 18, "score": 103603.63167828656 }, { "content": "pub fn editor_process_editor_ops(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n EditorStateResource::process_editor_ops(world, resources);\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 19, "score": 103603.63167828656 }, { "content": "pub fn editor_process_selection_ops(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n let mut editor_selection = resources.get_mut::<EditorSelectionResource>().unwrap();\n\n let mut editor_state = resources.get_mut::<EditorStateResource>().unwrap();\n\n let universe = resources.get_mut::<UniverseResource>().unwrap();\n\n\n\n editor_selection.process_selection_ops(&mut *editor_state, &*universe, world);\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 20, "score": 103603.63167828656 }, { "content": "pub fn editor_inspector_window(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n {\n\n let mut selection_world = resources.get::<EditorSelectionResource>().unwrap();\n\n\n\n let mut imgui_manager = resources.get::<ImguiResource>().unwrap();\n\n\n\n let mut editor_ui_state = resources.get_mut::<EditorStateResource>().unwrap();\n\n\n\n let mut universe_resource = resources.get::<UniverseResource>().unwrap();\n\n\n\n let opened_prefab = editor_ui_state.opened_prefab();\n\n if opened_prefab.is_none() {\n\n return;\n\n }\n\n\n\n let opened_prefab = opened_prefab.unwrap();\n\n\n", "file_path": "demo/src/systems/editor_systems/inspector_window.rs", "rank": 21, "score": 103603.63167828656 }, { "content": "pub fn reload_editor_state_if_file_changed(\n\n world: &mut World,\n\n resources: &mut Resources,\n\n) {\n\n EditorStateResource::hot_reload_if_asset_changed(world, resources);\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 22, "score": 102068.68808253041 }, { "content": "struct Line {\n\n p0: glam::Vec2,\n\n p1: glam::Vec2,\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 23, "score": 101551.16234930996 }, { "content": "pub fn create_editor_selection_registry() -> EditorSelectableRegistry {\n\n let mut registry = EditorSelectableRegistry::default();\n\n registry.register::<DrawSkiaBoxComponent>();\n\n registry.register::<DrawSkiaCircleComponent>();\n\n registry.register_transformed::<RigidBodyBoxComponentDef, RigidBodyComponent>();\n\n registry.register_transformed::<RigidBodyBallComponentDef, RigidBodyComponent>();\n\n registry\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 24, "score": 100518.15713410851 }, { "content": "struct CircleOutline {\n\n center: glam::Vec2,\n\n radius: f32,\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 25, "score": 99923.29766925862 }, { "content": "struct ShapeWithId {\n\n id: String,\n\n shape: Shape,\n\n}\n\n\n\nimpl ShapeWithId {\n\n fn new_line(\n\n id: String,\n\n p0: glam::Vec2,\n\n p1: glam::Vec2,\n\n ) -> Self {\n\n ShapeWithId {\n\n id,\n\n shape: Shape::Line(Line { p0, p1 }),\n\n }\n\n }\n\n\n\n fn new_circle_outline(\n\n id: String,\n\n center: glam::Vec2,\n\n radius: f32,\n\n ) -> Self {\n\n ShapeWithId {\n\n id,\n\n 
shape: Shape::CircleOutline(CircleOutline { center, radius }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 26, "score": 99923.29766925862 }, { "content": "/// Contains the data required to identify the current transaction by ID and commit or cancel the\n\n/// transaction\n\nstruct CurrentTransactionInfo {\n\n /// The ID of the transaction that is currently in progress\n\n id: EditorTransactionId,\n\n\n\n /// The diffs required to commit or cancel the transaction (apply vs. revert)\n\n diffs: TransactionDiffs,\n\n}\n\n\n\npub struct EditorStateResource {\n\n // Indicates the overall state of the editor (i.e. editing vs. playing)\n\n editor_mode: EditorMode,\n\n\n\n // Runtime state for editing UI\n\n window_options_running: WindowOptions,\n\n window_options_editing: WindowOptions,\n\n active_editor_tool: EditorTool,\n\n pub add_component_search_text: ImString,\n\n\n\n // If a prefab is opened, this holds the state associated with editing it\n\n opened_prefab: Option<Arc<OpenedPrefabState>>,\n", "file_path": "demo/src/resources/editor_resources/editor_state.rs", "rank": 27, "score": 98368.92404576832 }, { "content": "pub fn draw() -> Box<dyn Schedulable> {\n\n // Copy the data from physics rigid bodies into position components\n\n SystemBuilder::new(\"draw\")\n\n .write_resource::<CanvasDrawResource>()\n\n .read_resource::<FpsTextResource>()\n\n .write_resource::<CameraResource>()\n\n .write_resource::<ViewportResource>()\n\n .read_resource::<InputResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .with_query(<(\n\n Read<Position2DComponent>,\n\n Read<DrawSkiaBoxComponent>,\n\n TryRead<UniformScale2DComponent>,\n\n TryRead<NonUniformScale2DComponent>,\n\n TryRead<Rotation2DComponent>,\n\n )>::query())\n\n .with_query(<(\n\n Read<Position2DComponent>,\n\n Read<DrawSkiaCircleComponent>,\n\n TryRead<UniformScale2DComponent>,\n", "file_path": "demo/src/systems/draw_systems.rs", "rank": 28, "score": 97130.80580613541 }, { "content": "struct ClosestShapeIndexDistance {\n\n index: usize,\n\n distance_sq: f32,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct EditorShapeClickedState {\n\n pub click_position: glam::Vec2,\n\n pub shape_id: String,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct EditorShapeDragState {\n\n pub begin_position: glam::Vec2,\n\n pub end_position: glam::Vec2,\n\n pub previous_frame_delta: glam::Vec2,\n\n pub accumulated_frame_delta: glam::Vec2,\n\n pub world_space_begin_position: glam::Vec2,\n\n pub world_space_end_position: glam::Vec2,\n\n pub world_space_previous_frame_delta: glam::Vec2,\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 29, "score": 96882.98673921038 }, { "content": "/// Diffs that are pending being applied\n\nstruct TransactionDiffsPendingApply {\n\n /// The diffs required to apply/revert the transaction\n\n diffs: TransactionDiffs,\n\n\n\n /// If true, an undo step will be recorded\n\n commit_changes: bool,\n\n\n\n /// How to handle selection after the transaction commits\n\n post_commit_selection: PostCommitSelection,\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_state.rs", "rank": 30, "score": 96882.98673921038 }, { "content": "struct ClosestShapeIdDistance {\n\n id: String,\n\n distance_sq: f32,\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 31, "score": 96882.98673921038 }, { "content": "// this is based on window size (i.e. 
pixels)\n\n// bottom-left: (0, 0)\n\n// top-right: (window_width_in_pixels, window_height_in_pixels)\n\nfn calculate_ui_space_matrix(logical_size: PhysicalSize<u32>) -> glam::Mat4 {\n\n let view = glam::Mat4::look_at_rh(\n\n glam::Vec3::from([0.0, 0.0, 5.0]),\n\n glam::Vec3::from([0.0, 0.0, 0.0]),\n\n glam::Vec3::from([0.0, 1.0, 0.0]),\n\n );\n\n\n\n let projection = glam::Mat4::orthographic_rh(\n\n 0.0,\n\n logical_size.width as f32,\n\n 0.0,\n\n logical_size.height as f32,\n\n -100.0,\n\n 100.0,\n\n );\n\n\n\n projection * view\n\n}\n\n\n", "file_path": "demo/src/resources/viewport.rs", "rank": 32, "score": 96753.19656737357 }, { "content": "pub fn advance_time() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"advance_time\")\n\n .write_resource::<TimeResource>()\n\n .build(|_, _, time_resource, _| {\n\n time_resource.process_time_ops();\n\n time_resource.advance_time();\n\n\n\n let now = time_resource.time_state.current_instant();\n\n if time_resource\n\n .print_fps_event\n\n .try_take_event(now, std::time::Duration::from_secs(1))\n\n {\n\n log::debug!(\"fps: {}\", time_resource.time_state.updates_per_second());\n\n }\n\n })\n\n}\n", "file_path": "demo/src/systems/time_systems.rs", "rank": 33, "score": 95514.28292417839 }, { "content": "#[derive(Default)]\n\nstruct RegisteredEditorInspector<T> {\n\n phantom_data: PhantomData<T>,\n\n}\n\n\n\nimpl<T> RegisteredEditorInspector<T>\n\nwhere\n\n T: InspectRenderStruct<T>,\n\n{\n\n fn new() -> Self {\n\n RegisteredEditorInspector {\n\n phantom_data: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> RegisteredEditorInspectorT for RegisteredEditorInspector<T>\n\nwhere\n\n T: InspectRenderStruct<T> + legion::storage::Component,\n\n{\n\n fn render(\n", "file_path": "demo/src/inspect.rs", "rank": 34, "score": 94722.25826534307 }, { "content": "pub fn editor_keybinds() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_input\")\n\n .write_resource::<EditorStateResource>()\n\n .read_resource::<InputResource>()\n\n .read_resource::<ViewportResource>()\n\n .write_resource::<EditorSelectionResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .write_resource::<EditorDrawResource>()\n\n .read_resource::<UniverseResource>()\n\n .with_query(<(Read<Position2DComponent>)>::query())\n\n .build(\n\n |command_buffer,\n\n subworld,\n\n (\n\n editor_state,\n\n input_state,\n\n viewport,\n\n editor_selection,\n\n debug_draw,\n\n editor_draw,\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 35, "score": 93979.33932842224 }, { "content": "pub fn create_update_schedule(criteria: &ScheduleCriteria) -> Schedule {\n\n ScheduleBuilder::new(criteria)\n\n .always(update_input_resource)\n\n .always(advance_time)\n\n .always(quit_if_escape_pressed)\n\n .always(update_asset_manager)\n\n .always(update_fps_text)\n\n .always(update_physics)\n\n .simulation_unpaused_only(read_from_physics)\n\n // --- Editor stuff here ---\n\n // Prepare to handle editor input\n\n .always_thread_local(editor_refresh_selection_world)\n\n // Editor input\n\n .always_thread_local(reload_editor_state_if_file_changed)\n\n .always(editor_keybinds)\n\n .always(editor_mouse_input)\n\n .always(editor_update_editor_draw)\n\n .always(editor_gizmos)\n\n .always(editor_handle_selection)\n\n .always(editor_imgui_menu)\n", "file_path": "demo/src/systems/mod.rs", "rank": 36, "score": 93979.33932842224 }, { "content": "pub fn editor_gizmos() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_input\")\n\n .write_resource::<EditorStateResource>()\n\n 
.read_resource::<InputResource>()\n\n .read_resource::<ViewportResource>()\n\n .write_resource::<EditorSelectionResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .write_resource::<EditorDrawResource>()\n\n .read_resource::<UniverseResource>()\n\n .with_query(<(Read<Position2DComponent>)>::query())\n\n .with_query(<(\n\n Read<Position2DComponent>,\n\n TryRead<UniformScale2DComponent>,\n\n TryRead<NonUniformScale2DComponent>,\n\n )>::query())\n\n .with_query(<(Read<Position2DComponent>, Read<Rotation2DComponent>)>::query())\n\n .build(\n\n |command_buffer,\n\n subworld,\n\n (\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 37, "score": 93979.33932842224 }, { "content": "pub fn create_draw_schedule(criteria: &ScheduleCriteria) -> Schedule {\n\n ScheduleBuilder::new(criteria).always(draw).build()\n\n}\n", "file_path": "demo/src/systems/mod.rs", "rank": 38, "score": 93979.33932842224 }, { "content": "// Call this to mark the start of the next frame (i.e. \"key just down\" will return false). This goes\n\n// at the end of the frame, winit will fire events after we exit the frame, and then\n\n// update_input_resource will be called at the start of the next frame\n\npub fn input_reset_for_next_frame() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"input end frame\")\n\n .write_resource::<InputResource>()\n\n .build(|_, _, input, _| {\n\n input.end_frame();\n\n })\n\n}\n", "file_path": "demo/src/systems/input_systems.rs", "rank": 39, "score": 92523.06459929995 }, { "content": "pub fn editor_mouse_input() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_input\")\n\n .read_resource::<InputResource>()\n\n .write_resource::<CameraResource>()\n\n .read_resource::<ViewportResource>()\n\n .build(\n\n |command_buffer, subworld, (input_state, camera_resource, viewport_resource), _| {\n\n // Right click drag pans the viewport\n\n if let Some(mouse_drag) = input_state.mouse_drag_in_progress(MouseButton::Right) {\n\n let mut delta = mouse_drag.world_scale_previous_frame_delta;\n\n delta *= glam::Vec2::new(-1.0, -1.0);\n\n camera_resource.position += delta;\n\n }\n\n\n\n // Right click drag pans the viewport\n\n let mouse_scroll = input_state.mouse_wheel_delta();\n\n let mut delta = match mouse_scroll {\n\n MouseScrollDelta::LineDelta(_, y) => y,\n\n MouseScrollDelta::PixelDelta(delta) => delta.y as f32,\n\n };\n\n\n\n let delta = 1.05_f32.powf(-delta);\n\n camera_resource.x_half_extents *= delta;\n\n },\n\n )\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 40, "score": 92519.9515502968 }, { "content": "pub fn draw_selection_shapes() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"draw_selection_shapes\")\n\n .write_resource::<EditorSelectionResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .build(|_, _, (editor_selection, debug_draw), _| {\n\n let aabbs = editor_selection.selected_entity_aabbs();\n\n\n\n for (_, aabb) in aabbs {\n\n if let Some(aabb) = aabb {\n\n let color = glam::vec4(1.0, 1.0, 0.0, 1.0);\n\n\n\n // An amount to expand the AABB by so that we don't draw on top of the shape.\n\n // Found in actual usage this ended up being annoying.\n\n let expand = glam::vec2(0.0, 0.0);\n\n\n\n debug_draw.add_rect(\n\n glam::vec2(aabb.mins().x, aabb.mins().y) - expand,\n\n glam::vec2(aabb.maxs().x, aabb.maxs().y) + expand,\n\n color,\n\n );\n\n }\n\n }\n\n })\n\n}\n", "file_path": "demo/src/systems/editor_systems/selection.rs", "rank": 41, "score": 92519.9515502968 }, { "content": "pub fn update_asset_manager() -> Box<dyn 
Schedulable> {\n\n SystemBuilder::new(\"update asset manager\")\n\n .write_resource::<AssetResource>()\n\n .build(|_, _, asset_manager, _| {\n\n asset_manager.update();\n\n })\n\n}\n", "file_path": "demo/src/systems/asset_manager_systems.rs", "rank": 42, "score": 92519.9515502968 }, { "content": "pub fn update_fps_text() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"update fps text\")\n\n .read_resource::<TimeResource>()\n\n .write_resource::<FpsTextResource>()\n\n .build(|_, _, (time_resource, fps_text), _| {\n\n let now = time_resource.time_state.current_instant();\n\n //\n\n // Update FPS once a second\n\n //\n\n let update_text_string = match fps_text.last_fps_text_change {\n\n Some(last_update_instant) => (now - last_update_instant).as_secs_f32() >= 1.0,\n\n None => true,\n\n };\n\n\n\n // Refresh FPS text\n\n if update_text_string {\n\n let fps = time_resource.time_state.updates_per_second();\n\n fps_text.fps_text = format!(\"Fps: {:.1}\", fps);\n\n fps_text.last_fps_text_change = Some(now);\n\n }\n\n })\n\n}\n", "file_path": "demo/src/systems/fps_text_systems.rs", "rank": 43, "score": 92519.9515502968 }, { "content": "pub fn quit_if_escape_pressed() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"quit_if_escape_pressed\")\n\n .read_resource::<InputResource>()\n\n .write_resource::<AppControlResource>()\n\n .build(|_, _, (input_state, app_control), _| {\n\n if input_state.is_key_down(VirtualKeyCode::Escape) {\n\n app_control.enqueue_terminate_process();\n\n }\n\n })\n\n}\n", "file_path": "demo/src/systems/app_control_systems.rs", "rank": 44, "score": 92519.9515502968 }, { "content": "fn distance_to_segment_sq(\n\n test_point: glam::Vec2,\n\n p0: glam::Vec2,\n\n p1: glam::Vec2,\n\n) -> f32 {\n\n let p0_to_p1 = p1 - p0;\n\n\n\n // Early out in case of extremely short segment, get distance to midpoint\n\n if p0_to_p1.length_squared() < 0.01 {\n\n let midpoint = p0 + (p0_to_p1 / 2.0);\n\n return (test_point - midpoint).length_squared();\n\n }\n\n\n\n // Get \"tangent\" and \"normal\" of the segment\n\n let tangent = p0_to_p1.normalize();\n\n let normal = glam::Vec2::new(tangent.y(), -tangent.x());\n\n\n\n // distance to the infinite line described by the points\n\n let p0_to_test_point = test_point - p0;\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 45, "score": 91632.16523225629 }, { "content": "pub fn editor_imgui_menu() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_imgui_menu\")\n\n .write_resource::<ImguiResource>()\n\n .write_resource::<EditorStateResource>()\n\n .read_resource::<TimeResource>()\n\n .build(|command_buffer, _, (imgui, editor_state, time_state), _| {\n\n imgui.with_ui(|ui| {\n\n {\n\n let window_settings = editor_state.window_options_mut();\n\n if window_settings.show_imgui_metrics {\n\n ui.show_metrics_window(&mut window_settings.show_imgui_metrics);\n\n }\n\n\n\n if window_settings.show_imgui_style_editor {\n\n imgui::Window::new(im_str!(\"Editor\")).build(ui, || {\n\n ui.show_default_style_editor();\n\n });\n\n }\n\n\n\n if window_settings.show_imgui_demo {\n", "file_path": "demo/src/systems/editor_systems/main_menu.rs", "rank": 46, "score": 91130.6748735304 }, { "content": "pub fn create_component_registry() -> HashMap<ComponentTypeId, ComponentRegistration> {\n\n let comp_registrations = legion_prefab::iter_component_registrations();\n\n use std::iter::FromIterator;\n\n let component_types: HashMap<ComponentTypeId, ComponentRegistration> = HashMap::from_iter(\n\n comp_registrations.map(|reg| 
(ComponentTypeId(reg.ty().clone(), #[cfg(feature = \"ffi\")] 0), reg.clone())),\n\n );\n\n\n\n component_types\n\n}\n\n\n", "file_path": "demo/src/lib.rs", "rank": 47, "score": 91130.6748735304 }, { "content": "pub fn editor_update_editor_draw() -> Box<dyn Schedulable> {\n\n SystemBuilder::new(\"editor_input\")\n\n .write_resource::<EditorStateResource>()\n\n .read_resource::<InputResource>()\n\n .read_resource::<ViewportResource>()\n\n .write_resource::<EditorSelectionResource>()\n\n .write_resource::<DebugDrawResource>()\n\n .write_resource::<EditorDrawResource>()\n\n .read_resource::<UniverseResource>()\n\n .with_query(<(Read<Position2DComponent>)>::query())\n\n .build(\n\n |command_buffer,\n\n subworld,\n\n (\n\n editor_state,\n\n input_state,\n\n viewport,\n\n editor_selection,\n\n debug_draw,\n\n editor_draw,\n\n universe_resource,\n\n ),\n\n (position_query)| {\n\n editor_draw.update(input_state.input_state(), &*viewport);\n\n },\n\n )\n\n}\n", "file_path": "demo/src/systems/editor_systems/mod.rs", "rank": 48, "score": 91130.6748735304 }, { "content": "struct CanvasDrawContextInner {\n\n canvas: *mut skia_safe::Canvas,\n\n coordinate_system_helper: CoordinateSystemHelper,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CanvasDrawResource {\n\n inner: std::sync::Mutex<Option<CanvasDrawContextInner>>,\n\n}\n\n\n\nunsafe impl Send for CanvasDrawResource {}\n\nunsafe impl Sync for CanvasDrawResource {}\n\n\n\nimpl CanvasDrawResource {\n\n pub fn begin_draw_context(\n\n &mut self,\n\n canvas: &mut skia_safe::Canvas,\n\n coordinate_system_helper: skulpin::CoordinateSystemHelper,\n\n ) {\n\n let mut lock = self.inner.lock().unwrap();\n", "file_path": "demo/src/resources/canvas_draw.rs", "rank": 49, "score": 90813.19098535297 }, { "content": "fn distance_to_circle_outline_sq(\n\n test_point: glam::Vec2,\n\n center: glam::Vec2,\n\n radius: f32,\n\n) -> f32 {\n\n ((test_point - center).length_squared() - (radius * radius)).abs()\n\n}\n\n\n", "file_path": "demo/src/resources/editor_resources/editor_draw.rs", "rank": 50, "score": 90162.40784888207 }, { "content": "pub fn vec3_glam_to_glm(value: glam::Vec3) -> glm::Vec3 {\n\n glm::Vec3::new(value.x(), value.y(), value.z())\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 51, "score": 90095.99022416037 }, { "content": "pub fn vec3_glm_to_glam(value: glm::Vec3) -> glam::Vec3 {\n\n glam::Vec3::new(value.x, value.y, value.z)\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 52, "score": 90095.99022416037 }, { "content": "pub fn vec4_glam_to_glm(value: glam::Vec4) -> glm::Vec4 {\n\n glm::Vec4::new(value.x(), value.y(), value.z(), value.w())\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 53, "score": 90095.99022416037 }, { "content": "pub fn vec4_glm_to_glam(value: glm::Vec4) -> glam::Vec4 {\n\n glam::Vec4::new(value.x, value.y, value.z, value.w)\n\n}\n\n\n", "file_path": "demo/src/math.rs", "rank": 54, "score": 90095.99022416037 }, { "content": "pub fn init_imgui_manager(window: &winit::window::Window) -> ImguiManager {\n\n let mut imgui_context = init_imgui(&window);\n\n let mut imgui_platform = imgui_winit_support::WinitPlatform::init(&mut imgui_context);\n\n\n\n imgui_platform.attach_window(\n\n imgui_context.io_mut(),\n\n &window,\n\n imgui_winit_support::HiDpiMode::Locked(window.scale_factor()),\n\n );\n\n\n\n ImguiManager::new(imgui_context, imgui_platform)\n\n}\n", "file_path": "demo/src/app.rs", "rank": 55, "score": 88636.60244603493 }, { "content": "fn handle_selection(\n\n editor_draw: &EditorDrawResource,\n\n input_state: 
&InputResource,\n\n viewport: &ViewportResource,\n\n editor_selection: &mut EditorSelectionResource,\n\n debug_draw: &mut DebugDrawResource,\n\n) {\n\n let mut intersecting_entities = None;\n\n\n\n if editor_draw.is_interacting_with_anything() {\n\n println!(\"is interacting\");\n\n //\n\n // If the user is doing something with the editor draw API, disable the selection logic\n\n //\n\n } else if let Some(position) = input_state.mouse_button_just_clicked_position(MouseButton::Left)\n\n {\n\n println!(\"just clicked\");\n\n //\n\n // Handle a single click. Do a raycast to find find anything under the mouse position\n\n //\n", "file_path": "demo/src/systems/editor_systems/selection.rs", "rank": 56, "score": 88265.18992394027 }, { "content": "#[derive(TypeUuid, Serialize, Deserialize, Default)]\n\n#[uuid = \"3c8367c8-45fb-40bb-a229-00e5e9c3fc70\"]\n\nstruct SimpleState(Option<AssetUuid>);\n\n\n", "file_path": "demo/src/pipeline/image/importers.rs", "rank": 58, "score": 87499.93330436217 }, { "content": "pub fn init_imgui_manager(window: &winit::window::Window) -> ImguiManager {\n\n let mut imgui_context = init_imgui(&window);\n\n let mut imgui_platform = imgui_winit_support::WinitPlatform::init(&mut imgui_context);\n\n\n\n imgui_platform.attach_window(\n\n imgui_context.io_mut(),\n\n &window,\n\n imgui_winit_support::HiDpiMode::Rounded,\n\n );\n\n\n\n ImguiManager::new(imgui_context, imgui_platform)\n\n}\n", "file_path": "demo/src/imgui_support.rs", "rank": 59, "score": 87247.32576926853 }, { "content": "// this is a virtual coordinate system\n\n// top-left: (0, 0)\n\n// bottom-right: (600 * aspect_ratio, 600) where aspect_ratio is window_width / window_height\n\nfn calculate_screen_space_matrix(\n\n logical_size: PhysicalSize<u32>,\n\n view_half_extents: glam::Vec2,\n\n) -> glam::Mat4 {\n\n let view = glam::Mat4::look_at_rh(\n\n glam::Vec3::from([0.0, 0.0, 5.0]),\n\n glam::Vec3::from([0.0, 0.0, 0.0]),\n\n glam::Vec3::from([0.0, 1.0, 0.0]),\n\n );\n\n\n\n let projection = glam::Mat4::orthographic_rh(\n\n 0.0,\n\n view_half_extents.x() * 2.0,\n\n view_half_extents.y() * 2.0,\n\n 0.0,\n\n -100.0,\n\n 100.0,\n\n );\n\n\n\n projection * view\n\n}\n\n\n", "file_path": "demo/src/resources/viewport.rs", "rank": 60, "score": 85808.01931610503 }, { "content": "fn handle_rotate_gizmo_input(\n\n editor_draw: &mut EditorDrawResource,\n\n tx: &mut EditorTransaction,\n\n) -> GizmoResult {\n\n if let Some(drag_in_progress) =\n\n editor_draw.shape_drag_in_progress_or_just_finished(MouseButton::Left)\n\n {\n\n // See what if any axis we will operate on\n\n let mut rotate_z = false;\n\n if drag_in_progress.shape_id == \"z_axis_rotate\" {\n\n rotate_z = true;\n\n }\n\n\n\n // Early out if we didn't touch either axis\n\n if !rotate_z {\n\n return GizmoResult::NoChange;\n\n }\n\n\n\n //TODO: It might be possible to detect the dragged shape's center, compare it to mouse\n\n // position, and track a 1:1 rotation with mouse movement\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 61, "score": 84730.23249832002 }, { "content": "fn handle_scale_gizmo_input(\n\n editor_draw: &mut EditorDrawResource,\n\n tx: &mut EditorTransaction,\n\n) -> GizmoResult {\n\n if let Some(drag_in_progress) =\n\n editor_draw.shape_drag_in_progress_or_just_finished(MouseButton::Left)\n\n {\n\n // See what if any axis we will operate on\n\n let mut scale_x = false;\n\n let mut scale_y = false;\n\n let mut scale_uniform = false;\n\n if drag_in_progress.shape_id == \"x_axis_scale\" {\n\n scale_x = true;\n\n } else if 
drag_in_progress.shape_id == \"y_axis_scale\" {\n\n scale_y = true;\n\n } else if drag_in_progress.shape_id == \"uniform_scale\" {\n\n scale_uniform = true;\n\n }\n\n\n\n // Early out if we didn't touch either axis\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 62, "score": 84730.23249832002 }, { "content": "fn handle_translate_gizmo_input(\n\n editor_draw: &mut EditorDrawResource,\n\n tx: &mut EditorTransaction,\n\n) -> GizmoResult {\n\n if let Some(drag_in_progress) =\n\n editor_draw.shape_drag_in_progress_or_just_finished(MouseButton::Left)\n\n {\n\n // See what if any axis we will operate on\n\n let mut translate_x = false;\n\n let mut translate_y = false;\n\n if drag_in_progress.shape_id == \"x_axis_translate\" {\n\n translate_x = true;\n\n } else if drag_in_progress.shape_id == \"y_axis_translate\" {\n\n translate_y = true;\n\n } else if drag_in_progress.shape_id == \"xy_axis_translate\" {\n\n translate_x = true;\n\n translate_y = true;\n\n }\n\n\n\n // Early out if we didn't touch either axis\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 63, "score": 84730.23249832002 }, { "content": "use glam::Vec2;\n\nuse nphysics2d::object::{DefaultBodySet, DefaultColliderSet, DefaultBodyHandle, DefaultColliderHandle};\n\nuse nphysics2d::force_generator::DefaultForceGeneratorSet;\n\nuse nphysics2d::joint::DefaultJointConstraintSet;\n\nuse nphysics2d::world::{DefaultMechanicalWorld, DefaultGeometricalWorld};\n\n\n\nuse crossbeam_channel::{Sender, Receiver};\n\n\n\n// Handles setting up the physics system and stepping it\n\npub struct PhysicsResource {\n\n pub geometrical_world: DefaultGeometricalWorld<f32>,\n\n pub mechanical_world: DefaultMechanicalWorld<f32>,\n\n pub bodies: DefaultBodySet<f32>,\n\n pub colliders: DefaultColliderSet<f32>,\n\n pub joint_constraints: DefaultJointConstraintSet<f32>,\n\n pub force_generators: DefaultForceGeneratorSet<f32>,\n\n pub delete_body_tx: Sender<DefaultBodyHandle>,\n\n pub delete_body_rx: Receiver<DefaultBodyHandle>,\n\n}\n\n\n", "file_path": "demo/src/resources/physics.rs", "rank": 64, "score": 72801.05356124576 }, { "content": "impl PhysicsResource {\n\n pub fn new(gravity: Vec2) -> Self {\n\n let geometrical_world = DefaultGeometricalWorld::<f32>::new();\n\n let mechanical_world = DefaultMechanicalWorld::new(crate::math::vec2_glam_to_glm(gravity));\n\n\n\n let bodies = DefaultBodySet::<f32>::new();\n\n let colliders = DefaultColliderSet::new();\n\n let joint_constraints = DefaultJointConstraintSet::<f32>::new();\n\n let force_generators = DefaultForceGeneratorSet::<f32>::new();\n\n\n\n let (delete_body_tx, delete_body_rx) = crossbeam_channel::unbounded();\n\n\n\n PhysicsResource {\n\n geometrical_world,\n\n mechanical_world,\n\n bodies,\n\n colliders,\n\n joint_constraints,\n\n force_generators,\n\n delete_body_tx,\n", "file_path": "demo/src/resources/physics.rs", "rank": 65, "score": 72797.61000747164 }, { "content": " colliders_to_remove.push(collider_handle);\n\n }\n\n }\n\n\n\n for collider_to_remove in colliders_to_remove {\n\n self.colliders.remove(collider_to_remove);\n\n }\n\n }\n\n }\n\n\n\n pub fn maintain(&mut self) {\n\n self.handle_deletes();\n\n\n\n self.mechanical_world.maintain(\n\n &mut self.geometrical_world,\n\n &mut self.bodies,\n\n &mut self.colliders,\n\n &mut self.joint_constraints,\n\n );\n\n }\n", "file_path": "demo/src/resources/physics.rs", "rank": 66, "score": 72785.92824863495 }, { "content": "\n\n pub fn step(&mut self) {\n\n self.handle_deletes();\n\n\n\n // Run the 
simulation.\n\n self.mechanical_world.step(\n\n &mut self.geometrical_world,\n\n &mut self.bodies,\n\n &mut self.colliders,\n\n &mut self.joint_constraints,\n\n &mut self.force_generators,\n\n );\n\n }\n\n}\n", "file_path": "demo/src/resources/physics.rs", "rank": 67, "score": 72784.10648946004 }, { "content": " delete_body_rx,\n\n }\n\n }\n\n\n\n pub fn delete_body_tx(&self) -> &Sender<DefaultBodyHandle> {\n\n &self.delete_body_tx\n\n }\n\n\n\n fn handle_deletes(&mut self) {\n\n // Delete any bodies that were destroyed since the previous update\n\n for body_to_delete in self.delete_body_rx.try_iter() {\n\n self.bodies.remove(body_to_delete);\n\n\n\n // This is a workaround for this issue: https://github.com/rustsim/nphysics/issues/248\n\n // It's not a long term fix since a linear search across all colliders to find the ones\n\n // attached to this body is expensive. This is only necessary if creating/destroying\n\n // entities in the same frame (between step() and maintain() calls)\n\n let mut colliders_to_remove = vec![];\n\n for (collider_handle, collider) in self.colliders.iter() {\n\n if collider.body() == body_to_delete {\n", "file_path": "demo/src/resources/physics.rs", "rank": 68, "score": 72783.11286706766 }, { "content": "pub struct LineList {\n\n pub points: Vec<glam::Vec2>,\n\n pub color: glam::Vec4,\n\n}\n\n\n\nimpl LineList {\n\n pub fn new(\n\n points: Vec<glam::Vec2>,\n\n color: glam::Vec4,\n\n ) -> Self {\n\n LineList { points, color }\n\n }\n\n}\n\n\n\npub struct DebugDrawResource {\n\n line_lists: Vec<LineList>,\n\n}\n\n\n\nimpl DebugDrawResource {\n\n pub fn new() -> Self {\n", "file_path": "demo/src/resources/debug_draw.rs", "rank": 69, "score": 70841.77919816498 }, { "content": " DebugDrawResource { line_lists: vec![] }\n\n }\n\n\n\n // Adds a single polygon\n\n pub fn add_polygon(\n\n &mut self,\n\n mut points: Vec<glam::Vec2>,\n\n color: glam::Vec4,\n\n ) {\n\n // Nothing will draw if we don't have at least 2 points\n\n if points.len() > 1 {\n\n points.push(points[0].clone());\n\n self.line_lists.push(LineList::new(points, color));\n\n }\n\n }\n\n\n\n pub fn add_tristrip(\n\n &mut self,\n\n points: &Vec<glam::Vec2>,\n\n color: glam::Vec4,\n", "file_path": "demo/src/resources/debug_draw.rs", "rank": 70, "score": 70837.23387595554 }, { "content": " ) {\n\n // Nothing will draw if we don't have at least 2 points\n\n for index in 0..(points.len() - 2) {\n\n let v = vec![points[index], points[index + 1], points[index + 2]];\n\n self.add_polygon(v, color);\n\n }\n\n }\n\n\n\n pub fn add_circle(\n\n &mut self,\n\n center: glam::Vec2,\n\n radius: f32,\n\n color: glam::Vec4,\n\n ) {\n\n let point_count = 12;\n\n\n\n let mut points = Vec::with_capacity(point_count);\n\n for index in 0..point_count {\n\n let fraction = (index as f32 / point_count as f32) * std::f32::consts::PI * 2.0;\n\n\n", "file_path": "demo/src/resources/debug_draw.rs", "rank": 71, "score": 70831.64133457393 }, { "content": " self.add_polygon(points, color);\n\n }\n\n\n\n pub fn add_line(\n\n &mut self,\n\n p0: glam::Vec2,\n\n p1: glam::Vec2,\n\n color: glam::Vec4,\n\n ) {\n\n let points = vec![p0, p1];\n\n\n\n self.add_polygon(points, color);\n\n }\n\n\n\n // Returns the draw data, leaving this object in an empty state\n\n pub fn take_line_lists(&mut self) -> Vec<LineList> {\n\n std::mem::replace(&mut self.line_lists, vec![])\n\n }\n\n\n\n // Recommended to call every frame to ensure that this doesn't grow unbounded\n\n pub fn clear(&mut self) {\n\n self.line_lists.clear();\n\n }\n\n}\n", "file_path": 
"demo/src/resources/debug_draw.rs", "rank": 72, "score": 70829.82786085115 }, { "content": " points.push(glam::Vec2::new(fraction.sin() * radius, fraction.cos() * radius) + center);\n\n }\n\n\n\n self.add_polygon(points, color);\n\n }\n\n\n\n pub fn add_rect(\n\n &mut self,\n\n p0: glam::Vec2,\n\n p1: glam::Vec2,\n\n color: glam::Vec4,\n\n ) {\n\n let points = vec![\n\n p0,\n\n glam::vec2(p0.x(), p1.y()),\n\n p1,\n\n glam::vec2(p1.x(), p0.y()),\n\n p0,\n\n ];\n\n\n", "file_path": "demo/src/resources/debug_draw.rs", "rank": 73, "score": 70828.7164366591 }, { "content": "// Inner state for ImguiManager, which will be protected by a Mutex. Mutex protection required since\n\n// this object is Send but not Sync\n\nstruct Inner {\n\n context: imgui::Context,\n\n\n\n // Pointer to the font atlas. Assuming no direct calls to C imgui interface, this pointer is\n\n // valid as long as context is not destroyed\n\n font_atlas_texture: *mut imgui::FontAtlasTexture<'static>,\n\n\n\n // Pointer to the current UI. Assuming no direct calls to C imgui interface, this pointer is\n\n // valid as long as context is not destroyed, and a frame has started and not ended\n\n ui: Option<*mut imgui::Ui<'static>>,\n\n\n\n // Handles the integration between imgui and winit\n\n platform: imgui_winit_support::WinitPlatform,\n\n\n\n // These are all refreshed when frame is started\n\n want_capture_keyboard: bool,\n\n want_capture_mouse: bool,\n\n want_set_mouse_pos: bool,\n\n want_text_input: bool,\n\n}\n", "file_path": "demo/src/imgui_support.rs", "rank": 74, "score": 64461.77851684553 }, { "content": "#[derive(TypeUuid)]\n\n#[uuid = \"720d636b-b79c-42d4-8f46-a2d8e1ada46e\"]\n\nstruct ImageImporter;\n\nimpl Importer for ImageImporter {\n\n fn version_static() -> u32\n\n where\n\n Self: Sized,\n\n {\n\n 1\n\n }\n\n fn version(&self) -> u32 {\n\n Self::version_static()\n\n }\n\n\n\n type Options = ();\n\n\n\n type State = SimpleState;\n\n\n\n /// Reads the given bytes and produces assets.\n\n fn import(\n\n &self,\n\n source: &mut dyn Read,\n", "file_path": "demo/src/pipeline/image/importers.rs", "rank": 75, "score": 62352.89063851435 }, { "content": "struct AssetState<A> {\n\n version: u32,\n\n asset: A,\n\n}\n\npub struct Storage<A: TypeUuid> {\n\n refop_sender: Arc<Sender<RefOp>>,\n\n assets: HashMap<LoadHandle, AssetState<A>>,\n\n uncommitted: HashMap<LoadHandle, AssetState<A>>,\n\n}\n\nimpl<A: TypeUuid> Storage<A> {\n\n fn new(sender: Arc<Sender<RefOp>>) -> Self {\n\n Self {\n\n refop_sender: sender,\n\n assets: HashMap::new(),\n\n uncommitted: HashMap::new(),\n\n }\n\n }\n\n fn get<T: AssetHandle>(\n\n &self,\n\n handle: &T,\n", "file_path": "demo/src/asset_storage.rs", "rank": 76, "score": 61207.206274035096 }, { "content": "struct ScheduleBuilder<'a> {\n\n criteria: &'a ScheduleCriteria,\n\n schedule: legion::systems::schedule::Builder,\n\n}\n\n\n\nimpl<'a> ScheduleBuilder<'a> {\n\n fn new(criteria: &'a ScheduleCriteria) -> Self {\n\n ScheduleBuilder::<'a> {\n\n criteria,\n\n schedule: Default::default(),\n\n }\n\n }\n\n\n\n fn build(self) -> Schedule {\n\n self.schedule.build()\n\n }\n\n\n\n fn always<F>(\n\n mut self,\n\n f: F,\n", "file_path": "demo/src/systems/mod.rs", "rank": 77, "score": 61207.206274035096 }, { "content": "#[derive(Default)]\n\nstruct RegisteredEditorSelectable<T> {\n\n phantom_data: PhantomData<T>,\n\n}\n\n\n\nimpl<T> RegisteredEditorSelectable<T>\n\nwhere\n\n T: EditorSelectable,\n\n{\n\n fn new() -> Self {\n\n RegisteredEditorSelectable {\n\n phantom_data: Default::default(),\n\n }\n\n 
}\n\n}\n\n\n\nimpl<T> RegisteredEditorSelectableT for RegisteredEditorSelectable<T>\n\nwhere\n\n T: EditorSelectable,\n\n{\n\n fn create_editor_selection_world(\n", "file_path": "demo/src/selection.rs", "rank": 78, "score": 60186.20408053014 }, { "content": "fn main() {\n\n // Setup logging\n\n env_logger::Builder::from_default_env()\n\n .filter_level(log::LevelFilter::Debug)\n\n .filter_module(\"tokio_reactor\", log::LevelFilter::Info)\n\n .init();\n\n\n\n // Spawn the daemon in a background thread. This could be a different process, but\n\n // for simplicity we'll launch it here.\n\n std::thread::spawn(move || {\n\n daemon::run();\n\n });\n\n\n\n // Build the app and run it\n\n let example_app = DemoApp::new();\n\n let renderer_builder = skulpin::RendererBuilder::new()\n\n .app_name(CString::new(\"Skulpin Example App\").unwrap())\n\n .use_vulkan_debug_layer(true);\n\n\n\n atelier_legion_demo::app::App::run(\n\n example_app,\n\n LogicalSize::new(900, 600),\n\n renderer_builder,\n\n );\n\n}\n", "file_path": "demo/src/main.rs", "rank": 79, "score": 58763.44422350391 }, { "content": "pub trait AppHandler {\n\n /// Called once at start, put one-time init code here\n\n fn init(\n\n &mut self,\n\n world: &mut World,\n\n resources: &mut Resources,\n\n );\n\n\n\n /// Called frequently, this is the intended place to put non-rendering logic\n\n fn update(\n\n &mut self,\n\n world: &mut World,\n\n resources: &mut Resources,\n\n );\n\n\n\n /// Called frequently, this is the intended place to put drawing code\n\n fn draw(\n\n &mut self,\n\n world: &mut World,\n\n resources: &mut Resources,\n", "file_path": "demo/src/app.rs", "rank": 80, "score": 57776.32362132293 }, { "content": "// This is required because rustc does not recognize .ctor segments when considering which symbols\n\n// to include when linking static libraries to avoid having the module eliminated as \"dead code\".\n\n// We need to reference a symbol in each module (crate) that registers an importer since atelier_importer uses\n\n// inventory::submit and the .ctor linkage hack.\n\n// Note that this is only required if you use the built-in `atelier_importer::get_source_importers` to\n\n// register importers with the daemon builder.\n\nfn init_modules() {\n\n // An example of how referencing of types could look to avoid dead code elimination\n\n // #[cfg(feature = \"amethyst-importers\")]\n\n // {\n\n // use amethyst::assets::Asset;\n\n // amethyst::renderer::types::Texture::name();\n\n // amethyst::assets::experimental::DefaultLoader::default();\n\n // let _w = amethyst::audio::output::outputs();\n\n // }\n\n}\n\n\n", "file_path": "demo/src/daemon.rs", "rank": 81, "score": 57632.89608906138 }, { "content": "#[derive(Default)]\n\nstruct RegisteredEditorSelectableTransformed<T, U> {\n\n phantom_data: PhantomData<(T, U)>,\n\n}\n\n\n\nimpl<T, U> RegisteredEditorSelectableTransformed<T, U>\n\nwhere\n\n T: EditorSelectableTransformed<U>,\n\n{\n\n fn new() -> Self {\n\n RegisteredEditorSelectableTransformed {\n\n phantom_data: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T, U> RegisteredEditorSelectableT for RegisteredEditorSelectableTransformed<T, U>\n\nwhere\n\n T: EditorSelectableTransformed<U>,\n\n U: Component,\n\n{\n", "file_path": "demo/src/selection.rs", "rank": 82, "score": 56449.63780139365 }, { "content": "// This function does a recursive blocking load on the provided prefab asset and all prefabs\n\n// that it references. 
As it does this, prefab_lookup and prefab_cook_order are populated\n\nfn request_prefab_dependency(\n\n asset_manager: &mut AssetResource,\n\n id: AssetUuid,\n\n prefab_lookup: &mut HashMap<AssetUuid, Handle<PrefabAsset>>,\n\n prefab_cook_order: &mut Vec<AssetUuid>,\n\n) {\n\n // Request the asset\n\n let load_handle = asset_manager.loader().add_ref(id);\n\n let handle = Handle::<PrefabAsset>::new(asset_manager.tx().clone(), load_handle);\n\n\n\n // Block until it loads\n\n loop {\n\n asset_manager.update();\n\n if let LoadStatus::Loaded = handle.load_status::<RpcLoader>(asset_manager.loader()) {\n\n break;\n\n }\n\n }\n\n\n\n // Grab a reference to the asset\n\n let prefab_asset: &PrefabAsset = handle.asset(asset_manager.storage()).unwrap();\n", "file_path": "demo/src/prefab_cooking.rs", "rank": 83, "score": 55557.3529164335 }, { "content": "fn draw_scale_gizmo(\n\n debug_draw: &mut DebugDrawResource,\n\n editor_draw: &mut EditorDrawResource,\n\n selection_world: &mut EditorSelectionResource,\n\n subworld: &SubWorld,\n\n scale_query: &mut legion::systems::SystemQuery<\n\n (\n\n Read<Position2DComponent>,\n\n TryRead<UniformScale2DComponent>,\n\n TryRead<NonUniformScale2DComponent>,\n\n ),\n\n EntityFilterTuple<\n\n And<(\n\n ComponentFilter<Position2DComponent>,\n\n Passthrough,\n\n Passthrough,\n\n )>,\n\n And<(Passthrough, Passthrough, Passthrough)>,\n\n And<(Passthrough, Passthrough, Passthrough)>,\n\n >,\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 84, "score": 53711.70758612544 }, { "content": "fn draw_rotate_gizmo(\n\n debug_draw: &mut DebugDrawResource,\n\n editor_draw: &mut EditorDrawResource,\n\n selection_world: &mut EditorSelectionResource,\n\n subworld: &SubWorld,\n\n scale_query: &mut legion::systems::SystemQuery<\n\n (Read<Position2DComponent>, Read<Rotation2DComponent>),\n\n EntityFilterTuple<\n\n And<(\n\n ComponentFilter<Position2DComponent>,\n\n ComponentFilter<Rotation2DComponent>,\n\n )>,\n\n And<(Passthrough, Passthrough)>,\n\n And<(Passthrough, Passthrough)>,\n\n >,\n\n >,\n\n) {\n\n for (entity, (position, rotation)) in scale_query.iter_entities(subworld) {\n\n if !selection_world.is_entity_selected(entity) {\n\n continue;\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 85, "score": 53711.70758612544 }, { "content": "fn draw_translate_gizmo(\n\n debug_draw: &mut DebugDrawResource,\n\n editor_draw: &mut EditorDrawResource,\n\n selection_world: &mut EditorSelectionResource,\n\n subworld: &SubWorld,\n\n translate_query: &mut legion::systems::SystemQuery<\n\n Read<Position2DComponent>,\n\n EntityFilterTuple<ComponentFilter<Position2DComponent>, Passthrough, Passthrough>,\n\n >,\n\n) {\n\n for (entity, position) in translate_query.iter_entities(subworld) {\n\n if !selection_world.is_entity_selected(entity) {\n\n continue;\n\n }\n\n\n\n let x_color = glam::vec4(0.0, 1.0, 0.0, 1.0);\n\n let y_color = glam::vec4(1.0, 0.6, 0.0, 1.0);\n\n let xy_color = glam::vec4(1.0, 1.0, 0.0, 1.0);\n\n\n\n let xy_position = glam::Vec2::new(position.position.x(), position.position.y());\n", "file_path": "demo/src/systems/editor_systems/gizmos.rs", "rank": 86, "score": 53711.70758612544 }, { "content": "pub trait TypedStorage: Any + Send {\n\n fn update_asset(\n\n &mut self,\n\n loader_info: &dyn LoaderInfoProvider,\n\n data: &[u8],\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n version: u32,\n\n ) -> Result<(), Box<dyn Error>>;\n\n fn commit_asset_version(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n fn 
free(\n\n &mut self,\n\n handle: LoadHandle,\n\n );\n\n}\n\nmopafy!(TypedStorage);\n", "file_path": "demo/src/asset_storage.rs", "rank": 87, "score": 52751.99201640595 }, { "content": "fn imgui_menu_tool_button(\n\n ui: &imgui::Ui,\n\n editor_state: &mut EditorStateResource,\n\n editor_tool: EditorTool,\n\n string: &'static str,\n\n) {\n\n let color_stack_token = if editor_state.active_editor_tool() == editor_tool {\n\n Some(ui.push_style_color(imgui::StyleColor::Text, [0.8, 0.0, 0.0, 1.0]))\n\n } else {\n\n None\n\n };\n\n\n\n if imgui::MenuItem::new(&im_str!(\"{}\", string)).build(ui) {\n\n editor_state.enqueue_set_active_editor_tool(editor_tool);\n\n }\n\n\n\n if let Some(color_stack_token) = color_stack_token {\n\n color_stack_token.pop(ui);\n\n }\n\n}\n\n\n", "file_path": "demo/src/systems/editor_systems/main_menu.rs", "rank": 88, "score": 52057.16875140127 }, { "content": "/// Any selectable component must implement this trait\n\npub trait EditorSelectable: legion::storage::Component {\n\n /// When called, the implementation is expected to place shapes into the collision world\n\n fn create_editor_selection_world(\n\n &self,\n\n collision_world: &mut CollisionWorld<f32, Entity>,\n\n resources: &Resources,\n\n opened_prefab: &OpenedPrefabState,\n\n world: &World,\n\n entity: Entity,\n\n );\n\n}\n\n\n", "file_path": "demo/src/selection.rs", "rank": 89, "score": 50609.4108391468 }, { "content": "/// Any selectable component must implement this trait\n\npub trait EditorSelectableTransformed<T>: legion::storage::Component {\n\n /// When called, the implementation is expected to place shapes into the collision world\n\n fn create_editor_selection_world(\n\n &self,\n\n collision_world: &mut CollisionWorld<f32, Entity>,\n\n resources: &Resources,\n\n opened_prefab: &OpenedPrefabState,\n\n prefab_world: &World,\n\n prefab_entity: Entity,\n\n transformed_world: &World,\n\n transformed_entity: Entity,\n\n transformed_component: &T,\n\n );\n\n}\n\n\n", "file_path": "demo/src/selection.rs", "rank": 90, "score": 47850.57019416712 }, { "content": "#[derive(PartialEq)]\n\nenum InspectResult {\n\n Unchanged,\n\n Edited,\n\n Deleted,\n\n}\n\n\n", "file_path": "demo/src/inspect.rs", "rank": 91, "score": 45695.69342861684 }, { "content": "fn init_imgui(window: &winit::window::Window) -> imgui::Context {\n\n use imgui::Context;\n\n\n\n let mut imgui = Context::create();\n\n {\n\n // Fix incorrect colors with sRGB framebuffer\n\n fn imgui_gamma_to_linear(col: [f32; 4]) -> [f32; 4] {\n\n let x = col[0].powf(2.2);\n\n let y = col[1].powf(2.2);\n\n let z = col[2].powf(2.2);\n\n let w = 1.0 - (1.0 - col[3]).powf(2.2);\n\n [x, y, z, w]\n\n }\n\n\n\n let style = imgui.style_mut();\n\n for col in 0..style.colors.len() {\n\n style.colors[col] = imgui_gamma_to_linear(style.colors[col]);\n\n }\n\n }\n\n\n", "file_path": "demo/src/app.rs", "rank": 92, "score": 44750.162034942856 }, { "content": "fn init_imgui(window: &winit::window::Window) -> imgui::Context {\n\n use imgui::Context;\n\n\n\n let mut imgui = Context::create();\n\n {\n\n // Fix incorrect colors with sRGB framebuffer\n\n fn imgui_gamma_to_linear(col: [f32; 4]) -> [f32; 4] {\n\n let x = col[0].powf(2.2);\n\n let y = col[1].powf(2.2);\n\n let z = col[2].powf(2.2);\n\n let w = 1.0 - (1.0 - col[3]).powf(2.2);\n\n [x, y, z, w]\n\n }\n\n\n\n let style = imgui.style_mut();\n\n for col in 0..style.colors.len() {\n\n style.colors[col] = imgui_gamma_to_linear(style.colors[col]);\n\n }\n\n }\n\n\n", "file_path": "demo/src/imgui_support.rs", "rank": 93, "score": 
43985.51596425665 }, { "content": "mod editor_state;\n\npub use editor_state::PostCommitSelection;\n\npub use editor_state::EditorStateResource;\n\npub use editor_state::EditorTool;\n\npub use editor_state::EditorMode;\n\npub use editor_state::EditorTransactionId;\n\npub use editor_state::EditorTransaction;\n\npub use editor_state::OpenedPrefabState;\n\n\n\nmod editor_selection;\n\npub use editor_selection::EditorSelectionResource;\n\n\n\nmod editor_draw;\n\npub use editor_draw::EditorDrawResource;\n\npub use editor_draw::EditorShapeClickedState;\n\npub use editor_draw::EditorShapeDragState;\n", "file_path": "demo/src/resources/editor_resources/mod.rs", "rank": 94, "score": 41689.748130685126 }, { "content": "/// Parses a string as a socket address.\n\nfn parse_socket_addr(s: &str) -> std::result::Result<SocketAddr, AddrParseError> {\n\n s.parse()\n\n}\n\n\n", "file_path": "demo/src/daemon.rs", "rank": 95, "score": 41140.92536198911 }, { "content": " match self.editor_mode {\n\n EditorMode::Active => self.play(time_state),\n\n EditorMode::Inactive => self.pause(time_state),\n\n };\n\n }\n\n\n\n pub fn open_prefab(\n\n world: &mut World,\n\n resources: &Resources,\n\n prefab_uuid: AssetUuid,\n\n ) {\n\n {\n\n let mut asset_resource = resources.get_mut::<AssetResource>().unwrap();\n\n\n\n use atelier_loader::Loader;\n\n use atelier_loader::handle::AssetHandle;\n\n\n\n let load_handle = asset_resource.loader().add_ref(prefab_uuid);\n\n let handle = atelier_loader::handle::Handle::<crate::pipeline::PrefabAsset>::new(\n\n asset_resource.tx().clone(),\n", "file_path": "demo/src/resources/editor_resources/editor_state.rs", "rank": 96, "score": 40937.06191593732 }, { "content": " resources: &Resources,\n\n world: &World,\n\n ) -> Self {\n\n let editor_selection_world = registry.create_editor_selection_world(resources, world);\n\n\n\n EditorSelectionResource {\n\n registry: Arc::new(registry),\n\n editor_selection_world,\n\n selected_entities: Default::default(),\n\n pending_selection_ops: Default::default(),\n\n }\n\n }\n\n\n\n pub fn create_editor_selection_world(\n\n resources: &Resources,\n\n world: &World,\n\n ) -> CollisionWorld<f32, Entity> {\n\n let registry = { resources.get::<Self>().unwrap().registry.clone() };\n\n\n\n registry.create_editor_selection_world(resources, world)\n", "file_path": "demo/src/resources/editor_resources/editor_selection.rs", "rank": 97, "score": 40936.85011697883 }, { "content": " }\n\n\n\n editor_state.opened_prefab.as_ref().unwrap().clone()\n\n };\n\n\n\n // Get the UUIDs of all selected entities\n\n let selected_uuids = editor_state.get_selected_uuids(&mut *selection_resource, world);\n\n\n\n // Delete the old stuff from the world\n\n for x in opened_prefab.prefab_to_world_mappings.values() {\n\n world.delete(*x);\n\n }\n\n\n\n {\n\n let component_registry = crate::create_component_registry();\n\n let component_registry_by_uuid = crate::create_component_registry_by_uuid();\n\n let copy_clone_impl = CopyCloneImpl::new(&component_registry);\n\n\n\n // Apply the diffs to the cooked data\n\n let mut universe = resources.get_mut::<UniverseResource>().unwrap();\n", "file_path": "demo/src/resources/editor_resources/editor_state.rs", "rank": 98, "score": 40934.353487019725 }, { "content": " }\n\n\n\n pub fn set_editor_selection_world(\n\n &mut self,\n\n editor_selection_world: CollisionWorld<f32, Entity>,\n\n ) {\n\n self.editor_selection_world = editor_selection_world;\n\n }\n\n\n\n pub fn editor_selection_world(&mut self) -> &CollisionWorld<f32, Entity> {\n\n 
&self.editor_selection_world\n\n }\n\n\n\n pub fn selected_entities(&self) -> &HashSet<Entity> {\n\n &self.selected_entities\n\n }\n\n\n\n pub fn selected_entity_aabbs(&mut self) -> HashMap<Entity, Option<AABB<f32>>> {\n\n Self::get_entity_aabbs(&self.selected_entities, &mut self.editor_selection_world)\n\n }\n", "file_path": "demo/src/resources/editor_resources/editor_selection.rs", "rank": 99, "score": 40934.0448070392 } ]
Rust
src/view/root.rs
PENGUINLIONG/Writus
63f47f0730380f83cdfda69899b0aa6427c0c15a
use std::sync::Arc;
use serde_json::Value as JsonValue;
use pulldown_cmark::Parser;
use pulldown_cmark::{Options as ParserOptions, OPTION_ENABLE_TABLES};
use writium::prelude::*;
use writium_cache::{Cache, DumbCacheSource};
use api::index::Index;
use super::template::*;

pub struct RootView {
    index_template: Template,
    digest_template: Template,
    post_cache: Arc<Cache<String>>,
    metadata_cache: Arc<Cache<JsonValue>>,
    index: Index,
    entries_per_request: usize,
}
impl RootView {
    pub fn new() -> RootView {
        RootView {
            index_template: Template::default(),
            digest_template: Template::default(),
            post_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),
            metadata_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),
            index: Index::default(),
            entries_per_request: 5,
        }
    }
    pub fn set_post_cache(&mut self, cache: Arc<Cache<String>>) {
        self.post_cache = cache;
    }
    pub fn set_metadata_cache(&mut self, cache: Arc<Cache<JsonValue>>) {
        self.metadata_cache = cache;
    }
    pub fn set_digest_template(&mut self, template: Template) {
        self.digest_template = template;
    }
    pub fn set_index_template(&mut self, template: Template) {
        self.index_template = template;
    }
    pub fn set_index(&mut self, index: Index) {
        self.index = index;
    }
    pub fn set_entries_per_request(&mut self, epr: usize) {
        self.entries_per_request = epr;
    }
    fn render_digest(&self, id: &str, post: &str, meta: &JsonValue) -> String {
        fn get_digest(full_text: &str) -> (String, String) {
            let mut lines = full_text.lines();
            let title = lines
                .next()
                .unwrap_or_default()
                .chars()
                .skip_while(|ch| ch == &'#')
                .skip_while(|ch| ch == &' ')
                .collect();
            let mut content = String::new();
            lines
                .skip_while(|line| line.trim().len() == 0)
                .take_while(|line| line.trim().len() > 0)
                .for_each(|x| content += x);
            (title, content)
        }
        fn md_to_html(md: &str) -> String {
            let mut buf = String::with_capacity(md.len());
            let mut opts = ParserOptions::empty();
            opts.insert(OPTION_ENABLE_TABLES);
            let parser = Parser::new_ext(&md, opts);
            ::pulldown_cmark::html::push_html(&mut buf, parser);
            buf
        }
        let path = format!("/posts/{}", id);
        let (title, content) = get_digest(&post);
        self.digest_template.render(meta, &[
            ("link", &path),
            ("id", &id),
            ("title", &title),
            ("content", &md_to_html(&content)),
        ])
    }
    fn render_index(&self, req: &mut Request) -> ApiResult {
        use self::header::ContentType;
        #[derive(Deserialize)]
        struct Param {
            page: Option<usize>,
        }
        let param = req.to_param::<Param>()?;
        let guard = self.index.read().unwrap();
        let max_page = {
            let len = guard.len();
            if len % self.entries_per_request == 0 {
                len / self.entries_per_request
            } else {
                len / self.entries_per_request + 1
            }
        };
        let page = param.page.unwrap_or_default()
            .min(max_page)
            .max(1);
        let skip = (page - 1) * self.entries_per_request;
        let take = self.entries_per_request;
        let ids = guard.get_range(skip, take);
        let mut digests = String::new();
        for id in ids {
            let post_cache = self.post_cache.get(&id)?;
            let post_guard = post_cache.read().unwrap();
            let post: &str = post_guard.as_ref();
            let metadata_cache = self.metadata_cache.get(&id)?;
            let metadata_guard = metadata_cache.read().unwrap();
            let metadata: &JsonValue = &metadata_guard;
            digests.push_str(&self.render_digest(&id, post, metadata));
        }
        let current = page.to_string();
        let (prev, prev_link) = if page - 1 > 0 {
            ((page - 1).to_string(), format!("?page={}", page - 1))
        } else {
            (String::new(), String::new())
        };
        let (next, next_link) = if page + 1 <= max_page {
            ((page + 1).to_string(), format!("?page={}", page + 1))
        } else {
            (String::new(), String::new())
        };
        let res = Response::new()
            .with_header(ContentType(
                "text/html; charset=UTF-8".parse().unwrap())
            )
            .with_body(self.index_template.render(&JsonValue::Null, &[
                ("digests", &digests),
                ("current", &current),
                ("previous_link", &prev_link),
                ("previous", &prev),
                ("next_link", &next_link),
                ("next", &next),
            ]));
        Ok(res)
    }
}
impl Api for RootView {
    fn name(&self) -> &[&str] {
        &[]
    }
    fn route(&self, req: &mut Request) -> ApiResult {
        use self::header::{Allow, Location};
        match req.method() {
            Method::Get => {
                if req.path_segs().len() == 0 || req.path_segs()[0] == "" {
                    self.render_index(req)
                } else {
                    let mut loc = "/api/v1/resources".to_owned();
                    for seg in req.path_segs() {
                        loc.push('/');
                        loc.push_str(seg);
                    }
                    let res = Response::new()
                        .with_status(StatusCode::MovedPermanently)
                        .with_header(Location::new(loc));
                    Ok(res)
                }
            },
            Method::Options => {
                let res = Response::new()
                    .with_header(Allow(vec![Method::Options, Method::Get]));
                Ok(res)
            },
            _ => Err(Error::method_not_allowed())
        }
    }
}
use std::sync::Arc;
use serde_json::Value as JsonValue;
use pulldown_cmark::Parser;
use pulldown_cmark::{Options as ParserOptions, OPTION_ENABLE_TABLES};
use writium::prelude::*;
use writium_cache::{Cache, DumbCacheSource};
use api::index::Index;
use super::template::*;

pub struct RootView {
    index_template: Template,
    digest_template: Template,
    post_cache: Arc<Cache<String>>,
    metadata_cache: Arc<Cache<JsonValue>>,
    index: Index,
    entries_per_request: usize,
}
impl RootView {
    pub fn new() -> RootView {
        RootView {
            index_template: Template::default(),
            digest_template: Template::default(),
            post_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),
            metadata_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),
            index: Index::default(),
            entries_per_request: 5,
        }
    }
    pub fn set_post_cache(&mut self, cache: Arc<Cache<String>>) {
        self.post_cache = cache;
    }
    pub fn set_metadata_cache(&mut self, cache: Arc<Cache<JsonValue>>) {
        self.metadata_cache = cache;
    }
    pub fn set_digest_template(&mut self, template: Template) {
        self.digest_template = template;
    }
    pub fn set_index_template(&mut self, template: Template) {
        self.index_template = template;
    }
    pub fn set_index(&mut self, index: Index) {
        self.index = index;
    }
    pub fn set_entries_per_request(&mut self, epr: usize) {
        self.entries_per_request = epr;
    }
    fn render_digest(&self, id: &str, post: &str, meta: &JsonValue) -> String {
        fn get_digest(full_text: &str) -> (String, String) {
            let mut lines = full_text.lines();
            let title = lines
                .next()
                .unwrap_or_default()
ew();
            lines
                .skip_while(|line| line.trim().len() == 0)
                .take_while(|line| line.trim().len() > 0)
                .for_each(|x| content += x);
            (title, content)
        }
        fn md_to_html(md: &str) -> String {
            let mut buf = String::with_capacity(md.len());
            let mut opts = ParserOptions::empty();
            opts.insert(OPTION_ENABLE_TABLES);
            let parser = Parser::new_ext(&md, opts);
            ::pulldown_cmark::html::push_html(&mut buf, parser);
            buf
        }
        let path = format!("/posts/{}", id);
        let (title, content) = get_digest(&post);
        self.digest_template.render(meta, &[
            ("link", &path),
            ("id", &id),
            ("title", &title),
            ("content", &md_to_html(&content)),
        ])
    }
    fn render_index(&self, req: &mut Request) -> ApiResult {
        use self::header::ContentType;
        #[derive(Deserialize)]
        struct Param {
            page: Option<usize>,
        }
        let param = req.to_param::<Param>()?;
        let guard = self.index.read().unwrap();
        let max_page = {
            let len = guard.len();
            if len % self.entries_per_request == 0 {
                len / self.entries_per_request
            } else {
                len / self.entries_per_request + 1
            }
        };
        let page = param.page.unwrap_or_default()
            .min(max_page)
            .max(1);
        let skip = (page - 1) * self.entries_per_request;
        let take = self.entries_per_request;
        let ids = guard.get_range(skip, take);
        let mut digests = String::new();
        for id in ids {
            let post_cache = self.post_cache.get(&id)?;
            let post_guard = post_cache.read().unwrap();
            let post: &str = post_guard.as_ref();
            let metadata_cache = self.metadata_cache.get(&id)?;
            let metadata_guard = metadata_cache.read().unwrap();
            let metadata: &JsonValue = &metadata_guard;
            digests.push_str(&self.render_digest(&id, post, metadata));
        }
        let current = page.to_string();
        let (prev, prev_link) = if page - 1 > 0 {
            ((page - 1).to_string(), format!("?page={}", page - 1))
        } else {
            (String::new(), String::new())
        };
        let (next, next_link) = if page + 1 <= max_page {
            ((page + 1).to_string(), format!("?page={}", page + 1))
        } else {
            (String::new(), String::new())
        };
        let res = Response::new()
            .with_header(ContentType(
                "text/html; charset=UTF-8".parse().unwrap())
            )
            .with_body(self.index_template.render(&JsonValue::Null, &[
                ("digests", &digests),
                ("current", &current),
                ("previous_link", &prev_link),
                ("previous", &prev),
                ("next_link", &next_link),
                ("next", &next),
            ]));
        Ok(res)
    }
}
impl Api for RootView {
    fn name(&self) -> &[&str] {
        &[]
    }
    fn route(&self, req: &mut Request) -> ApiResult {
        use self::header::{Allow, Location};
        match req.method() {
            Method::Get => {
                if req.path_segs().len() == 0 || req.path_segs()[0] == "" {
                    self.render_index(req)
                } else {
                    let mut loc = "/api/v1/resources".to_owned();
                    for seg in req.path_segs() {
                        loc.push('/');
                        loc.push_str(seg);
                    }
                    let res = Response::new()
                        .with_status(StatusCode::MovedPermanently)
                        .with_header(Location::new(loc));
                    Ok(res)
                }
            },
            Method::Options => {
                let res = Response::new()
                    .with_header(Allow(vec![Method::Options, Method::Get]));
                Ok(res)
            },
            _ => Err(Error::method_not_allowed())
        }
    }
}
                .chars()
                .skip_while(|ch| ch == &'#')
                .skip_while(|ch| ch == &' ')
                .collect();
            let mut content = String::n
function_block-random_span
[ { "content": "fn make_index(dir: &str, key: &str, index: &mut IndexCollection) {\n\n info!(\"Indexing files with key '{}'.\", key);\n\n for entry in WalkDir::new(&dir)\n\n .into_iter()\n\n .filter_map(|x| x.ok()) {\n\n // Seek for `content.md`.\n\n if !entry.file_type().is_file() ||\n\n entry.file_name() != \"content.md\" {\n\n continue\n\n }\n\n if let Some(parent) = entry.path().parent() {\n\n info!(\"Indexing article '{}'...\", &parent.to_string_lossy());\n\n if let Some(val) = get_index_val_for(parent, key) {\n\n let path = parent.strip_prefix(&dir).unwrap()\n\n .to_string_lossy()\n\n .to_string();\n\n index.insert(&path, &val);\n\n } else {\n\n warn!(\"Article is not indexed.\");\n\n }\n\n } else {\n\n error!(\"Unexpected error accessing parent of: {}\",\n\n &entry.path().to_string_lossy());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/api/index/mod.rs", "rank": 0, "score": 155436.12689427496 }, { "content": "fn concat_subfragments(base: &Path, mut template: String)\n\n -> Result<String, String> {\n\n let mut rv = String::with_capacity(template.len());\n\n loop {\n\n if let Some(beg) = template.find(\"<?\") {\n\n rv.extend(template.drain(..beg))\n\n } else {\n\n // No more processing instructions, get out of the loop.\n\n return Ok(rv + &template)\n\n }\n\n let end = template.find(\"?>\")\n\n .ok_or(\"Unclosed processing instruction.\".to_owned())?;\n\n let mut extend = false;\n\n if end < 2 {\n\n return Err(\"Tag beginning and ending overlaps.\".to_owned())\n\n } else {\n\n let parts: Vec<&str> = template[2..end]\n\n .splitn(2, ' ')\n\n .collect();\n\n if parts.len() == 2 {\n", "file_path": "src/view/template/mod.rs", "rank": 1, "score": 132415.84377386904 }, { "content": "#[inline]\n\nfn clean_id<'a>(id: &'a str) -> &'a str {\n\n let pos_non_slash = id.bytes()\n\n .position(|x| x != b'/')\n\n .unwrap_or(0);\n\n &id[pos_non_slash..]\n\n}\n", "file_path": "src/model/mod.rs", "rank": 2, "score": 131167.70033377555 }, { "content": "fn compile(mut concated: String) -> Vec<Box<TemplateSection>> {\n\n let mut rv: Vec<Box<TemplateSection>> = Vec::new();\n\n loop {\n\n if let Some(beg) = concated.find(\"<?\") {\n\n let string = concated.drain(..beg).collect();\n\n rv.push(Box::new(StringSection::new(string)));\n\n } else {\n\n // No more processing instructions, get out of the loop.\n\n rv.push(Box::new(StringSection::new(concated)));\n\n return rv\n\n }\n\n // There should be no invalid syntax present (after\n\n // concat_subfragments).\n\n let end = concated.find(\"?>\").unwrap();\n\n {\n\n let parts: Vec<&str> = concated[2..end]\n\n .splitn(2, ' ')\n\n .collect();\n\n // `parts[0]` can be nothing other than 'var'.\n\n rv.push(Box::new(MetadataSection::new(parts[1].to_owned())));\n", "file_path": "src/view/template/mod.rs", "rank": 3, "score": 126835.32209461492 }, { "content": "fn mk_idx(key: &str, mut col: Box<IndexCollection>, dir: Option<&str>)\n\n -> Index {\n\n Index {\n\n index: {\n\n if let Some(dir) = dir {\n\n make_index(dir, key, &mut *col);\n\n }\n\n Arc::new(RwLock::new(col))\n\n },\n\n key: key.to_owned(),\n\n }\n\n}\n\n#[derive(Clone)]\n\npub struct Index {\n\n index: Arc<RwLock<Box<IndexCollection>>>,\n\n key: String,\n\n}\n\nimpl Index {\n\n /// Make a new `Index` with given index collection and index key.\n\n pub fn with_index_collection<T>(key: &str, col: T, dir: Option<&str>)\n", "file_path": "src/api/index/mod.rs", "rank": 4, "score": 124849.38043235047 }, { "content": "pub fn check_type(res: &Response, ty: &str, sub: &str) {\n\n let ctype = 
res.header::<ContentType>();\n\n let ctype = ctype.unwrap();\n\n assert_eq!(ctype.0.type_(), ty);\n\n assert_eq!(ctype.0.subtype(), sub);\n\n}\n", "file_path": "src/api/test_common.rs", "rank": 5, "score": 107745.62489648655 }, { "content": "pub fn check_content(res: &Response, content: &str) {\n\n let res_content = res.to_str();\n\n assert_eq!(res_content.unwrap(), content);\n\n}\n", "file_path": "src/api/test_common.rs", "rank": 6, "score": 101251.6974180497 }, { "content": "fn load_fragement(base: &Path, file_path: &Path) -> Result<String, String> {\n\n let file = File::open(path_buf![&base, &file_path])\n\n .map_err(|err| format!(\"Unable to open template file: {}\", err))?;\n\n let mut reader = BufReader::new(file);\n\n let mut buf = String::new();\n\n reader.read_to_string(&mut buf)\n\n .map_err(|err| format!(\"Unable to read from template file: {}\", err))?;\n\n concat_subfragments(base, buf)\n\n}\n", "file_path": "src/view/template/mod.rs", "rank": 7, "score": 93299.41491395675 }, { "content": "pub fn test_err(api: &Api, mut req: Request) -> Error {\n\n let result = api.route(&mut req);\n\n result.unwrap_err()\n\n}\n", "file_path": "src/api/test_common.rs", "rank": 8, "score": 92519.68519697724 }, { "content": "pub fn test_ok(api: &Api, mut req: Request) -> Response {\n\n let result = api.route(&mut req);\n\n result.unwrap()\n\n}\n", "file_path": "src/api/test_common.rs", "rank": 9, "score": 92519.68519697724 }, { "content": "fn make_article(author_content: &[(&str, &str)]) -> BTreeMap<usize, Comment> {\n\n let mut article = BTreeMap::new();\n\n let mut index = 0;\n\n for &(ref author, ref content) in author_content {\n\n article.insert(index, make_comment(author, content));\n\n index += 2;\n\n }\n\n article\n\n}\n\npub struct MockSource(Mutex<HashMap<String, BTreeMap<usize, Comment>>>);\n\nimpl MockSource {\n\n /// Make a source with article `foo` loaded, the article contains 1 comment\n\n /// with metadata `author` equals `PENGUINLIONG` and content `Wow!`.\n\n pub fn new() -> MockSource {\n\n let mut map = HashMap::new();\n\n map.insert(\"foo\".to_owned(), make_article(&[(\"PENGUINLIONG\", \"Wow!\")]));\n\n\n\n MockSource(Mutex::new(map))\n\n }\n\n pub fn new_privilege() -> MockSource {\n", "file_path": "src/api/comment/tests/source.rs", "rank": 10, "score": 90569.50307795424 }, { "content": "fn indexed_api() -> PostApi {\n\n use api::Index;\n\n use serde_json::Value as JsonValue;\n\n let index = Index::new(\"key\", \"integer\", None);\n\n let mut api = api();\n\n api.set_entries_per_request(2);\n\n index.write().unwrap().insert(\"/foo\".into(), &JsonValue::Number(0.into()));\n\n index.write().unwrap().insert(\"/bar\".into(), &JsonValue::Number(1.into()));\n\n index.write().unwrap().insert(\"/baz\".into(), &JsonValue::Number(2.into()));\n\n api.set_index(index);\n\n api\n\n}\n\n\n", "file_path": "src/api/post/tests/mod.rs", "rank": 11, "score": 86642.39587689904 }, { "content": "fn get_index_val_for(parent: &Path, key: &str) -> Option<JsonValue> {\n\n use std::fs::File;\n\n use std::io::Read;\n\n // Find `metadata.json`.\n\n let mut text = Vec::new();\n\n let mut file = File::open(path_buf![parent, \"metadata.json\"])\n\n .map_err(|err| error!(\"Unable to open metadata from '{}': {}\",\n\n parent.to_string_lossy(), err))\n\n .ok()?;\n\n file.read_to_end(&mut text)\n\n .map_err(|err| error!(\"Unable to read metadata from '{}': {}\",\n\n parent.to_string_lossy(), err))\n\n .ok()?;\n\n let json = ::serde_json::from_slice::<JsonValue>(&text)\n\n .map_err(|err| warn!(\"Unable to 
serialize content of '{}': {}\",\n\n parent.to_string_lossy(), err))\n\n .ok()?;\n\n // If field `noIndex` presents and is set true, ignore the article.\n\n if let Some(&JsonValue::Bool(true)) = json.get(\"noIndex\") {\n\n return None\n\n }\n\n json.get(key)\n\n .map(|x| x.to_owned())\n\n}\n", "file_path": "src/api/index/mod.rs", "rank": 12, "score": 86565.3114520893 }, { "content": "#[test]\n\nfn test_get_index() {\n\n let api = indexed_api();\n\n let req = Request::new(Method::Get);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, r#\"[\"/foo\",\"/bar\"]\"#);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 13, "score": 81920.3080787735 }, { "content": "#[test]\n\nfn test_get_index_from() {\n\n let api = indexed_api();\n\n let req = Request::new(Method::Get)\n\n .with_query(\"from=1\");\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, r#\"[\"/bar\",\"/baz\"]\"#);\n\n}\n\n\n", "file_path": "src/api/post/tests/mod.rs", "rank": 14, "score": 81920.3080787735 }, { "content": "struct IndexItem<T: IndexKeyType> {\n\n pub key: T,\n\n pub path: String,\n\n}\n\n\n", "file_path": "src/api/index/index_map.rs", "rank": 15, "score": 79339.32910116228 }, { "content": "fn make_comment(author: &str, content: &str) -> Comment {\n\n Comment {\n\n metadata: {\n\n let mut meta = HashMap::new();\n\n meta.insert(\"author\".to_owned(), author.to_owned());\n\n meta\n\n },\n\n content: content.to_owned(),\n\n }\n\n}\n", "file_path": "src/api/comment/tests/source.rs", "rank": 16, "score": 78747.72578089441 }, { "content": "pub trait IndexCollection: Send + Sync {\n\n fn insert(&mut self, path: &str, key: &JsonValue);\n\n fn get_range(&self, skip: usize, take: usize) -> Vec<String>;\n\n fn remove(&mut self, path: &str);\n\n fn len(&self) -> usize;\n\n}\n\n\n\npub struct DefaultIndexCollection<T: IndexKeyType> {\n\n index: Vec<IndexItem<T>>,\n\n asc: bool,\n\n}\n\nimpl<T: IndexKeyType> DefaultIndexCollection<T> {\n\n pub fn new(asc: bool) -> DefaultIndexCollection<T> {\n\n DefaultIndexCollection {\n\n index: Vec::new(),\n\n asc: asc,\n\n }\n\n }\n\n fn search_by_key(&self, key: &T) -> usize {\n\n match if self.asc {\n", "file_path": "src/api/index/index_map.rs", "rank": 17, "score": 75698.97548088682 }, { "content": "pub trait TemplateSection: Send + Sync {\n\n fn get_section(&self, meta: &JsonValue, extra: &[(&str, &str)], out: &mut String);\n\n}\n\npub struct StringSection {\n\n string: String\n\n}\n\nimpl StringSection {\n\n pub fn new(string: String) -> StringSection {\n\n StringSection {\n\n string: string,\n\n }\n\n }\n\n}\n\nimpl TemplateSection for StringSection {\n\n fn get_section(&self, _meta: &JsonValue, _extra: &[(&str, &str)], out: &mut String) {\n\n out.push_str(&self.string)\n\n }\n\n}\n\npub struct MetadataSection {\n\n key: String,\n", "file_path": "src/view/template/section.rs", "rank": 18, "score": 73246.24147956955 }, { "content": "fn api_with_many_articles() -> (MetadataApi, Index) {\n\n let mut api = MetadataApi::new();\n\n api.set_cache(Arc::new(Cache::new(2, MockSource::new())));\n\n api.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n let index = Index::new(\"key\", \"string\", None);\n\n {\n\n let mut idx = index.write().unwrap();\n\n idx.insert(\"foo\", &JsonValue::String(\"111\".to_owned()));\n\n idx.insert(\"bar\", &JsonValue::String(\"222\".to_owned()));\n\n idx.insert(\"baz\", &JsonValue::String(\"333\".to_owned()));\n\n }\n\n 
api.set_index(index.clone());\n\n (api, index)\n\n}\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 19, "score": 71189.3765000628 }, { "content": "fn gen_unauthorized(msg: &'static str) -> Error {\n\n Error::unauthorized(msg)\n\n .with_header(\n\n WwwAuthenticate::new()\n\n .with_challenge(\n\n Challenge::new(\"Bearer\")\n\n .with_param(\"realm\", \"Unsafe HTTP method.\")\n\n )\n\n )\n\n}\n\nimpl Authority for SimpleAuthority {\n\n type Privilege = ();\n\n fn authorize(&self, _pr: (), req: &Request) -> Result<()> {\n\n if self.cred.is_none() {\n\n return Err(gen_unauthorized(\"No credential to be matched. Maybe \\\n\n the administrator intended to do so. For safety reasons, any \\\n\n authentication request is rejected.\"))\n\n }\n\n if let Some(cr) = req.header::<Authorization<Bearer>>() {\n\n if self.cred.as_ref().unwrap() == &*cr.0.token {\n", "file_path": "src/auth.rs", "rank": 20, "score": 71122.37813747312 }, { "content": "pub trait IndexKeyType: Sized + Send + Sync + Ord {\n\n fn try_from_json(json: &JsonValue) -> Option<Self>;\n\n}\n\nimpl IndexKeyType for i64 {\n\n fn try_from_json(json: &JsonValue) -> Option<i64> {\n\n if let Some(ref int) = json.as_i64() {\n\n return Some(int.to_owned())\n\n }\n\n None\n\n }\n\n}\n\nimpl IndexKeyType for String {\n\n fn try_from_json(json: &JsonValue) -> Option<String> {\n\n if let Some(ref string) = json.as_str() {\n\n return Some(string.to_string())\n\n }\n\n None\n\n }\n\n}\n\nimpl IndexKeyType for DateTime {\n\n fn try_from_json(json: &JsonValue) -> Option<DateTime> {\n\n if let Some(ref rfc3339) = json.as_str() {\n\n if let Ok(dt) = DateTime::parse_from_rfc3339(rfc3339) {\n\n return Some(dt)\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/api/index/index_map.rs", "rank": 21, "score": 68830.56349219494 }, { "content": "fn load_private_key(path: &str) -> rustls::PrivateKey {\n\n use rustls::internal::pemfile::rsa_private_keys;\n\n let keyfile = std::fs::File::open(path)\n\n .expect(\"Unable to open private key file\");\n\n let mut reader = std::io::BufReader::new(keyfile);\n\n let keys = rsa_private_keys(&mut reader).unwrap();\n\n assert!(keys.len() == 1);\n\n keys[0].clone()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 22, "score": 67702.5384807544 }, { "content": "fn load_certs(path: &str) -> Vec<rustls::Certificate> {\n\n let cert_file = std::fs::File::open(path)\n\n .expect(\"Unable to open certificate file\");\n\n let mut reader = std::io::BufReader::new(cert_file);\n\n rustls::internal::pemfile::certs(&mut reader).unwrap()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 23, "score": 67522.43819929144 }, { "content": "fn api() -> PostApi {\n\n use writium_cache::Cache;\n\n let mut post = PostApi::new();\n\n post.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n post.set_cache(Arc::new(Cache::new(3, source::MockSource::new())));\n\n post\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 24, "score": 65603.26040641664 }, { "content": "#[test]\n\nfn test_put() {\n\n let api = api();\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"bar\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_header(ContentType(\"text/markdown\".parse().unwrap()))\n\n .with_body(CONTENT_DIF);\n\n let _ = test_ok(&api, req);\n\n // Check content is correct.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"bar\"])\n\n .with_query(\"raw=true\");\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"text\", \"markdown\");\n\n check_content(&res, 
CONTENT_DIF);\n\n}\n\n\n", "file_path": "src/api/post/tests/mod.rs", "rank": 25, "score": 60174.41202184094 }, { "content": "#[test]\n\nfn test_post() {\n\n let api = api();\n\n let req = Request::new(Method::Post)\n\n .with_path_segs(&[\"foo\"])\n\n .with_json(&Comment { metadata: HashMap::new(), content: \"Panda!\".to_owned() })\n\n .unwrap();\n\n let _ = test_ok(&api, req);\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, POST_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 26, "score": 60174.41202184094 }, { "content": "#[test]\n\nfn test_delete() {\n\n let api = api();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // Check article is actually removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 27, "score": 60174.41202184094 }, { "content": "#[test]\n\nfn test_index_order() {\n\n fn gen_999() -> JsonValue {\n\n let mut map = ::serde_json::value::Map::new();\n\n map.insert(\"key\".to_string(), JsonValue::String(\"999\".to_owned()));\n\n JsonValue::Object(map)\n\n }\n\n let (api, index) = api_with_many_articles();\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"foo\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_json(&gen_999())\n\n .unwrap();\n\n let _ = test_ok(&api, req);\n\n assert_eq!(index.read().unwrap().get_range(0, 3),\n\n vec![\"bar\", \"baz\", \"foo\"]);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 28, "score": 58359.44015629585 }, { "content": "#[test]\n\nfn test_index_removal() {\n\n let (api, index) = api_with_many_articles();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"keys[]=key\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n assert_eq!(index.read().unwrap().get_range(0, 2), vec![\"bar\", \"baz\"]);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 29, "score": 58359.44015629585 }, { "content": "#[test]\n\nfn fail_put_auth() {\n\n let api = api();\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"bar\"])\n\n .with_header(ContentType(\"text/markdown\".parse().unwrap()));\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 30, "score": 58273.15778824955 }, { "content": "#[test]\n\nfn fail_put_type() {\n\n let api = api();\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"bar\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::BadRequest);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 31, "score": 58273.15778824955 }, { "content": "#[test]\n\nfn test_get_one() {\n\n let api = api();\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"text\", \"markdown\");\n\n check_content(&res, CONTENT_MARKDOWN);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 32, "score": 58273.15778824955 }, { 
"content": "#[test]\n\nfn fail_get_one() {\n\n let api = api();\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"bar\"]);\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 33, "score": 58273.15778824955 }, { "content": "#[test]\n\nfn fail_delete_auth() {\n\n let api = api();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"]);\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n", "file_path": "src/api/post/tests/mod.rs", "rank": 34, "score": 58273.15778824955 }, { "content": "/// `LocalFile` is used to access a file on local storage. Some of the files\n\n/// have fixed names due to Writus design.\n\nstruct FileAccessor {\n\n dir: PathBuf,\n\n fixed_file_name: Option<&'static str>,\n\n}\n\nimpl FileAccessor {\n\n pub fn new(dir: &str) -> FileAccessor {\n\n FileAccessor {\n\n dir: Path::new(dir).to_owned(),\n\n fixed_file_name: None,\n\n }\n\n }\n\n pub fn with_fixed_file_name(dir: &str, file: &'static str) -> FileAccessor {\n\n FileAccessor {\n\n dir: Path::new(dir).to_owned(),\n\n fixed_file_name: Some(file),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn make_path(&self, id: &str) -> PathBuf {\n", "file_path": "src/model/mod.rs", "rank": 35, "score": 52259.29079022781 }, { "content": "#[derive(Deserialize)]\n\nstruct RawExtra {\n\n pub published_dir: Option<String>,\n\n pub auth_token: Option<String>,\n\n pub index_key: Option<String>,\n\n pub index_key_type: Option<String>,\n\n pub entries_per_request: Option<u64>,\n\n pub allowed_exts: Option<HashMap<String, String>>,\n\n pub template_dir: Option<String>,\n\n}\n\npub struct Extra {\n\n pub published_dir: String,\n\n pub auth: Arc<SimpleAuthority>,\n\n pub index_key: String,\n\n pub index_key_type: String,\n\n pub entries_per_request: u64,\n\n pub allowed_exts: HashMap<String, Mime>,\n\n pub template_dir: String,\n\n}\n\nimpl From<Extra> for Namespace {\n\n /// Construct a Namespace containing all the v1 api and views.\n", "file_path": "src/config/v1.rs", "rank": 36, "score": 52256.48232783878 }, { "content": "fn main() {\n\n init_logging();\n\n let cfg = ::config::WritusConfig::load();\n\n\n\n let extra = cfg.extra.unwrap();\n\n let mut writium = Writium::new();\n\n // Load static pages.\n\n if let Some(ref static_pages) = cfg.static_pages.as_ref() {\n\n for (ref name, ref path) in static_pages.iter() {\n\n info!(\"Loading static page: {}\", path);\n\n match ::static_page::StaticPage::from_file(name, path) {\n\n Ok(sp) => writium.bind(sp),\n\n Err(err) => warn!(\"Error occured loading static page: {}\", err),\n\n }\n\n }\n\n }\n\n // Load all Writium v1 APIs.\n\n info!(\"Loading Writus APIs.\");\n\n let extra = ::config::v1::Extra::from(extra);\n\n let v1: Namespace = extra.into();\n", "file_path": "src/main.rs", "rank": 37, "score": 40273.08710979852 }, { "content": "fn init_logging() {\n\n use env_logger::LogBuilder;\n\n use log::{LogLevelFilter, LogRecord};\n\n let format = |record: &LogRecord| {\n\n format!(\"{} {:?} [{}] {}\", chrono::Utc::now().to_rfc3339(),\n\n std::thread::current().id(), record.level(), record.args())\n\n };\n\n let mut builder = LogBuilder::new();\n\n builder.format(format).filter(None, LogLevelFilter::Info);\n\n if ::std::env::var(\"RUST_LOG\").is_ok() {\n\n builder.parse(&::std::env::var(\"RUST_LOG\").unwrap());\n\n }\n\n if let Err(_) = builder.init() {\n\n panic!(\"Initialization failed!\");\n\n };\n\n}\n\n\n", "file_path": 
"src/main.rs", "rank": 38, "score": 38951.28847008624 }, { "content": "#[test]\n\nfn fail_delete_all() {\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"]);\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 39, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_get_all() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api(), req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, FOO_META_ALL);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 40, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_get() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let comments = test_ok(&api(), req);\n\n check_type(&comments, \"application\", \"json\");\n\n check_content(&comments, DEFAULT_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 41, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn fail_patch() {\n\n let req = Request::new(Method::Patch)\n\n .with_path_segs(&[\"foo\"])\n\n .with_json(&gen_null_neko())\n\n .unwrap();\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 42, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_delete_all() {\n\n let req = Request::new(Method::Delete)\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_path_segs(&[\"foo\"]);\n\n let _ = test_ok(&api(), req);\n\n let req = Request::new(Method::Get);\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 43, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_get_some() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"keys[]=neko\");\n\n let res = test_ok(&api(), req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, FOO_META);\n\n}\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 44, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_delete_some() {\n\n let api = api();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_query(\"keys[]=neko\");\n\n let _ = test_ok(&api, req);\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, DELETED_FOO_META);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 45, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_put() {\n\n let api = api();\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"bar\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_json(&gen_null_neko())\n\n .unwrap();\n\n let _ = test_ok(&api, req);\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"bar\"])\n\n .with_query(\"keys[]=neko\");\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, NULL_NEKO);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 46, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_patch() {\n\n let api = api();\n\n let req = Request::new(Method::Patch)\n\n 
.with_path_segs(&[\"foo\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }))\n\n .with_json(&gen_null_neko())\n\n .unwrap();\n\n let _ = test_ok(&api, req);\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, NULL_NEKO_ALL);\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 47, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn test_delete_all() {\n\n let api = api_with_many_comments();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n\n\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 48, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn fail_put() {\n\n let req = Request::new(Method::Put)\n\n .with_path_segs(&[\"bar\"])\n\n .with_json(&gen_null_neko())\n\n .unwrap();\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 49, "score": 35640.77651133879 }, { "content": "#[test]\n\nfn fail_delete_some() {\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"keys[]=neko\");\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n\n\n\n\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 50, "score": 35640.77651133879 }, { "content": " .position(|item| (&*item.path).eq(path)) {\n\n self.index.remove(pos);\n\n }\n\n }\n\n fn len(&self) -> usize {\n\n self.index.len()\n\n }\n\n}\n\n\n\npub struct DumbIndexCollection();\n\nimpl DumbIndexCollection {\n\n pub fn new() -> DumbIndexCollection{\n\n DumbIndexCollection()\n\n }\n\n}\n\nimpl IndexCollection for DumbIndexCollection {\n\n fn insert(&mut self, _path: &str, _key: &JsonValue) {}\n\n fn get_range(&self, _skip: usize, _take: usize) -> Vec<String> {\n\n Vec::new()\n\n }\n", "file_path": "src/api/index/index_map.rs", "rank": 51, "score": 35175.6183681747 }, { "content": " fn remove(&mut self, _path: &str) {}\n\n fn len(&self) -> usize { 0 }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{DateTime, DefaultIndexCollection, IndexCollection};\n\n #[test]\n\n fn test_default_index_col_num() {\n\n let mut col = DefaultIndexCollection::<i64>::new(true);\n\n col.insert(\"4\", &json!(4));\n\n col.insert(\"3\", &json!(3));\n\n col.insert(\"2\", &json!(2));\n\n col.insert(\"1\", &json!(1));\n\n col.insert(\"0\", &json!(0));\n\n assert_eq!(col.get_range(0, 5), &[\"0\", \"1\", \"2\", \"3\", \"4\"]);\n\n }\n\n #[test]\n\n fn test_default_index_col_str() {\n\n let mut col = DefaultIndexCollection::<String>::new(true);\n", "file_path": "src/api/index/index_map.rs", "rank": 52, "score": 35171.03308446424 }, { "content": " path: path.to_owned(),\n\n });\n\n info!(\"Indexed article: {}\", path);\n\n },\n\n }\n\n }\n\n fn get_range(&self, skip: usize, take: usize) -> Vec<String> {\n\n self.index.iter()\n\n .skip(skip)\n\n .take(take)\n\n .map(|item| if cfg!(windows) {\n\n item.path.replace('\\\\', \"/\")\n\n } else {\n\n item.path.to_owned()\n\n }\n\n )\n\n .collect::<Vec<_>>()\n\n }\n\n fn remove(&mut self, path: &str) {\n\n if let Some(pos) = 
self.index.iter()\n", "file_path": "src/api/index/index_map.rs", "rank": 53, "score": 35167.891795283154 }, { "content": " self.index.binary_search_by(|item| item.key.cmp(key))\n\n } else {\n\n self.index.binary_search_by(|item| item.key.cmp(key).reverse())\n\n } {\n\n Ok(pos) => pos,\n\n Err(pos) => pos,\n\n }\n\n }\n\n}\n\nimpl<T: IndexKeyType> IndexCollection for DefaultIndexCollection<T> {\n\n fn insert(&mut self, path: &str, key: &JsonValue) {\n\n let key = if let Some(key) = T::try_from_json(key) {\n\n key\n\n } else {\n\n if let Ok(pos) = self.index.binary_search_by(|item| {\n\n let str_ref: &str = item.path.as_ref();\n\n str_ref.cmp(path)\n\n }) {\n\n self.index.remove(pos);\n\n warn!(\"`{}` is updated with a new key which cannot be parsed \\\n", "file_path": "src/api/index/index_map.rs", "rank": 54, "score": 35166.71612883467 }, { "content": " }\n\n #[test]\n\n fn test_default_index_col_num_reverse() {\n\n let mut col = DefaultIndexCollection::<i64>::new(false);\n\n col.insert(\"0\", &json!(0));\n\n col.insert(\"1\", &json!(1));\n\n col.insert(\"2\", &json!(2));\n\n col.insert(\"3\", &json!(3));\n\n col.insert(\"4\", &json!(4));\n\n assert_eq!(col.get_range(0, 5), &[\"4\", \"3\", \"2\", \"1\", \"0\"]);\n\n }\n\n #[test]\n\n fn test_default_index_col_str_reverse() {\n\n let mut col = DefaultIndexCollection::<String>::new(false);\n\n col.insert(\"0\", &json!(\"0\"));\n\n col.insert(\"1\", &json!(\"1\"));\n\n col.insert(\"2\", &json!(\"2\"));\n\n col.insert(\"3\", &json!(\"3\"));\n\n col.insert(\"4\", &json!(\"4\"));\n\n assert_eq!(col.get_range(0, 5), &[\"4\", \"3\", \"2\", \"1\", \"0\"]);\n", "file_path": "src/api/index/index_map.rs", "rank": 55, "score": 35166.10729053804 }, { "content": " into `DateTime`. So it's removed from the index.\", path)\n\n } else {\n\n warn!(\"`{}` has a key which cannot be parsed into `DateTime`. 
\\\n\n So it's not indexed.\", path);\n\n }\n\n return\n\n };\n\n match self.index.iter().position(|item| (&*item.path).eq(path)) {\n\n Some(pos) => {\n\n let mut item = self.index.remove(pos);\n\n let new_pos = self.search_by_key(&key);\n\n item.key = key;\n\n self.index.insert(new_pos, item);\n\n info!(\"Updated index key for article: {}\", path);\n\n },\n\n None => {\n\n let new_pos = self.search_by_key(&key);\n\n self.index.insert(new_pos,\n\n IndexItem {\n\n key: key,\n", "file_path": "src/api/index/index_map.rs", "rank": 56, "score": 35164.154455163756 }, { "content": " col.insert(\"4\", &json!(\"4\"));\n\n col.insert(\"3\", &json!(\"3\"));\n\n col.insert(\"2\", &json!(\"2\"));\n\n col.insert(\"1\", &json!(\"1\"));\n\n col.insert(\"0\", &json!(\"0\"));\n\n assert_eq!(col.get_range(0, 5), &[\"0\", \"1\", \"2\", \"3\", \"4\"]);\n\n }\n\n #[test]\n\n fn test_default_index_col_dt() {\n\n let mut col = DefaultIndexCollection::<DateTime>::new(true);\n\n col.insert(\"4\", &json!(\"2018-01-01T00:00:04+00:00\"));\n\n col.insert(\"3\", &json!(\"2018-01-01T00:00:03+00:00\"));\n\n col.insert(\"2\", &json!(\"2018-01-01T00:00:02+00:00\"));\n\n col.insert(\"1\", &json!(\"2018-01-01T00:00:01+00:00\"));\n\n col.insert(\"0\", &json!(\"2018-01-01T00:00:00+00:00\"));\n\n assert_eq!(col.get_range(0, 5), &[\"0\", \"1\", \"2\", \"3\", \"4\"]);\n\n }\n\n #[test]\n\n fn test_default_index_col_range() {\n\n let mut col = DefaultIndexCollection::<i64>::new(true);\n", "file_path": "src/api/index/index_map.rs", "rank": 57, "score": 35162.990206780596 }, { "content": " }\n\n #[test]\n\n fn test_default_index_col_dt_reverse() {\n\n let mut col = DefaultIndexCollection::<DateTime>::new(false);\n\n col.insert(\"0\", &json!(\"2018-01-01T00:00:00+00:00\"));\n\n col.insert(\"1\", &json!(\"2018-01-01T00:00:01+00:00\"));\n\n col.insert(\"2\", &json!(\"2018-01-01T00:00:02+00:00\"));\n\n col.insert(\"3\", &json!(\"2018-01-01T00:00:03+00:00\"));\n\n col.insert(\"4\", &json!(\"2018-01-01T00:00:04+00:00\"));\n\n assert_eq!(col.get_range(0, 5), &[\"4\", \"3\", \"2\", \"1\", \"0\"]);\n\n }\n\n}\n", "file_path": "src/api/index/index_map.rs", "rank": 58, "score": 35162.31632896156 }, { "content": "use serde_json::Value as JsonValue;\n\n\n\npub type DateTime = ::chrono::DateTime<::chrono::FixedOffset>;\n\n\n", "file_path": "src/api/index/index_map.rs", "rank": 59, "score": 35161.31269969189 }, { "content": " col.insert(\"4\", &json!(4));\n\n col.insert(\"3\", &json!(3));\n\n col.insert(\"2\", &json!(2));\n\n col.insert(\"1\", &json!(1));\n\n col.insert(\"0\", &json!(0));\n\n assert_eq!(col.get_range(1, 3), &[\"1\", \"2\", \"3\"]);\n\n }\n\n #[test]\n\n fn test_default_index_col_range_multiple_times() {\n\n let mut col = DefaultIndexCollection::<i64>::new(true);\n\n col.insert(\"5\", &json!(5));\n\n col.insert(\"4\", &json!(4));\n\n col.insert(\"3\", &json!(3));\n\n col.insert(\"2\", &json!(2));\n\n col.insert(\"1\", &json!(1));\n\n col.insert(\"0\", &json!(0));\n\n assert_eq!(col.get_range(0, 3), &[\"0\", \"1\", \"2\"]);\n\n assert_eq!(col.get_range(1, 3), &[\"1\", \"2\", \"3\"]);\n\n assert_eq!(col.get_range(2, 3), &[\"2\", \"3\", \"4\"]);\n\n assert_eq!(col.get_range(3, 3), &[\"3\", \"4\", \"5\"]);\n", "file_path": "src/api/index/index_map.rs", "rank": 60, "score": 35160.74388360966 }, { "content": "#[test]\n\nfn test_get_many_from() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"from=1\");\n\n let comments = test_ok(&api_with_many_comments(), req);\n\n check_type(&comments, 
\"application\", \"json\");\n\n check_content(&comments, MANY_FROM_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 61, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn test_get_one() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\");\n\n let comments = test_ok(&api(), req);\n\n check_type(&comments, \"application\", \"json\");\n\n check_content(&comments, DEFAULT_ONE_JSON);\n\n}\n\n\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 62, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn test_delete_range_to() {\n\n let api = api_with_many_comments();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"to=2\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, MANY_FROM_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 63, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn test_delete_range_from_to() {\n\n let api = api_with_many_comments();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"from=2&to=3\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, CENTER_CUT_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 64, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn fail_get_one() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=1\");\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 65, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn test_get_many() {\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let comments = test_ok(&api_with_many_comments(), req);\n\n check_type(&comments, \"application\", \"json\");\n\n check_content(&comments, MANY_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 66, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn test_delete_range_from() {\n\n let api = api_with_many_comments();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"from=2\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"]);\n\n let res = test_ok(&api, req);\n\n check_type(&res, \"application\", \"json\");\n\n check_content(&res, DEFAULT_JSON);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 67, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn fail_delete_range_from_to() {\n\n let api = api_with_many_comments();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"from=2&to=2\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::BadRequest);\n\n}\n", 
"file_path": "src/api/comment/tests/mod.rs", "rank": 68, "score": 34712.2898657719 }, { "content": "#[test]\n\nfn fail_delete_one_auth() {\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\");\n\n let err = test_err(&api(), req);\n\n assert_eq!(err.status(), StatusCode::Unauthorized);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 69, "score": 33854.62478694903 }, { "content": "#[test]\n\nfn test_delete_one_auth() {\n\n let api = api();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\")\n\n .with_header(Authorization(Bearer { token: \"PASSWORD\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\");\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 70, "score": 33854.62478694903 }, { "content": "#[test]\n\nfn test_delete_one_token() {\n\n let api = api_privilege();\n\n let req = Request::new(Method::Delete)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\")\n\n .with_header(Authorization(Bearer { token: \"POWER!\".to_owned() }));\n\n let _ = test_ok(&api, req);\n\n // The comment should be removed.\n\n let req = Request::new(Method::Get)\n\n .with_path_segs(&[\"foo\"])\n\n .with_query(\"index=0\");\n\n let err = test_err(&api, req);\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 71, "score": 33854.62478694903 }, { "content": "fn api() -> CommentApi {\n\n let mut api = CommentApi::new();\n\n api.set_cache(Arc::new(Cache::new(3, MockSource::new())));\n\n api.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n api\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 72, "score": 33421.29357204925 }, { "content": "fn api() -> MetadataApi {\n\n let mut api = MetadataApi::new();\n\n api.set_cache(Arc::new(Cache::new(2, MockSource::new())));\n\n api.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n let index = Index::new(\"key\", \"string\", None);\n\n api.set_index(index);\n\n api\n\n}\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 73, "score": 33421.29357204925 }, { "content": "fn api_privilege() -> CommentApi {\n\n let mut api = CommentApi::new();\n\n api.set_cache(Arc::new(Cache::new(3, MockSource::new_privilege())));\n\n api.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n api\n\n}\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 74, "score": 32563.628493226395 }, { "content": "fn api_with_many_comments() -> CommentApi {\n\n let mut api = CommentApi::new();\n\n api.set_cache(Arc::new(Cache::new(3, MockSource::many_comments())));\n\n api.set_auth(Arc::new(SimpleAuthority::new(\"PASSWORD\")));\n\n api.set_entries_per_request(2);\n\n api\n\n}\n\n\n", "file_path": "src/api/comment/tests/mod.rs", "rank": 75, "score": 31768.979647044307 }, { "content": "fn gen_null_neko() -> JsonValue {\n\n let mut map = ::serde_json::value::Map::new();\n\n map.insert(\"neko\".to_string(), JsonValue::Null);\n\n JsonValue::Object(map)\n\n}\n\n\n", "file_path": "src/api/metadata/tests/mod.rs", "rank": 76, "score": 31768.979647044307 }, { "content": "fn raw_to_extra(extra: RawExtra) -> Extra {\n\n Extra {\n\n published_dir: extra.published_dir.unwrap_or(\"./published\".to_string()),\n\n auth: if let Some(token) = extra.auth_token.as_ref() 
{\n\n Arc::new(SimpleAuthority::new(token))\n\n } else {\n\n Arc::new(SimpleAuthority::default())\n\n },\n\n index_key: extra.index_key.unwrap_or(\"published\".to_string()),\n\n index_key_type: extra.index_key_type.unwrap_or(\"-datetime\".to_string()),\n\n entries_per_request: extra.entries_per_request.unwrap_or(5),\n\n allowed_exts: extra.allowed_exts.unwrap_or_default()\n\n .into_iter()\n\n .map(|(x, y)| (x, y.parse().expect(\"Unable to parse MIME in field `allowed_ext`.\")))\n\n .collect(),\n\n template_dir: extra.template_dir.unwrap_or(\"./templates\".to_owned()),\n\n }\n\n}\n", "file_path": "src/config/v1.rs", "rank": 77, "score": 30657.043402328512 }, { "content": " }\n\n pub fn set_post_cache(&mut self, cache: Arc<Cache<String>>) {\n\n self.post_cache = cache;\n\n }\n\n pub fn set_metadata_cache(&mut self, cache: Arc<Cache<JsonValue>>) {\n\n self.metadata_cache = cache;\n\n }\n\n pub fn set_template(&mut self, template: Template) {\n\n self.template = template;\n\n }\n\n pub fn render(&self, req: &mut Request) -> ApiResult {\n\n fn get_post(full_text: &str) -> (String, String) {\n\n let mut lines = full_text.lines();\n\n let title: String = lines\n\n .next()\n\n .unwrap_or_default()\n\n .chars()\n\n .skip_while(|ch| ch == &'#')\n\n .skip_while(|ch| ch == &' ')\n\n .collect();\n", "file_path": "src/view/post.rs", "rank": 78, "score": 29414.414245612945 }, { "content": " let mut content = String::new();\n\n lines.for_each(|x| {\n\n content += \"\\n\";\n\n content += x;\n\n });\n\n (title, content)\n\n }\n\n fn md_to_html(md: &str) -> String {\n\n let mut buf = String::with_capacity(md.len());\n\n let mut opts = ParserOptions::empty();\n\n opts.insert(OPTION_ENABLE_TABLES);\n\n let parser = Parser::new_ext(&md, opts);\n\n ::pulldown_cmark::html::push_html(&mut buf, parser);\n\n buf\n\n }\n\n use self::header::ContentType;\n\n let id = req.path_segs().join(\"/\");\n\n let post_cache = self.post_cache.get(&id)?;\n\n let content_guard = post_cache.read().unwrap();\n\n let (title, content) = get_post(content_guard.as_ref());\n", "file_path": "src/view/post.rs", "rank": 79, "score": 29409.356111164285 }, { "content": "use std::sync::Arc;\n\nuse serde_json::Value as JsonValue;\n\nuse pulldown_cmark::Parser;\n\nuse pulldown_cmark::{Options as ParserOptions, OPTION_ENABLE_TABLES};\n\nuse writium::prelude::*;\n\nuse writium_cache::{Cache, DumbCacheSource};\n\nuse super::template::*;\n\n\n\npub struct PostView {\n\n template: Template,\n\n post_cache: Arc<Cache<String>>,\n\n metadata_cache: Arc<Cache<JsonValue>>,\n\n}\n\nimpl PostView {\n\n pub fn new() -> PostView {\n\n PostView {\n\n template: Template::default(),\n\n post_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),\n\n metadata_cache: Arc::new(Cache::new(0, DumbCacheSource::new())),\n\n }\n", "file_path": "src/view/post.rs", "rank": 80, "score": 29408.88240005937 }, { "content": "use writium_cache::CacheSource;\n\nuse writium::prelude::*;\n\nuse super::FileAccessor;\n\n\n\nconst ERR_IO: &str = \"Resource accessed but error occured during IO.\";\n\nconst ERR_PARENT: &str = \"Parent of requested post cannot be created. 
Maybe \\\n\n there is a file occupying a segment of name in the path.\";\n\n\n\npub struct PostSource {\n\n accessor: FileAccessor,\n\n}\n\nimpl PostSource {\n\n pub fn new(dir: &str) -> PostSource {\n\n PostSource {\n\n accessor: FileAccessor::with_fixed_file_name(dir, \"content.md\"),\n\n }\n\n }\n\n}\n\nimpl CacheSource for PostSource {\n\n type Value = String;\n", "file_path": "src/model/post.rs", "rank": 81, "score": 29406.456464971216 }, { "content": " let metadata_cache = self.metadata_cache.get(&id)?;\n\n let metadata_guard = metadata_cache.read().unwrap();\n\n let metadata: &JsonValue = &metadata_guard;\n\n let path = format!(\"/posts/{}\", id);\n\n let res = Response::new()\n\n .with_header(ContentType(\n\n \"text/html; charset=UTF-8\".parse().unwrap()\n\n ))\n\n .with_body(self.template.render(&metadata, &[\n\n (\"link\", &path),\n\n (\"id\", &id),\n\n (\"title\", &title),\n\n (\"content\", &md_to_html(&content)),\n\n ]));\n\n Ok(res)\n\n }\n\n}\n\nimpl Api for PostView {\n\n fn name(&self) -> &[&str] {\n\n &[\"posts\"]\n", "file_path": "src/view/post.rs", "rank": 82, "score": 29405.543277010176 }, { "content": " fn load(&self, id: &str, create: bool) -> Result<String> {\n\n use std::io::Read;\n\n match self.accessor.read(id) {\n\n Ok(mut reader) => {\n\n let mut post = String::new();\n\n reader.read_to_string(&mut post)\n\n .map(|_| post)\n\n .map_err(|err| Error::internal(ERR_IO).with_cause(err))\n\n // Convert Markdown to HTML only when it's needed. So new posts\n\n // can be published.\n\n },\n\n Err(err) => if create {\n\n // Parent might not exist.\n\n if let Some(parent) = self.accessor.make_path(id).parent() {\n\n // Create all directory so that all subsequent uploading of\n\n // resources can be realized.\n\n ::std::fs::create_dir_all(parent)\n\n .map_err(|err| {\n\n Error::internal(ERR_PARENT).with_cause(err)\n\n })?;\n", "file_path": "src/model/post.rs", "rank": 83, "score": 29402.480432422897 }, { "content": " }\n\n Ok(String::new())\n\n } else {\n\n Err(err)\n\n },\n\n }\n\n }\n\n fn unload(&self, id: &str, val: &String) -> Result<()> {\n\n use std::io::Write;\n\n let mut writer = self.accessor.write(id)?;\n\n writer.write_all(val.as_bytes())\n\n .map_err(|err| Error::internal(ERR_IO).with_cause(err))\n\n }\n\n fn remove(&self, id: &str) -> Result<()> {\n\n self.accessor.remove(id)\n\n }\n\n}\n", "file_path": "src/model/post.rs", "rank": 84, "score": 29401.695904939043 }, { "content": " }\n\n fn route(&self, req: &mut Request) -> ApiResult {\n\n use self::header::Allow;\n\n match req.method() {\n\n Method::Get => self.render(req),\n\n Method::Options => {\n\n let res = Response::new()\n\n .with_header(Allow(vec![Method::Options, Method::Get]));\n\n Ok(res)\n\n },\n\n _ => Err(Error::method_not_allowed())\n\n }\n\n }\n\n}\n", "file_path": "src/view/post.rs", "rank": 85, "score": 29392.97461762031 }, { "content": " }\n\n pub fn render(&self, meta: &JsonValue, extra: &[(&str, &str)]) -> String {\n\n let mut rv = String::new();\n\n for sec in self.sections.iter() {\n\n sec.get_section(meta, extra, &mut rv);\n\n }\n\n rv\n\n }\n\n}\n\nimpl Default for Template {\n\n fn default() -> Template {\n\n Template {\n\n sections: Vec::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/view/template/mod.rs", "rank": 86, "score": 28644.60117476893 }, { "content": "}\n\nimpl MetadataSection {\n\n pub fn new(key: String) -> MetadataSection {\n\n MetadataSection {\n\n key: key,\n\n }\n\n }\n\n}\n\nimpl TemplateSection for MetadataSection {\n\n fn get_section(&self, meta: &JsonValue, extra: 
&[(&str, &str)], out: &mut String) {\n\n if let Some(meta) = meta.get(&self.key) {\n\n if let Some(string) = meta.as_str() {\n\n out.push_str(string);\n\n } else if let Ok(string) = ::serde_json::to_string(meta) {\n\n out.push_str(&string);\n\n }\n\n } else if let Some(&(_, extra)) = extra.into_iter()\n\n .find(|&&(key, _)| key == &self.key) {\n\n out.push_str(extra);\n\n } else {\n\n // Do nothing when there is no such value.\n\n }\n\n }\n\n}\n", "file_path": "src/view/template/section.rs", "rank": 87, "score": 28641.906012935287 }, { "content": " // Ignore unknown processing instructions.\n\n }\n\n concated.drain(..(end + 2));\n\n }\n\n}\n\n\n\npub struct Template {\n\n sections: Vec<Box<TemplateSection>>,\n\n}\n\nimpl Template {\n\n pub fn from_file(base: &str, path: &str) -> Option<Template> {\n\n info!(\"Loading template from file: {}\", [base, path].join(\"/\"));\n\n let concated = match load_fragement(Path::new(base), Path::new(path)) {\n\n Ok(concated) => concated,\n\n Err(err) => {\n\n error!(\"Cannot compile template: {}\", err);\n\n return None\n\n },\n\n };\n\n Some(Template { sections: compile(concated) })\n", "file_path": "src/view/template/mod.rs", "rank": 88, "score": 28635.08593334031 }, { "content": " if parts[0] == \"frag\" {\n\n // Insert fragment.\n\n let frag_path = parts[1].trim();\n\n let subfrag_path = path_buf![&base, &frag_path];\n\n rv += &load_fragement(base, &subfrag_path)?;\n\n } else if parts[0] == \"var\" {\n\n // Keep variables for the next stage (compilation).\n\n extend = true;\n\n }\n\n }\n\n // Ignore unknown processing instructions.\n\n }\n\n if extend {\n\n rv.extend(template.drain(..(end + 2)));\n\n } else {\n\n template.drain(..(end + 2));\n\n }\n\n }\n\n}\n", "file_path": "src/view/template/mod.rs", "rank": 89, "score": 28627.092987461594 }, { "content": "use std::io::{Read, BufReader};\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse serde_json::Value as JsonValue;\n\n\n\nmod section;\n\nuse self::section::*;\n\n\n", "file_path": "src/view/template/mod.rs", "rank": 90, "score": 28625.918331053676 }, { "content": "use serde_json::Value as JsonValue;\n\n\n", "file_path": "src/view/template/section.rs", "rank": 91, "score": 28625.193066435913 }, { "content": "}\n\nimpl Default for Index {\n\n /// Make a `Index` that do literally nothing.\n\n fn default() -> Index {\n\n Index {\n\n index: Arc::new(RwLock::new(Box::new(DumbIndexCollection::new()))),\n\n key: String::new(),\n\n }\n\n }\n\n}\n\nimpl Deref for Index {\n\n type Target = RwLock<Box<IndexCollection>>;\n\n fn deref(&self) -> &Self::Target {\n\n &*self.index\n\n }\n\n}\n", "file_path": "src/api/index/mod.rs", "rank": 92, "score": 28119.302692409332 }, { "content": " };\n\n let col: Box<IndexCollection> = match ty {\n\n \"string\" => Box::new(\n\n DefaultIndexCollection::<String>::new(ascending)\n\n ),\n\n \"integer\" => Box::new(\n\n DefaultIndexCollection::<i64>::new(ascending)\n\n ),\n\n \"datetime\" => Box::new(\n\n DefaultIndexCollection::<DateTime>::new(ascending)\n\n ),\n\n _ => panic!(\"Index key type should be one of `datetime`, `string`, \\\n\n or `integer`.\"),\n\n };\n\n mk_idx(key, col, dir)\n\n }\n\n /// Get the index key of the current index.\n\n pub fn index_key(&self) -> &String {\n\n &self.key\n\n }\n", "file_path": "src/api/index/mod.rs", "rank": 93, "score": 28116.762848548613 }, { "content": " -> Index where T: 'static + IndexCollection {\n\n mk_idx(key, Box::new(col), dir)\n\n }\n\n /// Make a new `Index` with given index key and corresponding default index\n\n /// 
collection. If `dir` has a value, index will be generated from local\n\n /// storage, searching for articles in that directory and its subdirectory.\n\n pub fn new(key: &str, mut ty: &str, dir: Option<&str>) -> Index {\n\n let ascending = if ty.starts_with('+') {\n\n // Prefixed by '+', the index order is ascending. Larger value will\n\n // be placed at the back.\n\n ty = &ty[1..];\n\n true\n\n } else if ty.starts_with('-') {\n\n // Prefixed by '-', the index order is descending, Larger value will\n\n // be placed at the front.\n\n ty = &ty[1..];\n\n false\n\n } else {\n\n // Prefixed by nothing, the index order is by default ascending.\n\n true\n", "file_path": "src/api/index/mod.rs", "rank": 94, "score": 28116.049774650968 }, { "content": "use std::ops::Deref;\n\nuse std::path::Path;\n\nuse std::sync::{Arc, RwLock};\n\nuse serde_json::Value as JsonValue;\n\nuse walkdir::WalkDir;\n\n\n\nmod index_map;\n\nuse self::index_map::{DateTime, DumbIndexCollection,\n\n DefaultIndexCollection};\n\npub use self::index_map::IndexCollection;\n\n\n", "file_path": "src/api/index/mod.rs", "rank": 95, "score": 28114.13189814126 }, { "content": "\n\nimpl PostApi {\n\n pub fn new() -> PostApi {\n\n PostApi {\n\n auth: Arc::new(DumbAuthority::new()),\n\n cache: Arc::new(Cache::new(0, DumbCacheSource::new())),\n\n index: Index::default(),\n\n entries_per_request: DEFAULT_ENTRIES_PER_REQUEST,\n\n }\n\n }\n\n pub fn set_cache(&mut self, cache: Arc<Cache<String>>) {\n\n self.cache = cache;\n\n }\n\n pub fn set_auth(&mut self, auth: Arc<Authority<Privilege=()>>) {\n\n self.auth = auth;\n\n }\n\n pub fn set_entries_per_request(&mut self, entries_per_request: u64) {\n\n self.entries_per_request = entries_per_request;\n\n }\n\n pub fn set_index(&mut self, index: Index) {\n", "file_path": "src/api/post/mod.rs", "rank": 96, "score": 28023.756747241714 }, { "content": " \n\n let id = req.path_segs().join(\"/\");\n\n self.cache.remove(&id)\n\n .map(|_| Response::new())\n\n }\n\n}\n\nimpl Api for PostApi {\n\n fn name(&self) -> &[&str] {\n\n &[\"posts\"]\n\n }\n\n\n\n fn route(&self, req: &mut Request) -> ApiResult {\n\n use self::header::Allow;\n\n use self::Method::*;\n\n match req.method() {\n\n Options => Ok(Response::new()\n\n .with_header(Allow(vec![Options, Get, Put, Delete]))),\n\n Get => self.get(req),\n\n Put => self.put(req),\n\n Delete => self.delete(req),\n\n _ => Err(Error::method_not_allowed()),\n\n }\n\n }\n\n}\n", "file_path": "src/api/post/mod.rs", "rank": 97, "score": 28018.544158337092 }, { "content": "use std::sync::Arc;\n\nuse hyper::header::ContentType;\n\nuse writium::prelude::*;\n\nuse writium_auth::{Authority, DumbAuthority};\n\nuse writium_cache::{Cache, DumbCacheSource};\n\nuse super::index::Index;\n\n\n\nconst ERR_MIME: &'static str = \"Only data of type 'text/markdown' is accepted.\";\n\n\n\nconst DEFAULT_ENTRIES_PER_REQUEST: u64 = 5;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\npub struct PostApi {\n\n auth: Arc<Authority<Privilege=()>>,\n\n cache: Arc<Cache<String>>,\n\n index: Index,\n\n entries_per_request: u64,\n\n}\n", "file_path": "src/api/post/mod.rs", "rank": 98, "score": 28018.430302615034 }, { "content": " self.index = index;\n\n }\n\n\n\n fn get_content(&self, req: &mut Request) -> ApiResult {\n\n let id = req.path_segs().join(\"/\");\n\n let cache = self.cache.get(&id)?;\n\n let text = cache.read().unwrap();\n\n let text_ref: &[u8] = text.as_ref();\n\n let res = Response::new()\n\n .with_header(ContentType(\n\n \"text/markdown; charset=UTF-8\".parse().unwrap()))\n\n 
.with_body(text_ref);\n\n Ok(res)\n\n }\n\n fn get_index(&self, req: &mut Request) -> ApiResult {\n\n #[derive(Deserialize)]\n\n struct Param {\n\n /// The index of the first article to be included.\n\n from: Option<usize>,\n\n }\n", "file_path": "src/api/post/mod.rs", "rank": 99, "score": 28016.108795461947 } ]
Rust
host/src/fs/cache.rs
manasrivastava/tinychain
e6082f587ac089307ca9264d90d20c3f0991da52
use std::convert::{TryFrom, TryInto};
use std::path::PathBuf;

#[cfg(feature = "tensor")]
use afarray::Array;
use async_trait::async_trait;
use destream::IntoStream;
use freqache::Entry;
use futures::{Future, TryFutureExt};
use log::{debug, error, info, warn};
use tokio::fs;
use tokio::io::AsyncWrite;
use tokio::sync::mpsc;
use uplock::{RwLock, RwLockReadGuard, RwLockWriteGuard};

use tc_btree::Node;
use tc_error::*;
use tc_transact::fs::BlockData;

use crate::chain::ChainBlock;
use crate::scalar::Value;

use super::{create_parent, io_err, TMP};

struct Policy;

#[async_trait]
impl freqache::Policy<PathBuf, CacheBlock> for Policy {
    fn can_evict(&self, block: &CacheBlock) -> bool {
        block.ref_count() <= 1
    }

    async fn evict(&self, path: PathBuf, block: &CacheBlock) {
        debug!("evict block at {:?} from cache", path);

        let size = persist(&path, block)
            .await
            .expect("persist cache block to disk");

        debug!("block at {:?} evicted, wrote {} bytes to disk", path, size);
    }
}

type LFU = freqache::LFUCache<PathBuf, CacheBlock, Policy>;

#[derive(Clone)]
pub enum CacheBlock {
    BTree(CacheLock<Node>),
    Chain(CacheLock<ChainBlock>),
    Value(CacheLock<Value>),
    #[cfg(feature = "tensor")]
    Tensor(CacheLock<Array>),
}

impl CacheBlock {
    async fn persist<W: AsyncWrite + Send + Unpin>(&self, sink: &mut W) -> TCResult<u64> {
        match self {
            Self::BTree(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            Self::Chain(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            Self::Value(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            #[cfg(feature = "tensor")]
            Self::Tensor(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
        }
    }

    fn ref_count(&self) -> usize {
        match self {
            Self::BTree(block) => block.ref_count(),
            Self::Chain(block) => block.ref_count(),
            Self::Value(block) => block.ref_count(),
            #[cfg(feature = "tensor")]
            Self::Tensor(block) => block.ref_count(),
        }
    }
}

impl Entry for CacheBlock {
    fn weight(&self) -> u64 {
        match self {
            Self::BTree(_) => Node::max_size(),
            Self::Chain(_) => ChainBlock::max_size(),
            Self::Value(_) => Value::max_size(),
            #[cfg(feature = "tensor")]
            Self::Tensor(_) => Array::max_size(),
        }
    }
}

#[cfg(feature = "tensor")]
impl From<CacheLock<Array>> for CacheBlock {
    fn from(lock: CacheLock<Array>) -> CacheBlock {
        Self::Tensor(lock)
    }
}

impl From<CacheLock<ChainBlock>> for CacheBlock {
    fn from(lock: CacheLock<ChainBlock>) -> CacheBlock {
        Self::Chain(lock)
    }
}

impl From<CacheLock<Node>> for CacheBlock {
    fn from(lock: CacheLock<Node>) -> CacheBlock {
        Self::BTree(lock)
    }
}

impl From<CacheLock<Value>> for CacheBlock {
    fn from(lock: CacheLock<Value>) -> CacheBlock {
        Self::Value(lock)
    }
}

#[cfg(feature = "tensor")]
impl TryFrom<CacheBlock> for CacheLock<Array> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Tensor(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<ChainBlock> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Chain(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<Node> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::BTree(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<Value> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Value(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

pub struct CacheLock<T> {
    lock: RwLock<T>,
}

impl<T> CacheLock<T> {
    fn new(value: T) -> Self {
        Self {
            lock: RwLock::new(value),
        }
    }

    pub async fn read(&self) -> RwLockReadGuard<T> {
        debug!(
            "CacheLock got read lock request on a lock with {} refs...",
            self.lock.ref_count()
        );

        self.lock.read().await
    }

    pub async fn write(&self) -> RwLockWriteGuard<T> {
        debug!(
            "CacheLock got write lock request on a lock with {} refs...",
            self.lock.ref_count()
        );

        self.lock.write().await
    }

    pub fn ref_count(&self) -> usize {
        self.lock.ref_count()
    }
}

impl<T> Clone for CacheLock<T> {
    fn clone(&self) -> Self {
        Self {
            lock: self.lock.clone(),
        }
    }
}

struct Evict;

#[derive(Clone)]
pub struct Cache {
    tx: mpsc::Sender<Evict>,
    lfu: RwLock<LFU>,
}

impl Cache {
    pub fn new(max_size: u64) -> Self {
        assert!(max_size > 0);

        let (tx, rx) = mpsc::channel(1024);
        let cache = Self {
            tx,
            lfu: RwLock::new(LFU::new(max_size, Policy)),
        };

        spawn_cleanup_thread(cache.lfu.clone(), rx);

        cache
    }

    async fn _read_and_insert<B: BlockData>(
        mut cache: RwLockWriteGuard<LFU>,
        path: PathBuf,
    ) -> TCResult<CacheLock<B>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        let block_file = read_file(&path).await?;
        let block = B::load(block_file).await?;

        debug!("cache insert: {:?}", path);
        let block = CacheLock::new(block);
        cache.insert(path, block.clone().into()).await;

        Ok(block)
    }

    pub async fn read<B: BlockData>(&self, path: &PathBuf) -> TCResult<Option<CacheLock<B>>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("Cache::read {:?}", path);

        let mut cache = self.lfu.write().await;

        if let Some(block) = cache.get(path).await {
            debug!("cache hit: {:?}", path);
            let block = block.clone().try_into()?;
            return Ok(Some(block));
        } else if !path.exists() {
            return Ok(None);
        } else {
            info!("cache miss: {:?}", path);
        }

        Self::_read_and_insert(cache, path.clone())
            .map_ok(Some)
            .await
    }

    pub async fn delete(&self, path: &PathBuf) -> Option<CacheBlock> {
        debug!("Cache::delete {:?}", path);

        let mut cache = self.lfu.write().await;
        cache.remove(path).await
    }

    pub async fn delete_and_sync(&self, path: PathBuf) -> TCResult<()> {
        debug!("Cache::delete_and_sync {:?}", path);

        let mut cache = self.lfu.write().await;
        cache.remove(&path).await;

        let tmp = path.with_extension(TMP);
        if tmp.exists() {
            tokio::fs::remove_file(&tmp)
                .map_err(|e| io_err(e, &tmp))
                .await?;
        }

        if path.exists() {
            tokio::fs::remove_file(&path)
                .map_err(|e| io_err(e, &path))
                .await?;
        }

        Ok(())
    }

    pub async fn delete_dir(&self, path: PathBuf) -> TCResult<()> {
        let _lock = self.lfu.write().await;

        tokio::fs::remove_dir_all(&path)
            .map_err(|e| io_err(e, &path))
            .await
    }

    async fn _sync(cache: &mut LFU, path: &PathBuf) -> TCResult<bool> {
        debug!("sync block at {:?} with filesystem", &path);

        if let Some(block) = cache.get(path).await {
            let size = persist(path, &block).await?;
            debug!("sync'd block at {:?}, wrote {} bytes", path, size);
            Ok(true)
        } else {
            info!("cache sync miss: {:?}", path);
            Ok(path.exists())
        }
    }

    pub async fn sync(&self, path: &PathBuf) -> TCResult<bool> {
        debug!("sync block at {:?} with filesystem", &path);

        let mut cache = self.lfu.write().await;
        Self::_sync(&mut cache, path).await
    }

    pub async fn sync_and_copy<'en, B: BlockData + IntoStream<'en> + 'en>(
        &self,
        source: PathBuf,
        dest: PathBuf,
    ) -> TCResult<CacheLock<B>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("cache sync + copy from {:?} to {:?}", source, dest);

        let mut cache = self.lfu.write().await;
        Self::_sync(&mut cache, &source).await?;

        tokio::fs::copy(&source, &dest)
            .map_err(|e| io_err(e, format!("copy from {:?} to {:?}", source, dest)))
            .await?;

        Self::_read_and_insert(cache, dest).await
    }

    async fn _write<'en, B: BlockData + IntoStream<'en> + 'en>(
        cache: &mut LFU,
        tx: &mpsc::Sender<Evict>,
        path: PathBuf,
        block: CacheLock<B>,
    ) where
        CacheBlock: From<CacheLock<B>>,
    {
        cache.insert(path, block.into()).await;

        if cache.is_full() {
            info!("the block cache is full ({} occupied out of {} capacity), triggering garbage collection...", cache.occupied(), cache.capacity());

            if let Err(err) = tx.send(Evict).await {
                error!("the cache cleanup thread is dead! {}", err);
            }
        }
    }

    pub async fn write<'en, B: BlockData + IntoStream<'en> + 'en>(
        &self,
        path: PathBuf,
        block: B,
    ) -> TCResult<CacheLock<B>>
    where
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("cache insert: {:?}", &path);

        let block = CacheLock::new(block);
        let mut cache = self.lfu.write().await;
        Self::_write(&mut cache, &self.tx, path, block.clone()).await;
        Ok(block)
    }
}

fn spawn_cleanup_thread(cache: RwLock<LFU>, mut rx: mpsc::Receiver<Evict>) {
    tokio::spawn(async move {
        info!("cache cleanup thread is running...");

        while rx.recv().await.is_some() {
            let lfu = cache.read().await;

            debug!(
                "got Evict message, cache has {} entries (capacity {} bytes)",
                lfu.len(),
                lfu.capacity()
            );

            if lfu.is_full() {
                let mut lfu = lfu.upgrade().await;
                debug!("running cache eviction with {} entries...", lfu.len());
                lfu.evict().await;
                debug!("cache eviction complete, {} entries remain", lfu.len());
            } else {
                debug!("cache eviction already ran, ignoring redundant Evict message");
            }
        }

        warn!("cache cleanup thread shutting down");
    });
}

async fn persist(path: &PathBuf, block: &CacheBlock) -> TCResult<u64> {
    let tmp = path.with_extension(TMP);

    let size = {
        let mut tmp_file = if tmp.exists() {
            write_file(&tmp).await?
        } else {
            create_parent(&tmp).await?;
            create_file(&tmp).await?
        };

        let size = block.persist(&mut tmp_file).await?;
        tmp_file.sync_all().map_err(|e| io_err(e, &tmp)).await?;
        size
    };

    tokio::fs::rename(&tmp, path)
        .map_err(|e| io_err(e, &tmp))
        .await?;

    Ok(size)
}

#[inline]
fn create_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ {
    tokio::fs::File::create(path).map_err(move |e| io_err(e, path))
}

#[inline]
fn read_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ {
    fs::File::open(path).map_err(move |e| io_err(e, path))
}

async fn write_file(path: &PathBuf) -> TCResult<fs::File> {
    fs::OpenOptions::new()
        .truncate(true)
        .write(true)
        .open(path)
        .map_err(move |e| io_err(e, path))
        .await
}
use std::convert::{TryFrom, TryInto};
use std::path::PathBuf;

#[cfg(feature = "tensor")]
use afarray::Array;
use async_trait::async_trait;
use destream::IntoStream;
use freqache::Entry;
use futures::{Future, TryFutureExt};
use log::{debug, error, info, warn};
use tokio::fs;
use tokio::io::AsyncWrite;
use tokio::sync::mpsc;
use uplock::{RwLock, RwLockReadGuard, RwLockWriteGuard};

use tc_btree::Node;
use tc_error::*;
use tc_transact::fs::BlockData;

use crate::chain::ChainBlock;
use crate::scalar::Value;

use super::{create_parent, io_err, TMP};

struct Policy;

#[async_trait]
impl freqache::Policy<PathBuf, CacheBlock> for Policy {
    fn can_evict(&self, block: &CacheBlock) -> bool {
        block.ref_count() <= 1
    }

    async fn evict(&self, path: PathBuf, block: &CacheBlock) {
        debug!("evict block at {:?} from cache", path);

        let size = persist(&path, block)
            .await
            .expect("persist cache block to disk");

        debug!("block at {:?} evicted, wrote {} bytes to disk", path, size);
    }
}

type LFU = freqache::LFUCache<PathBuf, CacheBlock, Policy>;

#[derive(Clone)]
pub enum CacheBlock {
    BTree(CacheLock<Node>),
    Chain(CacheLock<ChainBlock>),
    Value(CacheLock<Value>),
    #[cfg(feature = "tensor")]
    Tensor(CacheLock<Array>),
}

impl CacheBlock {
    async fn persist<W: AsyncWrite + Send + Unpin>(&self, sink: &mut W) -> TCResult<u64> {
        match self {
            Self::BTree(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            Self::Chain(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            Self::Value(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
            #[cfg(feature = "tensor")]
            Self::Tensor(block) => {
                let contents = block.read().await;
                contents.persist(sink).await
            }
        }
    }

    fn ref_count(&self) -> usize {
        match self {
            Self::BTree(block) => block.ref_count(),
            Self::Chain(block) => block.ref_count(),
            Self::Value(block) => block.ref_count(),
            #[cfg(feature = "tensor")]
            Self::Tensor(block) => block.ref_count(),
        }
    }
}

impl Entry for CacheBlock {
    fn weight(&self) -> u64 {
        match self {
            Self::BTree(_) => Node::max_size(),
            Self::Chain(_) => ChainBlock::max_size(),
            Self::Value(_) => Value::max_size(),
            #[cfg(feature = "tensor")]
            Self::Tensor(_) => Array::max_size(),
        }
    }
}

#[cfg(feature = "tensor")]
impl From<CacheLock<Array>> for CacheBlock {
    fn from(lock: CacheLock<Array>) -> CacheBlock {
        Self::Tensor(lock)
    }
}

impl From<CacheLock<ChainBlock>> for CacheBlock {
    fn from(lock: CacheLock<ChainBlock>) -> CacheBlock {
        Self::Chain(lock)
    }
}

impl From<CacheLock<Node>> for CacheBlock {
    fn from(lock: CacheLock<Node>) -> CacheBlock {
        Self::BTree(lock)
    }
}

impl From<CacheLock<Value>> for CacheBlock {
    fn from(lock: CacheLock<Value>) -> CacheBlock {
        Self::Value(lock)
    }
}

#[cfg(feature = "tensor")]
impl TryFrom<CacheBlock> for CacheLock<Array> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Tensor(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<ChainBlock> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Chain(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<Node> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::BTree(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

impl TryFrom<CacheBlock> for CacheLock<Value> {
    type Error = TCError;

    fn try_from(block: CacheBlock) -> TCResult<Self> {
        match block {
            CacheBlock::Value(block) => Ok(block),
            _ => Err(TCError::unsupported("unexpected block type")),
        }
    }
}

pub struct CacheLock<T> {
    lock: RwLock<T>,
}

impl<T> CacheLock<T> {
    fn new(value: T) -> Self {
        Self {
            lock: RwLock::new(value),
        }
    }

    pub async fn read(&self) -> RwLockReadGuard<T> {
        debug!(
            "CacheLock got read lock request on a lock with {} refs...",
            self.lock.ref_count()
        );

        self.lock.read().await
    }

    pub async fn write(&self) -> RwLockWriteGuard<T> {
        debug!(
            "CacheLock got write lock request on a lock with {} refs...",
            self.lock.ref_count()
        );

        self.lock.write().await
    }

    pub fn ref_count(&self) -> usize {
        self.lock.ref_count()
    }
}

impl<T> Clone for CacheLock<T> {
    fn clone(&self) -> Self {
        Self {
            lock: self.lock.clone(),
        }
    }
}

struct Evict;

#[derive(Clone)]
pub struct Cache {
    tx: mpsc::Sender<Evict>,
    lfu: RwLock<LFU>,
}

impl Cache {
    pub fn new(max_size: u64) -> Self {
        assert!(max_size > 0);

        let (tx, rx) = mpsc::channel(1024);
        let cache = Self {
            tx,
            lfu: RwLock::new(LFU::new(max_size, Policy)),
        };

        spawn_cleanup_thread(cache.lfu.clone(), rx);

        cache
    }

    async fn _read_and_insert<B: BlockData>(
        mut cache: RwLockWriteGuard<LFU>,
        path: PathBuf,
    ) -> TCResult<CacheLock<B>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        let block_file = read_file(&path).await?;
        let block = B::load(block_file).await?;

        debug!("cache insert: {:?}", path);
        let block = CacheLock::new(block);
        cache.insert(path, block.clone().into()).await;

        Ok(block)
    }

    pub async fn read<B: Block
    pub async fn delete(&self, path: &PathBuf) -> Option<CacheBlock> {
        debug!("Cache::delete {:?}", path);

        let mut cache = self.lfu.write().await;
        cache.remove(path).await
    }

    pub async fn delete_and_sync(&self, path: PathBuf) -> TCResult<()> {
        debug!("Cache::delete_and_sync {:?}", path);

        let mut cache = self.lfu.write().await;
        cache.remove(&path).await;

        let tmp = path.with_extension(TMP);
        if tmp.exists() {
            tokio::fs::remove_file(&tmp)
                .map_err(|e| io_err(e, &tmp))
                .await?;
        }

        if path.exists() {
            tokio::fs::remove_file(&path)
                .map_err(|e| io_err(e, &path))
                .await?;
        }

        Ok(())
    }

    pub async fn delete_dir(&self, path: PathBuf) -> TCResult<()> {
        let _lock = self.lfu.write().await;

        tokio::fs::remove_dir_all(&path)
            .map_err(|e| io_err(e, &path))
            .await
    }

    async fn _sync(cache: &mut LFU, path: &PathBuf) -> TCResult<bool> {
        debug!("sync block at {:?} with filesystem", &path);

        if let Some(block) = cache.get(path).await {
            let size = persist(path, &block).await?;
            debug!("sync'd block at {:?}, wrote {} bytes", path, size);
            Ok(true)
        } else {
            info!("cache sync miss: {:?}", path);
            Ok(path.exists())
        }
    }

    pub async fn sync(&self, path: &PathBuf) -> TCResult<bool> {
        debug!("sync block at {:?} with filesystem", &path);

        let mut cache = self.lfu.write().await;
        Self::_sync(&mut cache, path).await
    }

    pub async fn sync_and_copy<'en, B: BlockData + IntoStream<'en> + 'en>(
        &self,
        source: PathBuf,
        dest: PathBuf,
    ) -> TCResult<CacheLock<B>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("cache sync + copy from {:?} to {:?}", source, dest);

        let mut cache = self.lfu.write().await;
        Self::_sync(&mut cache, &source).await?;

        tokio::fs::copy(&source, &dest)
            .map_err(|e| io_err(e, format!("copy from {:?} to {:?}", source, dest)))
            .await?;

        Self::_read_and_insert(cache, dest).await
    }

    async fn _write<'en, B: BlockData + IntoStream<'en> + 'en>(
        cache: &mut LFU,
        tx: &mpsc::Sender<Evict>,
        path: PathBuf,
        block: CacheLock<B>,
    ) where
        CacheBlock: From<CacheLock<B>>,
    {
        cache.insert(path, block.into()).await;

        if cache.is_full() {
            info!("the block cache is full ({} occupied out of {} capacity), triggering garbage collection...", cache.occupied(), cache.capacity());

            if let Err(err) = tx.send(Evict).await {
                error!("the cache cleanup thread is dead! {}", err);
            }
        }
    }

    pub async fn write<'en, B: BlockData + IntoStream<'en> + 'en>(
        &self,
        path: PathBuf,
        block: B,
    ) -> TCResult<CacheLock<B>>
    where
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("cache insert: {:?}", &path);

        let block = CacheLock::new(block);
        let mut cache = self.lfu.write().await;
        Self::_write(&mut cache, &self.tx, path, block.clone()).await;
        Ok(block)
    }
}

fn spawn_cleanup_thread(cache: RwLock<LFU>, mut rx: mpsc::Receiver<Evict>) {
    tokio::spawn(async move {
        info!("cache cleanup thread is running...");

        while rx.recv().await.is_some() {
            let lfu = cache.read().await;

            debug!(
                "got Evict message, cache has {} entries (capacity {} bytes)",
                lfu.len(),
                lfu.capacity()
            );

            if lfu.is_full() {
                let mut lfu = lfu.upgrade().await;
                debug!("running cache eviction with {} entries...", lfu.len());
                lfu.evict().await;
                debug!("cache eviction complete, {} entries remain", lfu.len());
            } else {
                debug!("cache eviction already ran, ignoring redundant Evict message");
            }
        }

        warn!("cache cleanup thread shutting down");
    });
}

async fn persist(path: &PathBuf, block: &CacheBlock) -> TCResult<u64> {
    let tmp = path.with_extension(TMP);

    let size = {
        let mut tmp_file = if tmp.exists() {
            write_file(&tmp).await?
        } else {
            create_parent(&tmp).await?;
            create_file(&tmp).await?
        };

        let size = block.persist(&mut tmp_file).await?;
        tmp_file.sync_all().map_err(|e| io_err(e, &tmp)).await?;
        size
    };

    tokio::fs::rename(&tmp, path)
        .map_err(|e| io_err(e, &tmp))
        .await?;

    Ok(size)
}

#[inline]
fn create_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ {
    tokio::fs::File::create(path).map_err(move |e| io_err(e, path))
}

#[inline]
fn read_file(path: &PathBuf) -> impl Future<Output = TCResult<fs::File>> + '_ {
    fs::File::open(path).map_err(move |e| io_err(e, path))
}

async fn write_file(path: &PathBuf) -> TCResult<fs::File> {
    fs::OpenOptions::new()
        .truncate(true)
        .write(true)
        .open(path)
        .map_err(move |e| io_err(e, path))
        .await
}
Data>(&self, path: &PathBuf) -> TCResult<Option<CacheLock<B>>>
    where
        CacheLock<B>: TryFrom<CacheBlock, Error = TCError>,
        CacheBlock: From<CacheLock<B>>,
    {
        debug!("Cache::read {:?}", path);

        let mut cache = self.lfu.write().await;
        if let Some(block) = cache.get(path).await {
            debug!("cache hit: {:?}", path);
            let block = block.clone().try_into()?;
            return Ok(Some(block));
        } else if !path.exists() {
            return Ok(None);
        } else {
            info!("cache miss: {:?}", path);
        }

        Self::_read_and_insert(cache, path.clone())
            .map_ok(Some)
            .await
    }
function_block-function_prefixed
[ { "content": "fn io_err<I: fmt::Debug + Send>(err: io::Error, info: I) -> TCError {\n\n match err.kind() {\n\n io::ErrorKind::NotFound => {\n\n TCError::internal(format!(\"host filesystem has no such entry {:?}\", info))\n\n }\n\n io::ErrorKind::PermissionDenied => TCError::internal(format!(\n\n \"Tinychain does not have permission to access the host filesystem: {:?}\",\n\n info\n\n )),\n\n other => TCError::internal(format!(\"host filesystem error: {:?}: {}\", other, err)),\n\n }\n\n}\n", "file_path": "host/src/fs/mod.rs", "rank": 2, "score": 303130.37363867223 }, { "content": "#[inline]\n\npub fn coord_to_offset(coord: &[u64], coord_bounds: &[u64]) -> u64 {\n\n coord_bounds\n\n .iter()\n\n .zip(coord.iter())\n\n .map(|(d, x)| d * x)\n\n .sum()\n\n}\n", "file_path": "host/tensor/src/sparse/combine.rs", "rank": 4, "score": 247490.31581473094 }, { "content": "pub trait BlockWrite<B: BlockData, F: File<B>>: DerefMut<Target = B> + Send {\n\n fn downgrade(\n\n self,\n\n file: &F,\n\n ) -> TCBoxTryFuture<<<F as File<B>>::Block as Block<B, F>>::ReadLock>;\n\n}\n\n\n\n/// A transactional filesystem block.\n", "file_path": "host/transact/src/fs.rs", "rank": 6, "score": 235246.41688054713 }, { "content": "struct UnaryHandlerAsync<F: Send> {\n\n tensor: Tensor,\n\n op: fn(Tensor, Txn) -> F,\n\n}\n\n\n\nimpl<'a, F: Send> UnaryHandlerAsync<F> {\n\n fn new(tensor: Tensor, op: fn(Tensor, Txn) -> F) -> Self {\n\n Self { tensor, op }\n\n }\n\n}\n\n\n\nimpl<'a, F> Handler<'a> for UnaryHandlerAsync<F>\n\nwhere\n\n F: Future<Output = TCResult<bool>> + Send + 'a,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, key| {\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 7, "score": 224236.0136512113 }, { "content": "#[async_trait]\n\npub trait BlockData: de::FromStream<Context = ()> + Clone + Send + Sync + 'static {\n\n fn ext() -> &'static str;\n\n\n\n fn max_size() -> u64;\n\n\n\n async fn hash<'en>(&'en self) -> TCResult<Bytes>\n\n where\n\n Self: en::ToStream<'en>,\n\n {\n\n let mut hasher = Sha256::default();\n\n hash_chunks(&mut hasher, self).await?;\n\n let digest = hasher.finalize();\n\n Ok(Bytes::from(digest.to_vec()))\n\n }\n\n\n\n async fn load<S: AsyncReadExt + Send + Unpin>(source: S) -> TCResult<Self> {\n\n tbon::de::read_from((), source)\n\n .map_err(|e| TCError::internal(format!(\"unable to parse saved block: {}\", e)))\n\n .await\n\n }\n", "file_path": "host/transact/src/fs.rs", "rank": 8, "score": 220469.52875977647 }, { "content": "fn coord_block(coords: Coords, shape: &[u64]) -> (Vec<u64>, ArrayExt<u64>, Offsets) {\n\n let af_per_block = af::constant(PER_BLOCK as u64, af::Dim4::new(&[1, 1, 1, 1]));\n\n\n\n let offsets = coords.to_offsets(shape);\n\n let block_offsets = ArrayExt::from(af::div(offsets.af(), &af_per_block, true));\n\n let block_ids = block_offsets.unique(true);\n\n (block_ids.to_vec(), block_offsets, offsets)\n\n}\n", "file_path": "host/tensor/src/dense/file.rs", "rank": 10, "score": 211551.23543100985 }, { "content": "#[inline]\n\nfn div_ceil(l: u64, r: u64) -> u64 {\n\n if l % r == 0 {\n\n l / r\n\n } else {\n\n (l / r) + 1\n\n }\n\n}\n\n\n", "file_path": "host/tensor/src/dense/file.rs", "rank": 11, "score": 203598.4790326428 }, { "content": "pub trait BlockRead<B: BlockData, F: File<B>>: Deref<Target = B> + Send {\n\n fn upgrade(self, file: &F)\n\n -> TCBoxTryFuture<<<F as File<B>>::Block as Block<B, F>>::WriteLock>;\n\n}\n\n\n", "file_path": "host/transact/src/fs.rs", "rank": 12, "score": 
202184.7744833319 }, { "content": "#[async_trait]\n\npub trait Transaction<D: fs::Dir>: Clone + Sized + Send + Sync + 'static {\n\n /// The [`TxnId`] of this transaction context.\n\n fn id(&'_ self) -> &'_ TxnId;\n\n\n\n /// Borrow the [`fs::Dir`] of this transaction context.\n\n fn context(&'_ self) -> &'_ D;\n\n\n\n /// Consume this `Txn` and return its [`fs::Dir`].\n\n fn into_context(self) -> D;\n\n\n\n /// Return a transaction subcontext with its own [`fs::Dir`].\n\n async fn subcontext(&self, id: Id) -> TCResult<Self>;\n\n\n\n /// Return a transaction subcontext with its own unique [`fs::Dir`].\n\n async fn subcontext_tmp(&self) -> TCResult<Self>;\n\n}\n", "file_path": "host/transact/src/lib.rs", "rank": 13, "score": 201377.11595036087 }, { "content": "#[async_trait]\n\npub trait Store: Clone + Send + Sync {\n\n /// Return `true` if this store contains no data as of the given [`TxnId`].\n\n async fn is_empty(&self, txn_id: &TxnId) -> TCResult<bool>;\n\n}\n\n\n\n/// A transactional file.\n", "file_path": "host/transact/src/fs.rs", "rank": 14, "score": 198169.90343229333 }, { "content": "type TokioError = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n\n", "file_path": "host/src/main.rs", "rank": 15, "score": 197370.68029839374 }, { "content": "pub fn reference_self(form: Vec<(Id, Scalar)>, path: &TCPathBuf) -> Vec<(Id, Scalar)> {\n\n form.into_iter()\n\n .map(|(id, scalar)| (id, scalar.reference_self(path)))\n\n .collect()\n\n}\n", "file_path": "host/src/scalar/op/def.rs", "rank": 16, "score": 196334.54923001025 }, { "content": "pub fn dereference_self(form: Vec<(Id, Scalar)>, path: &TCPathBuf) -> Vec<(Id, Scalar)> {\n\n form.into_iter()\n\n .map(|(id, scalar)| (id, scalar.dereference_self(path)))\n\n .collect()\n\n}\n\n\n", "file_path": "host/src/scalar/op/def.rs", "rank": 17, "score": 196334.54923001025 }, { "content": "fn delimiter<E>(content: &'static [u8]) -> impl Stream<Item = Result<Bytes, E>> {\n\n stream::once(future::ready(Ok(Bytes::from_static(content))))\n\n}\n", "file_path": "host/src/http/server.rs", "rank": 18, "score": 195280.64956253665 }, { "content": "#[inline]\n\nfn coord_bounds(shape: &[u64]) -> Vec<u64> {\n\n (0..shape.len())\n\n .map(|axis| shape[axis + 1..].iter().product())\n\n .collect()\n\n}\n", "file_path": "host/tensor/src/lib.rs", "rank": 19, "score": 194940.6715834758 }, { "content": "#[async_trait]\n\npub trait Block<B: BlockData, F: File<B>>: Send + Sync {\n\n type ReadLock: BlockRead<B, F>;\n\n type WriteLock: BlockWrite<B, F>;\n\n\n\n /// Get a read lock on this block.\n\n async fn read(self) -> Self::ReadLock;\n\n\n\n /// Get a write lock on this block.\n\n async fn write(self) -> Self::WriteLock;\n\n}\n\n\n\n/// A transactional persistent data store.\n", "file_path": "host/transact/src/fs.rs", "rank": 20, "score": 194402.60900814008 }, { "content": "fn is_empty(contents: &DirContents) -> bool {\n\n for (handle, _) in contents {\n\n if !handle.file_name().to_str().unwrap().starts_with('.') {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n", "file_path": "host/src/fs/dir.rs", "rank": 21, "score": 193000.25622947072 }, { "content": "#[inline]\n\nfn u64_into_value(u: u64) -> Value {\n\n Value::Number(Number::UInt(UInt::U64(u)))\n\n}\n\n\n", "file_path": "host/tensor/src/sparse/table.rs", "rank": 22, "score": 191344.00020043453 }, { "content": "#[inline]\n\nfn block_version(file_path: &PathBuf, txn_id: &TxnId, block_id: &BlockId) -> PathBuf {\n\n let mut path = file_version(file_path, txn_id);\n\n path.push(block_id.to_string());\n\n 
path\n\n}\n", "file_path": "host/src/fs/file.rs", "rank": 23, "score": 189156.4949328985 }, { "content": "#[inline]\n\nfn file_ext(path: &'_ PathBuf) -> Option<&'_ str> {\n\n path.extension().and_then(|ext| ext.to_str())\n\n}\n\n\n", "file_path": "host/src/fs/mod.rs", "rank": 24, "score": 181312.88377048692 }, { "content": "#[inline]\n\nfn expect_u64(value: Value) -> TCResult<u64> {\n\n if let Value::Number(Number::UInt(UInt::U64(unwrapped))) = value {\n\n Ok(unwrapped)\n\n } else {\n\n Err(TCError::bad_request(\"expected u64 but found\", value))\n\n }\n\n}\n", "file_path": "host/tensor/src/sparse/table.rs", "rank": 26, "score": 178193.61453308206 }, { "content": "fn cast_bound(dim: u64, bound: Value) -> TCResult<u64> {\n\n let bound = i64::try_cast_from(bound, |v| TCError::bad_request(\"invalid bound\", v))?;\n\n if bound.abs() as u64 > dim {\n\n return Err(TCError::bad_request(\n\n format!(\"Index out of bounds for dimension {}\", dim),\n\n bound,\n\n ));\n\n }\n\n\n\n if bound < 0 {\n\n Ok(dim - bound.abs() as u64)\n\n } else {\n\n Ok(bound as u64)\n\n }\n\n}\n\n\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 27, "score": 176582.4144076283 }, { "content": "fn error_type(err_type: &Id) -> Option<ErrorType> {\n\n match err_type.as_str() {\n\n \"bad_gateway\" => Some(ErrorType::BadGateway),\n\n \"bad_request\" => Some(ErrorType::BadRequest),\n\n \"conflict\" => Some(ErrorType::Conflict),\n\n \"forbidden\" => Some(ErrorType::Forbidden),\n\n \"internal\" => Some(ErrorType::Internal),\n\n \"method_not_allowed\" => Some(ErrorType::MethodNotAllowed),\n\n \"not_found\" => Some(ErrorType::NotFound),\n\n \"not_implemented\" => Some(ErrorType::NotImplemented),\n\n \"timeout\" => Some(ErrorType::Timeout),\n\n \"unauthorized\" => Some(ErrorType::Unauthorized),\n\n _ => None,\n\n }\n\n}\n", "file_path": "host/src/route/mod.rs", "rank": 28, "score": 176558.8863240995 }, { "content": "fn data_size(flag: &str) -> TCResult<u64> {\n\n const ERR: &str = \"unable to parse data size\";\n\n\n\n if flag.is_empty() || flag == \"0\" {\n\n return Ok(0);\n\n }\n\n\n\n let size = u64::from_str_radix(&flag[0..flag.len() - 1], 10)\n\n .map_err(|_| TCError::bad_request(ERR, flag))?;\n\n\n\n if flag.ends_with('K') {\n\n Ok(size * 1000)\n\n } else if flag.ends_with('M') {\n\n Ok(size * 1_000_000)\n\n } else if flag.ends_with('G') {\n\n Ok(size * 1_000_000_000)\n\n } else {\n\n Err(TCError::bad_request(ERR, flag))\n\n }\n\n}\n\n\n", "file_path": "host/src/main.rs", "rank": 29, "score": 172721.32248079096 }, { "content": "fn block_offsets(\n\n indices: &ArrayExt<u64>,\n\n offsets: &ArrayExt<u64>,\n\n start: usize,\n\n block_id: u64,\n\n) -> (ArrayExt<u64>, usize) {\n\n assert_eq!(indices.len(), offsets.len());\n\n\n\n let num_to_update = af::sum_all(&af::eq(\n\n indices.af(),\n\n &af::constant(block_id, af::Dim4::new(&[1, 1, 1, 1])),\n\n true,\n\n ))\n\n .0;\n\n\n\n if num_to_update == 0 {\n\n return (af::Array::new_empty(af::Dim4::default()).into(), start);\n\n }\n\n\n\n let end = start + num_to_update as usize;\n\n let block_offsets = offsets.slice(start, end);\n\n\n\n (block_offsets, end)\n\n}\n\n\n", "file_path": "host/tensor/src/dense/file.rs", "rank": 30, "score": 172387.45655843208 }, { "content": "fn route<'a, T>(tensor: &'a T, path: &'a [PathSegment]) -> Option<Box<dyn Handler<'a> + 'a>>\n\nwhere\n\n T: TensorAccess\n\n + TensorIO<fs::Dir, Txn = Txn>\n\n + TensorCompare<Tensor, Compare = Tensor, Dense = Tensor>\n\n + TensorBoolean<Tensor, Combine = Tensor>\n\n + TensorDualIO<fs::Dir, Tensor, Txn = 
Txn>\n\n + TensorMath<fs::Dir, Tensor, Combine = Tensor>\n\n + TensorReduce<fs::Dir, Txn = Txn>\n\n + TensorTransform\n\n + TensorUnary<fs::Dir, Txn = Txn>\n\n + Clone\n\n + Send\n\n + Sync,\n\n Collection: From<T>,\n\n Tensor: From<T>,\n\n <T as TensorTransform>::Slice: TensorAccess + Send,\n\n Tensor: From<<T as TensorReduce<fs::Dir>>::Reduce>,\n\n Tensor: From<<T as TensorTransform>::Expand>,\n\n Tensor: From<<T as TensorTransform>::Slice>,\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 31, "score": 170934.80737088286 }, { "content": "fn contract<D, T>(mut op: T, dimensions: BTreeMap<char, u64>, f_output: Label) -> TCResult<T>\n\nwhere\n\n D: Dir,\n\n T: TensorAccess + TensorReduce<D, Reduce = T> + TensorTransform<Transpose = T>,\n\n{\n\n let mut f_input = dimensions.keys().cloned().collect::<Label>();\n\n let mut axis = 0;\n\n while op.ndim() > f_output.len() {\n\n assert_eq!(f_input.len(), op.ndim());\n\n\n\n if !f_output.contains(&f_input[axis]) {\n\n op = op.sum(axis)?;\n\n f_input.remove(axis);\n\n } else {\n\n axis += 1;\n\n }\n\n }\n\n\n\n if f_input == f_output {\n\n Ok(op)\n", "file_path": "host/tensor/src/einsum.rs", "rank": 32, "score": 170039.08909697202 }, { "content": "fn empty_dir(workspace: PathBuf) -> Result<(), TokioError> {\n\n let contents = std::fs::read_dir(workspace)?;\n\n for entry in contents {\n\n let result = match entry {\n\n Ok(entry) => rm_entry(entry),\n\n Err(cause) => Err(cause),\n\n };\n\n\n\n if let Err(cause) = result {\n\n log::error!(\"unable to clean up workspace: {}\", cause);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "host/src/main.rs", "rank": 33, "score": 169653.92421992694 }, { "content": "#[async_trait]\n\npub trait Dir: Store + Send + Sized + 'static {\n\n /// The type of a file entry in this `Dir`\n\n type File: Send;\n\n\n\n /// The `Class` of a file stored in this `Dir`\n\n type FileClass: Send;\n\n\n\n /// Return `true` if this directory has an entry at the given [`PathSegment`].\n\n async fn contains(&self, txn_id: &TxnId, name: &PathSegment) -> TCResult<bool>;\n\n\n\n /// Create a new `Dir`.\n\n async fn create_dir(&self, txn_id: TxnId, name: PathSegment) -> TCResult<Self>;\n\n\n\n /// Create a new `Dir` with a new unique ID.\n\n async fn create_dir_tmp(&self, txn_id: TxnId) -> TCResult<Self>;\n\n\n\n /// Create a new [`Self::File`].\n\n async fn create_file<F: TryFrom<Self::File, Error = TCError>, C: Send>(\n\n &self,\n\n txn_id: TxnId,\n", "file_path": "host/transact/src/fs.rs", "rank": 34, "score": 168948.712755726 }, { "content": "fn rm_entry(entry: std::fs::DirEntry) -> Result<(), std::io::Error> {\n\n if entry.metadata()?.is_dir() {\n\n std::fs::remove_dir_all(entry.path())\n\n } else {\n\n std::fs::remove_file(entry.path())\n\n }\n\n}\n", "file_path": "host/src/main.rs", "rank": 35, "score": 168835.68787230027 }, { "content": "fn encodable_c64<'en>(blocks: TCBoxTryStream<'en, Array>) -> impl Stream<Item = Vec<f64>> + 'en {\n\n blocks\n\n .take_while(|r| future::ready(r.is_ok()))\n\n .map(|block| block.expect(\"tensor block\"))\n\n .map(|arr| {\n\n let source = arr.type_cast::<afarray::Complex<f64>>();\n\n let re = source.re();\n\n let im = source.im();\n\n\n\n let mut i = 0;\n\n let mut dest = vec![0.; source.len() * 2];\n\n for (re, im) in re.to_vec().into_iter().zip(im.to_vec()) {\n\n dest[i] = re;\n\n dest[i + 1] = im;\n\n i += 2;\n\n }\n\n\n\n dest\n\n })\n\n}\n", "file_path": "host/tensor/src/dense/mod.rs", "rank": 36, "score": 160767.8115703269 }, { "content": "fn encodable_c32<'en>(blocks: 
TCBoxTryStream<'en, Array>) -> impl Stream<Item = Vec<f32>> + 'en {\n\n blocks\n\n .take_while(|r| future::ready(r.is_ok()))\n\n .map(|block| block.expect(\"tensor block\"))\n\n .map(|arr| {\n\n let source = arr.type_cast::<afarray::Complex<f32>>();\n\n let re = source.re();\n\n let im = source.im();\n\n\n\n let mut i = 0;\n\n let mut dest = vec![0.; source.len() * 2];\n\n for (re, im) in re.to_vec().into_iter().zip(im.to_vec()) {\n\n dest[i] = re;\n\n dest[i + 1] = im;\n\n i += 2;\n\n }\n\n\n\n dest\n\n })\n\n}\n\n\n", "file_path": "host/tensor/src/dense/mod.rs", "rank": 37, "score": 160767.8115703269 }, { "content": "struct BlockStreamView<'en> {\n\n dtype: NumberType,\n\n blocks: TCBoxTryStream<'en, Array>,\n\n}\n\n\n\nimpl<'en> en::IntoStream<'en> for BlockStreamView<'en> {\n\n fn into_stream<E: en::Encoder<'en>>(self, encoder: E) -> Result<E::Ok, E::Error> {\n\n use tc_value::{\n\n ComplexType as CT, FloatType as FT, IntType as IT, NumberType as NT, UIntType as UT,\n\n };\n\n\n\n fn encodable<'en, T: af::HasAfEnum + Clone + Default + 'en>(\n\n blocks: TCBoxTryStream<'en, Array>,\n\n ) -> impl Stream<Item = Vec<T>> + 'en {\n\n // an error can't be encoded within an array\n\n // so in case of a read error, let the receiver figure out that the tensor\n\n // doesn't have enough elements\n\n blocks\n\n .take_while(|r| future::ready(r.is_ok()))\n\n .map(|r| r.expect(\"tensor block\").type_cast().to_vec())\n", "file_path": "host/tensor/src/dense/mod.rs", "rank": 38, "score": 160594.59448669612 }, { "content": "#[async_trait]\n\npub trait File<B: BlockData>: Store + Sized + 'static {\n\n /// The type of block which this file is divided into.\n\n type Block: Block<B, Self>;\n\n\n\n /// Return the IDs of all this `File``'s blocks.\n\n async fn block_ids(&self, txn_id: &TxnId) -> TCResult<HashSet<BlockId>>;\n\n\n\n /// Return a new [`BlockId`] which is not used within this `File`.\n\n async fn unique_id(&self, txn_id: &TxnId) -> TCResult<BlockId>;\n\n\n\n /// Return true if this `File` contains the given [`BlockId`] as of the given [`TxnId`].\n\n async fn contains_block(&self, txn_id: &TxnId, name: &BlockId) -> TCResult<bool>;\n\n\n\n /// Copy all blocks from the source `File` into this `File`.\n\n async fn copy_from(&self, other: &Self, txn_id: TxnId) -> TCResult<()>;\n\n\n\n /// Create a new [`Self::Block`].\n\n async fn create_block(\n\n &self,\n\n txn_id: TxnId,\n", "file_path": "host/transact/src/fs.rs", "rank": 39, "score": 159766.78891538072 }, { "content": "struct BlockListVisitor<'a, F> {\n\n txn_id: TxnId,\n\n file: &'a F,\n\n}\n\n\n\nimpl<'a, F: File<Array>> BlockListVisitor<'a, F> {\n\n fn new(txn_id: TxnId, file: &'a F) -> Self {\n\n Self { txn_id, file }\n\n }\n\n\n\n async fn create_block<T: af::HasAfEnum, E: de::Error>(\n\n &self,\n\n block_id: u64,\n\n block: ArrayExt<T>,\n\n ) -> Result<<F as File<Array>>::Block, E>\n\n where\n\n Array: From<ArrayExt<T>>,\n\n {\n\n debug!(\"BlockListVisitor::create_block {}\", block_id);\n\n\n", "file_path": "host/tensor/src/dense/file.rs", "rank": 40, "score": 156982.802362554 }, { "content": "struct ComplexBlockListVisitor<'a, F> {\n\n visitor: BlockListVisitor<'a, F>,\n\n}\n\n\n\nimpl<'a, F: File<Array>> ComplexBlockListVisitor<'a, F> {\n\n async fn visit_array<\n\n C: af::HasAfEnum,\n\n T: af::HasAfEnum + Clone + Copy + Default,\n\n A: de::ArrayAccess<T>,\n\n const BUF_SIZE: usize,\n\n >(\n\n &self,\n\n mut access: A,\n\n ) -> Result<u64, A::Error>\n\n where\n\n ArrayExt<C>: From<(ArrayExt<T>, ArrayExt<T>)>,\n\n Array: 
From<ArrayExt<C>>,\n\n {\n\n let mut buf = [T::default(); BUF_SIZE];\n\n let mut size = 0u64;\n", "file_path": "host/tensor/src/dense/file.rs", "rank": 41, "score": 153853.75568682447 }, { "content": "pub fn einsum<D, T>(format: &str, tensors: Vec<T>) -> TCResult<T>\n\nwhere\n\n D: Dir,\n\n T: TensorAccess\n\n + TensorMath<D, T, Combine = T>\n\n + TensorTransform<Broadcast = T, Expand = T, Transpose = T>\n\n + TensorReduce<D, Reduce = T>\n\n + Clone,\n\n{\n\n let (f_inputs, f_output) = parse_format(format)?;\n\n debug!(\n\n \"einsum with input labels: {:?}, output label {:?}\",\n\n f_inputs, f_output\n\n );\n\n\n\n let dimensions = validate_args(&f_inputs, &tensors)?;\n\n\n\n let op = outer_product(&f_inputs, &dimensions, tensors)?;\n\n debug_assert_eq!(\n\n op.shape().as_slice(),\n\n dimensions\n\n .values()\n\n .cloned()\n\n .collect::<Vec<u64>>()\n\n .as_slice()\n\n );\n\n\n\n contract(op, dimensions, f_output)\n\n}\n", "file_path": "host/tensor/src/einsum.rs", "rank": 42, "score": 151663.4695430189 }, { "content": "#[inline]\n\nfn fs_path(mount_point: &PathBuf, name: &PathSegment) -> PathBuf {\n\n let mut path = mount_point.clone();\n\n path.push(name.to_string());\n\n path\n\n}\n\n\n", "file_path": "host/src/fs/mod.rs", "rank": 43, "score": 151527.24546800382 }, { "content": "/// Trait defining a read operation for a single [`Tensor`] element\n\npub trait ReadValueAt<D: Dir> {\n\n /// The transaction context\n\n type Txn: Transaction<D>;\n\n\n\n /// Read the value of the element at the given [`Coord`].\n\n fn read_value_at<'a>(self, txn: Self::Txn, coord: Coord) -> Read<'a>;\n\n}\n", "file_path": "host/tensor/src/stream/mod.rs", "rank": 44, "score": 151438.75814803704 }, { "content": "fn cast_range(dim: u64, range: Range) -> TCResult<AxisBounds> {\n\n debug!(\"cast range from {} with dimension {}\", range, dim);\n\n\n\n let start = match range.start {\n\n Bound::Un => 0,\n\n Bound::In(start) => cast_bound(dim, start)?,\n\n Bound::Ex(start) => cast_bound(dim, start)? + 1,\n\n };\n\n\n\n let end = match range.end {\n\n Bound::Un => dim,\n\n Bound::In(end) => cast_bound(dim, end)? 
+ 1,\n\n Bound::Ex(end) => cast_bound(dim, end)?,\n\n };\n\n\n\n if end > start {\n\n Ok(AxisBounds::In(start..end))\n\n } else {\n\n Err(TCError::bad_request(\n\n \"invalid range\",\n\n Tuple::from(vec![start, end]),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 45, "score": 150448.32815871006 }, { "content": "type DirContents = Vec<(fs::DirEntry, Metadata)>;\n\n\n\npub async fn load(cache: Cache, path: PathBuf) -> TCResult<Dir> {\n\n let entries = dir_contents(&path).await?;\n\n dir::Dir::load(cache, path, entries).await\n\n}\n\n\n\nasync fn create_parent(path: &PathBuf) -> TCResult<()> {\n\n if let Some(parent) = path.parent() {\n\n if !parent.exists() {\n\n tokio::fs::create_dir_all(parent)\n\n .map_err(|e| io_err(e, parent))\n\n .await?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn dir_contents(dir_path: &PathBuf) -> TCResult<Vec<(fs::DirEntry, Metadata)>> {\n", "file_path": "host/src/fs/mod.rs", "rank": 46, "score": 147829.76440831315 }, { "content": "#[inline]\n\nfn expect_row(mut row: Vec<Value>) -> TCResult<(Coord, Number)> {\n\n if let Some(value) = row.pop() {\n\n let value = value.try_into()?;\n\n expect_coord(row).map(|coord| (coord, value))\n\n } else {\n\n Err(TCError::internal(ERR_CORRUPT))\n\n }\n\n}\n\n\n", "file_path": "host/tensor/src/sparse/table.rs", "rank": 47, "score": 147733.94436523848 }, { "content": "pub fn cast_bounds(shape: &Shape, value: Value) -> TCResult<Bounds> {\n\n debug!(\"tensor bounds from {} (shape is {})\", value, shape);\n\n\n\n match value {\n\n Value::None => Ok(Bounds::all(shape)),\n\n Value::Number(i) => {\n\n let bound = cast_bound(shape[0], i.into())?;\n\n Ok(Bounds::from(vec![bound]))\n\n }\n\n Value::Tuple(range) if range.matches::<(Bound, Bound)>() => {\n\n if shape.is_empty() {\n\n return Err(TCError::bad_request(\n\n \"empty Tensor has no valid bounds, but found\",\n\n range,\n\n ));\n\n }\n\n\n\n let range = range.opt_cast_into().unwrap();\n\n Ok(Bounds::from(vec![cast_range(shape[0], range)?]))\n\n }\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 48, "score": 147516.60235140595 }, { "content": "#[inline]\n\nfn file_version(file_path: &PathBuf, txn_id: &TxnId) -> PathBuf {\n\n let mut path = file_path.clone();\n\n path.push(super::VERSION.to_string());\n\n path.push(txn_id.to_string());\n\n path\n\n}\n\n\n", "file_path": "host/src/fs/file.rs", "rank": 49, "score": 146078.99861799067 }, { "content": "#[async_trait]\n\npub trait Handler<'a>: Send {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n None\n\n }\n\n\n\n fn put<'b>(self: Box<Self>) -> Option<PutHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n None\n\n }\n\n\n\n fn post<'b>(self: Box<Self>) -> Option<PostHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n None\n\n }\n\n\n\n fn delete<'b>(self: Box<Self>) -> Option<DeleteHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n None\n\n }\n\n}\n\n\n", "file_path": "host/src/route/mod.rs", "rank": 50, "score": 141239.94311766155 }, { "content": "pub trait Route: Send + Sync {\n\n fn route<'a>(&'a self, path: &'a [PathSegment]) -> Option<Box<dyn Handler<'a> + 'a>>;\n\n}\n\n\n", "file_path": "host/src/route/mod.rs", "rank": 51, "score": 138898.88415279906 }, { "content": "/// A generic instance trait\n\npub trait Instance: Send + Sync {\n\n /// The [`Class`] type of this instance\n\n type Class: Class;\n\n\n\n /// Returns the [`Class]` of this instance.\n\n fn class(&self) -> Self::Class;\n\n}\n", "file_path": "host/generic/src/lib.rs", 
"rank": 52, "score": 138898.88415279906 }, { "content": "struct MapHandler<'a, T: Clone> {\n\n map: &'a Map<T>,\n\n}\n\n\n\nimpl<'a, T: Instance + Clone> Handler<'a> for MapHandler<'a, T>\n\nwhere\n\n State: From<Map<T>>,\n\n State: From<T>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n if key.is_none() {\n\n Ok(State::from(self.map.clone()))\n\n } else {\n\n let key = Id::try_cast_from(key, |v| TCError::bad_request(\"invalid Id\", v))?;\n\n self.map\n", "file_path": "host/src/route/generic.rs", "rank": 53, "score": 138606.98515941936 }, { "content": "struct TupleHandler<'a, T: Clone> {\n\n tuple: &'a Tuple<T>,\n\n}\n\n\n\nimpl<'a, T: Instance + Clone> Handler<'a> for TupleHandler<'a, T>\n\nwhere\n\n State: From<Tuple<T>>,\n\n State: From<T>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n if key.is_none() {\n\n Ok(State::from(self.tuple.clone()))\n\n } else {\n\n let i = Number::try_cast_from(key, |v| {\n\n TCError::bad_request(\"invalid tuple index\", v)\n", "file_path": "host/src/route/generic.rs", "rank": 54, "score": 138606.98515941936 }, { "content": "fn file_name(handle: &fs::DirEntry) -> TCResult<PathSegment> {\n\n if let Some(name) = handle.path().file_stem() {\n\n let name = name.to_str().ok_or_else(|| {\n\n TCError::internal(format!(\"invalid file name at {:?}\", handle.path()))\n\n })?;\n\n\n\n name.parse()\n\n } else {\n\n Err(TCError::internal(\"Cannot load file with no name!\"))\n\n }\n\n}\n\n\n", "file_path": "host/src/fs/mod.rs", "rank": 55, "score": 137335.14485704072 }, { "content": "struct LockState<T> {\n\n last_commit: Option<TxnId>,\n\n readers: BTreeMap<TxnId, usize>,\n\n reserved: Option<TxnId>,\n\n wakers: VecDeque<Waker>,\n\n\n\n canon: UnsafeCell<T>,\n\n at: BTreeMap<TxnId, UnsafeCell<T>>,\n\n}\n\n\n", "file_path": "host/transact/src/lock.rs", "rank": 56, "score": 134748.28962116176 }, { "content": "#[async_trait]\n\npub trait BTreeInstance: Clone + Instance {\n\n type Slice: BTreeInstance;\n\n\n\n /// Return a reference to this `BTree`'s collator.\n\n fn collator(&self) -> &ValueCollator;\n\n\n\n /// Return a reference to this `BTree`'s schema.\n\n fn schema(&self) -> &RowSchema;\n\n\n\n /// Return a slice of this `BTree`'s with the given range.\n\n fn slice(self, range: Range, reverse: bool) -> TCResult<Self::Slice>;\n\n\n\n /// Return the number of [`Key`]s in this `BTree`.\n\n async fn count(&self, txn_id: TxnId) -> TCResult<u64> {\n\n // TODO: reimplement this more efficiently\n\n let keys = self.clone().keys(txn_id).await?;\n\n keys.try_fold(0u64, |count, _| future::ready(Ok(count + 1)))\n\n .await\n\n }\n\n\n", "file_path": "host/btree/src/lib.rs", "rank": 57, "score": 134615.3265422781 }, { "content": "fn main() {\n\n pkg_config::Config::new()\n\n .atleast_version(\"3.8\")\n\n .probe(\"arrayfire\")\n\n .unwrap();\n\n}\n", "file_path": "host/tensor/build.rs", "rank": 58, "score": 133894.44676458836 }, { "content": "/// [`Tensor`] transforms\n\npub trait TensorTransform {\n\n /// A broadcast [`Tensor`]\n\n type Broadcast: TensorInstance;\n\n\n\n /// A type-cast [`Tensor`]\n\n type Cast: TensorInstance;\n\n\n\n /// A [`Tensor`] with an expanded dimension\n\n type Expand: TensorInstance;\n\n\n\n /// A [`Tensor`] slice\n\n type Slice: TensorInstance;\n\n\n\n /// A transposed [`Tensor`]\n\n type Transpose: TensorInstance;\n\n\n\n /// Broadcast 
this [`Tensor`] to the given `shape`.\n\n fn broadcast(self, shape: Shape) -> TCResult<Self::Broadcast>;\n\n\n\n /// Cast this [`Tensor`] to the given `dtype`.\n", "file_path": "host/tensor/src/lib.rs", "rank": 59, "score": 133105.84021554838 }, { "content": "/// A [`Tensor`] instance\n\npub trait TensorInstance {\n\n /// A dense representation of this [`Tensor`]\n\n type Dense: TensorInstance;\n\n\n\n /// A sparse representation of this [`Tensor`]\n\n type Sparse: TensorInstance;\n\n\n\n /// Return a dense representation of this [`Tensor`].\n\n fn into_dense(self) -> Self::Dense;\n\n\n\n /// Return a sparse representation of this [`Tensor`].\n\n fn into_sparse(self) -> Self::Sparse;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 60, "score": 133105.84021554838 }, { "content": "/// Basic properties common to all [`Tensor`]s\n\npub trait TensorAccess {\n\n /// The datatype of this [`Tensor`]\n\n fn dtype(&self) -> NumberType;\n\n\n\n /// The number of dimensions of this [`Tensor`]\n\n fn ndim(&self) -> usize;\n\n\n\n /// The shape of this [`Tensor`]\n\n fn shape(&'_ self) -> &'_ Shape;\n\n\n\n /// The number of elements in this [`Tensor`]\n\n fn size(&self) -> u64;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 61, "score": 133105.74513547428 }, { "content": "/// Broadcast the given `left` and `right` tensors into the same shape.\n\n///\n\n/// For rules of broadcasting, see:\n\n/// [https://pytorch.org/docs/stable/notes/broadcasting.html](https://pytorch.org/docs/stable/notes/broadcasting.html)\n\npub fn broadcast<L, R>(left: L, right: R) -> TCResult<(L::Broadcast, R::Broadcast)>\n\nwhere\n\n L: TensorAccess + TensorTransform,\n\n R: TensorAccess + TensorTransform,\n\n{\n\n debug!(\n\n \"broadcast tensors with shapes {}, {}\",\n\n left.shape(),\n\n right.shape()\n\n );\n\n\n\n let mut left_shape = left.shape().to_vec();\n\n let mut right_shape = right.shape().to_vec();\n\n\n\n match (left_shape.len(), right_shape.len()) {\n\n (l, r) if l < r => {\n\n for _ in 0..(r - l) {\n\n left_shape.insert(0, 1);\n\n }\n\n }\n", "file_path": "host/tensor/src/lib.rs", "rank": 62, "score": 131399.7401151625 }, { "content": "fn normalize<\n\n T: TensorAccess + TensorTransform<Broadcast = T, Expand = T, Transpose = T> + Clone,\n\n>(\n\n tensor: T,\n\n f_input: &[char],\n\n f_output: &[char],\n\n dimensions: &BTreeMap<char, u64>,\n\n) -> TCResult<T> {\n\n debug!(\n\n \"normalize tensor with shape {} from {:?} -> {:?}\",\n\n tensor.shape(),\n\n f_input,\n\n f_output\n\n );\n\n if f_input == f_output {\n\n return Ok(tensor);\n\n }\n\n\n\n let source: HashMap<char, usize> = f_input.iter().cloned().zip(0..f_input.len()).collect();\n\n let permutation: Vec<usize> = f_output\n", "file_path": "host/tensor/src/einsum.rs", "rank": 63, "score": 131203.7420018873 }, { "content": " def write(self, *args):\n\n \"\"\"\n\n Write a `Tensor` or `Number` to the given slice of this one.\n\n\n\n If only one argument is provided, it is assumed to be a value to write to this entire `Tensor`.\n\n If two arguments are provided, the first is assumed to be the bounds of the write and the second the value.\n\n \"\"\"\n\n\n\n if len(args) == 1:\n\n [value] = args\n\n return self.__setitem__(None, value)\n\n elif len(args) == 2:\n\n [bounds, value] = args\n\n return self.__setitem__(bounds, value)\n\n else:\n", "file_path": "client/tinychain/collection/tensor.py", "rank": 64, "score": 131043.90459522195 }, { "content": "#[derive(Clone)]\n\nenum DirEntry {\n\n Dir(Dir),\n\n File(FileEntry),\n\n}\n\n\n\nimpl 
fmt::Display for DirEntry {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::Dir(dir) => fmt::Display::fmt(dir, f),\n\n Self::File(file) => fmt::Display::fmt(file, f),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Dir {\n\n path: PathBuf,\n\n cache: Cache,\n\n contents: TxnLock<HashMap<PathSegment, DirEntry>>,\n\n}\n", "file_path": "host/src/fs/dir.rs", "rank": 65, "score": 130895.97379182353 }, { "content": "struct TensorHandler<T> {\n\n tensor: T,\n\n}\n\n\n\nimpl<'a, T: 'a> Handler<'a> for TensorHandler<T>\n\nwhere\n\n T: TensorAccess\n\n + TensorIO<fs::Dir, Txn = Txn>\n\n + TensorDualIO<fs::Dir, Tensor, Txn = Txn>\n\n + TensorTransform\n\n + Clone\n\n + Send\n\n + Sync,\n\n <T as TensorTransform>::Slice: TensorAccess + Send,\n\n Tensor: From<<T as TensorTransform>::Slice>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 66, "score": 130635.30692723337 }, { "content": "fn single_entry<\n\n 'en,\n\n K: en::IntoStream<'en> + 'en,\n\n V: en::IntoStream<'en> + 'en,\n\n E: en::Encoder<'en>,\n\n>(\n\n key: K,\n\n value: V,\n\n encoder: E,\n\n) -> Result<E::Ok, E::Error> {\n\n use en::EncodeMap;\n\n\n\n let mut map = encoder.encode_map(Some(1))?;\n\n map.encode_entry(key, value)?;\n\n map.end()\n\n}\n", "file_path": "host/src/scalar/mod.rs", "rank": 67, "score": 130371.10713847206 }, { "content": "struct ChainVisitor {\n\n txn: Txn,\n\n}\n\n\n\n#[async_trait]\n\nimpl de::Visitor for ChainVisitor {\n\n type Value = BlockChain;\n\n\n\n fn expecting() -> &'static str {\n\n \"a BlockChain\"\n\n }\n\n\n\n async fn visit_seq<A: de::SeqAccess>(self, mut seq: A) -> Result<Self::Value, A::Error> {\n\n let schema = seq\n\n .next_element(())\n\n .await?\n\n .ok_or_else(|| de::Error::invalid_length(0, \"a BlockChain schema\"))?;\n\n\n\n let history = seq\n\n .next_element(self.txn.clone())\n", "file_path": "host/src/chain/block.rs", "rank": 68, "score": 129953.07718497372 }, { "content": "struct ValueTypeVisitor;\n\n\n\nimpl ValueTypeVisitor {\n\n fn visit_path<E: DestreamError>(self, path: TCPathBuf) -> Result<ValueType, E> {\n\n ValueType::from_path(&path)\n\n .ok_or_else(|| DestreamError::invalid_value(path, Self::expecting()))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl DestreamVisitor for ValueTypeVisitor {\n\n type Value = ValueType;\n\n\n\n fn expecting() -> &'static str {\n\n \"a Value type\"\n\n }\n\n\n\n fn visit_string<E: DestreamError>(self, v: String) -> Result<Self::Value, E> {\n\n let path: TCPathBuf = v.parse().map_err(DestreamError::custom)?;\n\n self.visit_path(path)\n", "file_path": "host/value/src/value.rs", "rank": 69, "score": 127568.32562415174 }, { "content": "struct MutationVisitor;\n\n\n\n#[async_trait]\n\nimpl de::Visitor for MutationVisitor {\n\n type Value = Mutation;\n\n\n\n fn expecting() -> &'static str {\n\n \"a mutation record\"\n\n }\n\n\n\n async fn visit_seq<A: de::SeqAccess>(self, mut seq: A) -> Result<Self::Value, A::Error> {\n\n let path = seq\n\n .next_element(())\n\n .await?\n\n .ok_or_else(|| de::Error::invalid_length(0, Self::expecting()))?;\n\n\n\n let key = seq\n\n .next_element(())\n\n .await?\n\n .ok_or_else(|| de::Error::invalid_length(0, Self::expecting()))?;\n", "file_path": "host/src/chain/data/block.rs", "rank": 70, "score": 127498.11066630458 }, { "content": "/// [`Tensor`] boolean operations.\n\npub trait TensorBoolean<O> {\n\n /// The result type of a boolean operation.\n\n type Combine: 
TensorInstance;\n\n\n\n /// Logical and\n\n fn and(self, other: O) -> TCResult<Self::Combine>;\n\n\n\n /// Logical or\n\n fn or(self, other: O) -> TCResult<Self::Combine>;\n\n\n\n /// Logical xor\n\n fn xor(self, other: O) -> TCResult<Self::Combine>;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 71, "score": 127393.15178586001 }, { "content": "/// Tensor comparison operations\n\npub trait TensorCompare<O> {\n\n /// The result of a comparison operation\n\n type Compare: TensorInstance;\n\n\n\n /// The result of a comparison operation which can only return a dense [`Tensor`]\n\n type Dense: TensorInstance;\n\n\n\n /// Element-wise equality\n\n fn eq(self, other: O) -> TCResult<Self::Dense>;\n\n\n\n /// Element-wise greater-than\n\n fn gt(self, other: O) -> TCResult<Self::Compare>;\n\n\n\n /// Element-wise greater-or-equal\n\n fn gte(self, other: O) -> TCResult<Self::Dense>;\n\n\n\n /// Element-wise less-than\n\n fn lt(self, other: O) -> TCResult<Self::Compare>;\n\n\n\n /// Element-wise less-or-equal\n\n fn lte(self, other: O) -> TCResult<Self::Dense>;\n\n\n\n /// Element-wise not-equal\n\n fn ne(self, other: O) -> TCResult<Self::Compare>;\n\n}\n\n\n\n/// [`Tensor`] I/O operations\n", "file_path": "host/tensor/src/lib.rs", "rank": 72, "score": 127393.15178586001 }, { "content": "struct RangeHandler;\n\n\n\nimpl<'a> Handler<'a> for RangeHandler {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, key| {\n\n Box::pin(async move {\n\n if key.matches::<(Vec<u64>, Number, Number)>() {\n\n let (shape, start, stop): (Vec<u64>, Number, Number) =\n\n key.opt_cast_into().unwrap();\n\n\n\n let file = create_file(&txn).await?;\n\n\n\n DenseTensor::range(file, *txn.id(), shape, start, stop)\n\n .map_ok(Tensor::from)\n\n .map_ok(Collection::from)\n\n .map_ok(State::from)\n\n .await\n\n } else {\n\n Err(TCError::bad_request(\"invalid schema for range tensor\", key))\n\n }\n\n })\n\n }))\n\n }\n\n}\n\n\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 73, "score": 125921.41522009888 }, { "content": "struct ConstantHandler;\n\n\n\nimpl<'a> Handler<'a> for ConstantHandler {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, key| {\n\n Box::pin(async move {\n\n let (shape, value): (Vec<u64>, Number) =\n\n key.try_cast_into(|v| TCError::bad_request(\"invalid Tensor schema\", v))?;\n\n\n\n constant(&txn, shape.into(), value)\n\n .map_ok(Tensor::from)\n\n .map_ok(Collection::from)\n\n .map_ok(State::from)\n\n .await\n\n })\n\n }))\n\n }\n\n}\n\n\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 74, "score": 125921.41522009888 }, { "content": "struct DualHandler {\n\n tensor: Tensor,\n\n op: fn(Tensor, Tensor) -> TCResult<Tensor>,\n\n}\n\n\n\nimpl DualHandler {\n\n fn new<T>(tensor: T, op: fn(Tensor, Tensor) -> TCResult<Tensor>) -> Self\n\n where\n\n Tensor: From<T>,\n\n {\n\n Self {\n\n tensor: tensor.into(),\n\n op,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Handler<'a> for DualHandler {\n\n fn post<'b>(self: Box<Self>) -> Option<PostHandler<'a, 'b>>\n\n where\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 75, "score": 125921.41522009888 }, { "content": "struct EinsumHandler;\n\n\n\nimpl<'a> Handler<'a> for EinsumHandler {\n\n fn post<'b>(self: Box<Self>) -> Option<PostHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, mut params| {\n\n Box::pin(async move {\n\n let format: String = 
params.require(&label(\"format\").into())?;\n\n let tensors: Vec<Tensor> = params.require(&label(\"tensors\").into())?;\n\n einsum(&format, tensors)\n\n .map(Collection::from)\n\n .map(State::from)\n\n })\n\n }))\n\n }\n\n}\n\n\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 76, "score": 125921.41522009888 }, { "content": "struct CreateHandler {\n\n class: TensorType,\n\n}\n\n\n\nimpl<'a> Handler<'a> for CreateHandler {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, key| {\n\n Box::pin(async move {\n\n let schema: Schema =\n\n key.try_cast_into(|v| TCError::bad_request(\"invalid Tensor schema\", v))?;\n\n\n\n match self.class {\n\n TensorType::Dense => {\n\n constant(&txn, schema.shape, schema.dtype.zero())\n\n .map_ok(Tensor::from)\n\n .map_ok(Collection::Tensor)\n\n .map_ok(State::Collection)\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 77, "score": 125921.41522009888 }, { "content": "struct UnaryHandler {\n\n tensor: Tensor,\n\n op: fn(&Tensor) -> TCResult<Tensor>,\n\n}\n\n\n\nimpl UnaryHandler {\n\n fn new(tensor: Tensor, op: fn(&Tensor) -> TCResult<Tensor>) -> Self {\n\n Self { tensor, op }\n\n }\n\n}\n\n\n\nimpl<'a> Handler<'a> for UnaryHandler {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n let tensor = if key.is_none() {\n\n self.tensor\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 78, "score": 125921.41522009888 }, { "content": "fn transform_error(err: TCError, encoding: Encoding) -> hyper::Response<Body> {\n\n let code = match err.code() {\n\n BadGateway => StatusCode::BAD_GATEWAY,\n\n BadRequest => StatusCode::BAD_REQUEST,\n\n Forbidden => StatusCode::FORBIDDEN,\n\n Conflict => StatusCode::CONFLICT,\n\n Internal => StatusCode::INTERNAL_SERVER_ERROR,\n\n MethodNotAllowed => StatusCode::METHOD_NOT_ALLOWED,\n\n NotFound => StatusCode::NOT_FOUND,\n\n NotImplemented => StatusCode::NOT_IMPLEMENTED,\n\n Timeout => StatusCode::REQUEST_TIMEOUT,\n\n Unauthorized => StatusCode::UNAUTHORIZED,\n\n };\n\n\n\n let body = match encoding {\n\n Encoding::Json => {\n\n let encoded = destream_json::encode(err).expect(\"encode error\");\n\n let encoded = encoded.chain(delimiter(b\"\\n\"));\n\n Body::wrap_stream(encoded)\n\n }\n", "file_path": "host/src/http/server.rs", "rank": 79, "score": 125515.82919607448 }, { "content": "fn validate_args<T: TensorAccess>(\n\n f_inputs: &[Label],\n\n tensors: &[T],\n\n) -> TCResult<BTreeMap<char, u64>> {\n\n if f_inputs.len() != tensors.len() {\n\n return Err(TCError::bad_request(\n\n \"number of Tensors passed to einsum does not match number of format strings\",\n\n format!(\"{} != {}\", tensors.len(), f_inputs.len()),\n\n ));\n\n } else if tensors.is_empty() {\n\n return Err(TCError::bad_request(\n\n \"no Tensor was provided to einsum\",\n\n \"[]\",\n\n ));\n\n }\n\n\n\n let mut dimensions = BTreeMap::new();\n\n\n\n for (f_input, tensor) in f_inputs.iter().zip(tensors.iter()) {\n\n if f_input.len() != tensor.ndim() {\n", "file_path": "host/tensor/src/einsum.rs", "rank": 80, "score": 125304.87985627621 }, { "content": "struct Inner<T> {\n\n name: String,\n\n state: Mutex<LockState<T>>,\n\n}\n\n\n\n/// A lock which provides transaction-specific versions of the locked state.\n\npub struct TxnLock<T> {\n\n inner: Arc<Inner<T>>,\n\n}\n\n\n\nimpl<T> Clone for TxnLock<T> {\n\n fn clone(&self) -> Self {\n\n TxnLock {\n\n inner: 
self.inner.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Clone> TxnLock<T> {\n\n /// Create a new lock.\n", "file_path": "host/transact/src/lock.rs", "rank": 81, "score": 125278.07843677004 }, { "content": "struct ErrorHandler<'a> {\n\n code: &'a Id,\n\n}\n\n\n\nimpl<'a> Handler<'a> for ErrorHandler<'a> {\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n let message = String::try_cast_from(key, |v| {\n\n TCError::bad_request(\"cannot cast into error message string from\", v)\n\n })?;\n\n\n\n if let Some(err_type) = error_type(self.code) {\n\n Err(TCError::new(err_type, message))\n\n } else {\n\n Err(TCError::not_found(self.code))\n\n }\n\n })\n\n }))\n\n }\n\n}\n\n\n", "file_path": "host/src/route/mod.rs", "rank": 82, "score": 125258.87941637062 }, { "content": "struct CopySparseHandler;\n\n\n\nimpl<'a> Handler<'a> for CopySparseHandler {\n\n fn post<'b>(self: Box<Self>) -> Option<PostHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, mut params| {\n\n Box::pin(async move {\n\n let schema: Value = params.require(&label(\"schema\").into())?;\n\n let schema: Schema =\n\n schema.try_cast_into(|v| TCError::bad_request(\"invalid Tensor schema\", v))?;\n\n\n\n let source: TCStream = params.require(&label(\"source\").into())?;\n\n params.expect_empty()?;\n\n\n\n let elements = source.into_stream(txn.clone()).await?;\n\n\n\n let txn_id = *txn.id();\n\n let dir = txn.context().create_dir_tmp(txn_id).await?;\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 83, "score": 123630.48095041697 }, { "content": "struct CopyDenseHandler;\n\n\n\nimpl<'a> Handler<'a> for CopyDenseHandler {\n\n fn post<'b>(self: Box<Self>) -> Option<PostHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|txn, mut params| {\n\n Box::pin(async move {\n\n let schema: Value = params.require(&label(\"schema\").into())?;\n\n let Schema { dtype, shape } =\n\n schema.try_cast_into(|v| TCError::bad_request(\"invalid Tensor schema\", v))?;\n\n\n\n let source: TCStream = params.require(&label(\"source\").into())?;\n\n params.expect_empty()?;\n\n\n\n let elements = source.into_stream(txn.clone()).await?;\n\n let elements = elements.map(|r| {\n\n r.and_then(|n| {\n\n Number::try_cast_from(n, |n| {\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 84, "score": 123630.48095041697 }, { "content": "/// [`Tensor`] reduction operations\n\npub trait TensorReduce<D: Dir> {\n\n /// The type of [`Transaction`] to expect\n\n type Txn: Transaction<D>;\n\n\n\n /// The result type of a reduce operation\n\n type Reduce: TensorInstance;\n\n\n\n /// Return the product of this [`Tensor`] along the given `axis`.\n\n fn product(self, axis: usize) -> TCResult<Self::Reduce>;\n\n\n\n /// Return the product of all elements in this [`Tensor`].\n\n fn product_all(&self, txn: Self::Txn) -> TCBoxTryFuture<Number>;\n\n\n\n /// Return the sum of this [`Tensor`] along the given `axis`.\n\n fn sum(self, axis: usize) -> TCResult<Self::Reduce>;\n\n\n\n /// Return the sum of all elements in this [`Tensor`].\n\n fn sum_all(&self, txn: Self::Txn) -> TCBoxTryFuture<Number>;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 85, "score": 122536.24285953116 }, { "content": "#[async_trait]\n\npub trait TensorIO<D: Dir> {\n\n /// The type of [`Transaction`] to expect\n\n type Txn: Transaction<D>;\n\n\n\n /// Read a single value from this [`Tensor`].\n\n async fn read_value(self, txn: Self::Txn, coord: Coord) -> 
TCResult<Number>;\n\n\n\n /// Write a single value to the slice of this [`Tensor`] with the given [`Bounds`].\n\n async fn write_value(&self, txn_id: TxnId, bounds: Bounds, value: Number) -> TCResult<()>;\n\n\n\n /// Overwrite a single element of this [`Tensor`].\n\n async fn write_value_at(&self, txn_id: TxnId, coord: Coord, value: Number) -> TCResult<()>;\n\n}\n\n\n\n/// [`Tensor`] I/O operations which accept another [`Tensor`] as an argument\n", "file_path": "host/tensor/src/lib.rs", "rank": 86, "score": 122536.13503411115 }, { "content": "#[async_trait]\n\npub trait TensorUnary<D: Dir> {\n\n /// The type of [`Transaction`] to expect\n\n type Txn: Transaction<D>;\n\n\n\n /// The return type of a unary operation\n\n type Unary: TensorInstance;\n\n\n\n /// Element-wise absolute value\n\n fn abs(&self) -> TCResult<Self::Unary>;\n\n\n\n /// Return `true` if all elements in this [`Tensor`] are nonzero.\n\n async fn all(self, txn: Self::Txn) -> TCResult<bool>;\n\n\n\n /// Return `true` if any element in this [`Tensor`] is nonzero.\n\n async fn any(self, txn: Self::Txn) -> TCResult<bool>;\n\n\n\n /// Element-wise logical not\n\n fn not(&self) -> TCResult<Self::Unary>;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 87, "score": 122536.13503411115 }, { "content": "type NodeId = BlockId;\n\n\n", "file_path": "host/btree/src/file.rs", "rank": 88, "score": 121097.4503263826 }, { "content": "struct SelfHandler<'a, T> {\n\n subject: &'a T,\n\n}\n\n\n\nimpl<'a, T: Clone + Send + Sync> Handler<'a> for SelfHandler<'a, T>\n\nwhere\n\n State: From<T>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n if key.is_none() {\n\n Ok(self.subject.clone().into())\n\n } else {\n\n Err(TCError::not_found(key))\n\n }\n\n })\n", "file_path": "host/src/route/mod.rs", "rank": 89, "score": 119256.69102097681 }, { "content": "struct TransposeHandler<T> {\n\n tensor: T,\n\n}\n\n\n\nimpl<'a, T> Handler<'a> for TransposeHandler<T>\n\nwhere\n\n T: TensorTransform + Send + 'a,\n\n Tensor: From<T::Transpose>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n let transpose = if key.is_none() {\n\n self.tensor.transpose(None)\n\n } else {\n\n let permutation = key.try_cast_into(|v| {\n\n TCError::bad_request(\"invalid permutation for transpose\", v)\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 90, "score": 118902.54871487254 }, { "content": "struct ExpandHandler<T> {\n\n tensor: T,\n\n}\n\n\n\nimpl<'a, T> Handler<'a> for ExpandHandler<T>\n\nwhere\n\n T: TensorTransform + Send + 'a,\n\n Tensor: From<T::Expand>,\n\n{\n\n fn get<'b>(self: Box<Self>) -> Option<GetHandler<'a, 'b>>\n\n where\n\n 'b: 'a,\n\n {\n\n Some(Box::new(|_txn, key| {\n\n Box::pin(async move {\n\n let axis = key.try_cast_into(|v| TCError::bad_request(\"invalid tensor axis\", v))?;\n\n\n\n self.tensor\n\n .expand_dims(axis)\n\n .map(Tensor::from)\n", "file_path": "host/src/route/collection/tensor.rs", "rank": 91, "score": 118902.54871487254 }, { "content": "type HistoryBlockView<'en> = (\n\n Bytes,\n\n en::MapStream<\n\n TxnId,\n\n TCResult<MutationViewSeq<'en>>,\n\n TCBoxStream<'en, (TxnId, TCResult<MutationViewSeq<'en>>)>,\n\n >,\n\n);\n\n\n\npub enum MutationView<'en> {\n\n Delete(TCPathBuf, Value),\n\n Put(TCPathBuf, Value, StateView<'en>),\n\n}\n\n\n\nimpl<'en> en::IntoStream<'en> for MutationView<'en> {\n\n fn 
into_stream<E: en::Encoder<'en>>(self, encoder: E) -> Result<E::Ok, E::Error> {\n\n match self {\n\n Self::Delete(path, key) => (path, key).into_stream(encoder),\n\n Self::Put(path, key, value) => (path, key, value).into_stream(encoder),\n\n }\n\n }\n\n}\n", "file_path": "host/src/chain/data/history.rs", "rank": 92, "score": 118877.01640581831 }, { "content": "/// [`Tensor`] math operations\n\npub trait TensorMath<D: Dir, O> {\n\n /// The result type of a math operation\n\n type Combine: TensorInstance;\n\n\n\n /// Add two tensors together.\n\n fn add(self, other: O) -> TCResult<Self::Combine>;\n\n\n\n /// Divide `self` by `other`.\n\n fn div(self, other: O) -> TCResult<Self::Combine>;\n\n\n\n /// Multiply two tensors together.\n\n fn mul(self, other: O) -> TCResult<Self::Combine>;\n\n\n\n /// Subtract `other` from `self`.\n\n fn sub(self, other: O) -> TCResult<Self::Combine>;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 93, "score": 118309.07402519531 }, { "content": "type Label = Vec<char>;\n\n\n\nconst VALID_LABELS: [char; 52] = [\n\n 'a', 'A', 'b', 'B', 'c', 'C', 'd', 'D', 'e', 'E', 'f', 'F', 'g', 'G', 'h', 'H', 'i', 'I', 'j',\n\n 'J', 'k', 'K', 'l', 'L', 'm', 'M', 'n', 'N', 'o', 'O', 'p', 'P', 'q', 'Q', 'r', 'R', 's', 'S',\n\n 't', 'T', 'u', 'U', 'v', 'V', 'w', 'W', 'x', 'X', 'y', 'Y', 'z', 'Z',\n\n];\n\n\n", "file_path": "host/tensor/src/einsum.rs", "rank": 94, "score": 118188.96029311263 }, { "content": "struct TensorVisitor<FD, FS, D, T> {\n\n txn: T,\n\n dir: PhantomData<D>,\n\n dense: PhantomData<FD>,\n\n sparse: PhantomData<FS>,\n\n}\n\n\n\nimpl<FD, FS, D, T> TensorVisitor<FD, FS, D, T> {\n\n fn new(txn: T) -> Self {\n\n Self {\n\n txn,\n\n dir: PhantomData,\n\n dense: PhantomData,\n\n sparse: PhantomData,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<FD, FS, D, T> de::Visitor for TensorVisitor<FD, FS, D, T>\n", "file_path": "host/tensor/src/lib.rs", "rank": 95, "score": 117992.75245096769 }, { "content": "#[async_trait]\n\npub trait TensorDualIO<D: Dir, O> {\n\n /// The type of [`Transaction`] to expect\n\n type Txn: Transaction<D>;\n\n\n\n /// Zero out the elements of this [`Tensor`] where the corresponding element of `value` is nonzero.\n\n async fn mask(self, txn: Self::Txn, value: O) -> TCResult<()>;\n\n\n\n /// Overwrite the slice of this [`Tensor`] given by [`Bounds`] with the given `value`.\n\n async fn write(self, txn: Self::Txn, bounds: Bounds, value: O) -> TCResult<()>;\n\n}\n\n\n", "file_path": "host/tensor/src/lib.rs", "rank": 96, "score": 116483.72882411983 }, { "content": "fn outer_product<D, T>(\n\n f_inputs: &[Label],\n\n dimensions: &BTreeMap<char, u64>,\n\n tensors: Vec<T>,\n\n) -> TCResult<T>\n\nwhere\n\n D: Dir,\n\n T: TensorAccess\n\n + TensorMath<D, T, Combine = T>\n\n + TensorTransform<Broadcast = T, Expand = T, Transpose = T>\n\n + Clone,\n\n{\n\n assert_eq!(f_inputs.len(), tensors.len());\n\n assert!(!tensors.is_empty());\n\n\n\n let f_output = dimensions.keys().cloned().collect::<Label>();\n\n\n\n let mut normalized = tensors\n\n .into_iter()\n\n .zip(f_inputs.iter())\n", "file_path": "host/tensor/src/einsum.rs", "rank": 97, "score": 115552.6247029641 }, { "content": "struct DenseTensorVisitor<FD, FS, D, T> {\n\n txn_id: TxnId,\n\n file: FD,\n\n sparse: PhantomData<FS>,\n\n dir: PhantomData<D>,\n\n txn: PhantomData<T>,\n\n}\n\n\n\nimpl<FD, FS, D, T> DenseTensorVisitor<FD, FS, D, T> {\n\n fn new(txn_id: TxnId, file: FD) -> Self {\n\n Self {\n\n txn_id,\n\n file,\n\n sparse: PhantomData,\n\n dir: PhantomData,\n\n txn: PhantomData,\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "host/tensor/src/dense/mod.rs", "rank": 98, "score": 114418.17609666627 }, { "content": "struct SparseTensorVisitor<FD, FS, D, T> {\n\n txn: T,\n\n dense: PhantomData<FD>,\n\n sparse: PhantomData<FS>,\n\n dir: PhantomData<D>,\n\n}\n\n\n\nimpl<FD, FS, D, T> SparseTensorVisitor<FD, FS, D, T> {\n\n fn new(txn: T) -> Self {\n\n Self {\n\n txn,\n\n dense: PhantomData,\n\n sparse: PhantomData,\n\n dir: PhantomData,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<FD, FS, D, T> de::Visitor for SparseTensorVisitor<FD, FS, D, T>\n", "file_path": "host/tensor/src/sparse/mod.rs", "rank": 99, "score": 114418.17609666627 } ]
Rust
gcs-cxx/src/ecs_world.rs
Beliaar/godot-component-system
2e2bd9186a968853b53f447cffa34703b5ddc2a6
use std::string::String;

use cxx::{type_id, ExternType};
use gcs::world::ecs_world::{create_ecs_world, ECSWorld};

use crate::component::component_data::create_component_data;
use crate::component::component_data::CXXComponentData;
use crate::component::component_definition::CXXComponentDefinition;
use crate::component::component_info::create_component_info;
use crate::component::component_info::CXXComponentInfo;
use crate::entity::create_entity;
use crate::entity::entity_id_from_string;
use crate::entity::CXXEntityId;
use crate::entity::EntityIdResult;
use crate::godot::error::GCSResult;

#[cxx::bridge(namespace = gcs::ffi)]
pub mod ffi {
    extern "Rust" {
        type UnitResult;
        fn is_error(&self) -> bool;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        type StringVecResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Vec<String>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        type EntityIdResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Box<CXXEntityId>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        #[cxx_name = "ComponentInfo"]
        type CXXComponentInfo;
    }

    extern "Rust" {
        #[cxx_name = "ComponentData"]
        type CXXComponentData;
        fn get_field(self: &CXXComponentData, field: String) -> &ComponentValue;
        fn set_field(self: &mut CXXComponentData, field: String, value: &ComponentValue);
        fn create_component_data(entity: &CXXEntityId) -> Box<CXXComponentData>;
    }

    extern "Rust" {
        #[cxx_name = "EntityId"]
        type CXXEntityId;
        fn create_entity() -> Box<CXXEntityId>;
        fn as_string(&self) -> String;
        fn entity_id_from_string(id: String) -> Box<EntityIdResult>;
    }

    extern "Rust" {
        type ComponentInfoResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Box<CXXComponentInfo>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        #[cxx_name = "ECSWorld"]
        type CXXECSWorld;

        pub(crate) fn create_component_info(hash: u64) -> Box<CXXComponentInfo>;

        fn register_component(
            self: &mut CXXECSWorld,
            name: String,
            component_definition: &ComponentDefinition,
        ) -> Box<ComponentInfoResult>;

        fn register_entity(&mut self, id: &CXXEntityId) -> Box<UnitResult>;

        pub fn set_component_data(
            &mut self,
            entity_id: &CXXEntityId,
            component: String,
            data: &CXXComponentData,
        ) -> Box<UnitResult>;

        fn is_component_added_to_entity(&self, entity_id: &CXXEntityId, component: String) -> bool;

        fn get_components_of_entity(&self, entity_id: &CXXEntityId) -> Box<StringVecResult>;

        fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId>;

        #[cxx_name = "create_ecs_world"]
        pub fn create_cxx_ecs_world() -> Box<CXXECSWorld>;
    }

    extern "C++" {
        include!("rust/cxx.h");
        include!("gcs-cxx/include/godot/variant.h");
        include!("gcs-cxx/src/component/component_definition.rs.h");
        include!("gcs-cxx/src/component/component_value.rs.h");

        type ComponentDefinition = crate::component::component_definition::CXXComponentDefinition;
        type ComponentValue = crate::component::component_value::CXXComponentValue;
    }
}

type ComponentInfoResult = GCSResult<Box<CXXComponentInfo>>;
type UnitResult = GCSResult<()>;
type StringVecResult = GCSResult<Vec<String>>;

pub(crate) struct CXXECSWorld(ECSWorld<CXXComponentDefinition, CXXComponentData, CXXComponentInfo>);

impl CXXECSWorld {
    fn register_component(
        self: &mut CXXECSWorld,
        name: String,
        component_definition: &CXXComponentDefinition,
    ) -> Box<ComponentInfoResult> {
        let result = self
            .0
            .register_component(name, component_definition.clone());
        Box::new(match result {
            Ok(info) => ComponentInfoResult::new_result(Box::new(info)),
            Err(error) => ComponentInfoResult::new_error(error),
        })
    }

    fn register_entity(self: &mut CXXECSWorld, id: &CXXEntityId) -> Box<UnitResult> {
        let result = self.0.register_entity(id);
        Box::new(match result {
            Ok(_) => UnitResult::new_result(()),
            Err(err) => UnitResult::new_error(err.to_string()),
        })
    }

    fn set_component_data(
        self: &mut CXXECSWorld,
        entity_id: &CXXEntityId,
        component: String,
        data: &CXXComponentData,
    ) -> Box<UnitResult> {
        let result = self.0.set_component_data(entity_id, component, data);
        Box::new(match result {
            Ok(_) => UnitResult::new_result(()),
            Err(err) => UnitResult::new_error(err.to_string()),
        })
    }

    fn get_components_of_entity(
        self: &CXXECSWorld,
        entity_id: &CXXEntityId,
    ) -> Box<StringVecResult> {
        let result = self.0.get_components_of_entity(entity_id);
        Box::new(match result {
            Ok(value) => StringVecResult::new_result(value),
            Err(err) => StringVecResult::new_error(err.to_string()),
        })
    }

    fn is_component_added_to_entity(
        self: &CXXECSWorld,
        entity_id: &CXXEntityId,
        component: String,
    ) -> bool {
        self.0.is_component_added_to_entity(entity_id, component)
    }

    fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId> {
        self.0.create_entity()
    }
}

pub(crate) fn create_cxx_ecs_world() -> Box<CXXECSWorld> {
    Box::new(CXXECSWorld(create_ecs_world::<
        CXXComponentDefinition,
        CXXComponentData,
        CXXComponentInfo,
    >()))
}

unsafe impl ExternType for CXXECSWorld {
    type Id = type_id!("gcs::ffi::ECSWorld");
    type Kind = cxx::kind::Trivial;
}
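The file above wraps every fallible ECSWorld call in a boxed result type so it can cross the cxx boundary. As a quick orientation aid (not part of this dataset row), a minimal Rust-side exercise of that wrapper might look like the sketch below; it assumes it is compiled inside the same gcs-cxx crate, since the wrapper methods are not public, and it only calls items that already appear in the code above.

// Hypothetical sketch only: assumes it lives inside the gcs-cxx crate alongside the code above.
fn demo_register_entity() {
    let mut world = create_cxx_ecs_world(); // Box<CXXECSWorld>
    let id = create_entity(); // Box<CXXEntityId>, free function from crate::entity
    let result = world.register_entity(&id); // Box<UnitResult>
    if result.is_error() {
        eprintln!("register_entity failed: {}", result.get_error());
    }
}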
use std::string::String;

use cxx::{type_id, ExternType};
use gcs::world::ecs_world::{create_ecs_world, ECSWorld};

use crate::component::component_data::create_component_data;
use crate::component::component_data::CXXComponentData;
use crate::component::component_definition::CXXComponentDefinition;
use crate::component::component_info::create_component_info;
use crate::component::component_info::CXXComponentInfo;
use crate::entity::create_entity;
use crate::entity::entity_id_from_string;
use crate::entity::CXXEntityId;
use crate::entity::EntityIdResult;
use crate::godot::error::GCSResult;

#[cxx::bridge(namespace = gcs::ffi)]
pub mod ffi {
    extern "Rust" {
        type UnitResult;
        fn is_error(&self) -> bool;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        type StringVecResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Vec<String>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        type EntityIdResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Box<CXXEntityId>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        #[cxx_name = "ComponentInfo"]
        type CXXComponentInfo;
    }

    extern "Rust" {
        #[cxx_name = "ComponentData"]
        type CXXComponentData;
        fn get_field(self: &CXXComponentData, field: String) -> &ComponentValue;
        fn set_field(self: &mut CXXComponentData, field: String, value: &ComponentValue);
        fn create_component_data(entity: &CXXEntityId) -> Box<CXXComponentData>;
    }

    extern "Rust" {
        #[cxx_name = "EntityId"]
        type CXXEntityId;
        fn create_entity() -> Box<CXXEntityId>;
        fn as_string(&self) -> String;
        fn entity_id_from_string(id: String) -> Box<EntityIdResult>;
    }

    extern "Rust" {
        type ComponentInfoResult;
        fn is_error(&self) -> bool;
        fn get_result(&self) -> Box<CXXComponentInfo>;
        fn get_error(&self) -> String;
    }

    extern "Rust" {
        #[cxx_name = "ECSWorld"]
        type CXXECSWorld;

        pub(crate) fn create_component_info(hash: u64) -> Box<CXXComponentInfo>;

        fn register_component(
            self: &mut CXXECSWorld,
            name: String,
            component_definition: &ComponentDefinition,
        ) -> Box<ComponentInfoResult>;

        fn register_entity(&mut self, id: &CXXEntityId) -> Box<UnitResult>;

        pub fn set_component_data(
            &mut self,
            entity_id: &CXXEntityId,
            component: String,
            data: &CXXComponentData,
        ) -> Box<UnitResult>;

        fn is_component_added_to_entity(&self, entity_id: &CXXEntityId, component: String) -> bool;

        fn get_components_of_entity(&self, entity_id: &CXXEntityId) -> Box<StringVecResult>;

        fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId>;

        #[cxx_name = "create_ecs_world"]
        pub fn create_cxx_ecs_world() -> Box<CXXECSWorld>;
    }

    extern "C++" {
        include!("rust/cxx.h");
        include!("gcs-cxx/include/godot/variant.h");
        include!("gcs-cxx/src/component/component_definition.rs.h");
        include!("gcs-cxx/src/component/component_value.rs.h");

        type ComponentDefinition = crate::component::component_definition::CXXComponentDefinition;
        type ComponentValue = crate::component::component_value::CXXComponentValue;
    }
}

type ComponentInfoResult = GCSResult<Box<CXXComponentInfo>>;
type UnitResult = GCSResult<()>;
type StringVecResult = GCSResult<Vec<String>>;

pub(crate) struct CXXECSWorld(ECSWorld<CXXComponentDefinition, CXXComponentData, CXXComponentInfo>);

impl CXXECSWorld {
    fn register_component(
        self: &mut CXXECSWorld,
        name: String,
        component_definition: &CXXComponentDefinition,
    ) -> Box<ComponentInfoResult> {
        let result = self
            .0
            .register_component(name, component_definition.clone());
        Box::new(match result {
            Ok(info) => ComponentInfoResult::new_result(Box::new(info)),
            Err(error) => ComponentInfoResult::new_error(error),
        })
    }
    fn set_component_data(
        self: &mut CXXECSWorld,
        entity_id: &CXXEntityId,
        component: String,
        data: &CXXComponentData,
    ) -> Box<UnitResult> {
        let result = self.0.set_component_data(entity_id, component, data);
        Box::new(match result {
            Ok(_) => UnitResult::new_result(()),
            Err(err) => UnitResult::new_error(err.to_string()),
        })
    }

    fn get_components_of_entity(
        self: &CXXECSWorld,
        entity_id: &CXXEntityId,
    ) -> Box<StringVecResult> {
        let result = self.0.get_components_of_entity(entity_id);
        Box::new(match result {
            Ok(value) => StringVecResult::new_result(value),
            Err(err) => StringVecResult::new_error(err.to_string()),
        })
    }

    fn is_component_added_to_entity(
        self: &CXXECSWorld,
        entity_id: &CXXEntityId,
        component: String,
    ) -> bool {
        self.0.is_component_added_to_entity(entity_id, component)
    }

    fn create_entity(self: &mut CXXECSWorld) -> Box<CXXEntityId> {
        self.0.create_entity()
    }
}

pub(crate) fn create_cxx_ecs_world() -> Box<CXXECSWorld> {
    Box::new(CXXECSWorld(create_ecs_world::<
        CXXComponentDefinition,
        CXXComponentData,
        CXXComponentInfo,
    >()))
}

unsafe impl ExternType for CXXECSWorld {
    type Id = type_id!("gcs::ffi::ECSWorld");
    type Kind = cxx::kind::Trivial;
}
    fn register_entity(self: &mut CXXECSWorld, id: &CXXEntityId) -> Box<UnitResult> {
        let result = self.0.register_entity(id);
        Box::new(match result {
            Ok(_) => UnitResult::new_result(()),
            Err(err) => UnitResult::new_error(err.to_string()),
        })
    }
function_block-full_function
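All of the wrapper methods above box values of crate::godot::error::GCSResult, whose definition is not included in this row. A minimal shape consistent with the calls used (new_result, new_error, is_error, get_result, get_error) might look like the following; this is an inferred sketch under that assumption, not the crate's actual code.

// Inferred sketch only: the real GCSResult<T> from gcs-cxx is not shown in this row.
pub struct GCSResult<T> {
    result: Option<T>,
    error: Option<String>,
}

impl<T: Clone> GCSResult<T> {
    pub fn new_result(result: T) -> Self {
        GCSResult { result: Some(result), error: None }
    }

    pub fn new_error(error: String) -> Self {
        GCSResult { result: None, error: Some(error) }
    }

    pub fn is_error(&self) -> bool {
        self.error.is_some()
    }

    pub fn get_result(&self) -> T {
        self.result.clone().expect("get_result called on an error value")
    }

    pub fn get_error(&self) -> String {
        self.error.clone().unwrap_or_default()
    }
}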
[ { "content": "pub fn create_component_field_definition() -> ffi::CXXComponentFieldDefinition {\n\n ffi::CXXComponentFieldDefinition::default()\n\n}\n\n\n\n#[derive(Hash, Eq, PartialEq, Clone, Default)]\n\npub struct CXXComponentDefinition {\n\n pub fields: Vec<ffi::CXXComponentFieldDefinition>,\n\n}\n\n\n\nunsafe impl ExternType for CXXComponentDefinition {\n\n type Id = type_id!(\"gcs::ffi::ComponentDefinition\");\n\n type Kind = cxx::kind::Opaque;\n\n}\n\n\n\nimpl ComponentDefinition for CXXComponentDefinition {\n\n type FieldDefinition = CXXComponentFieldDefinition;\n\n\n\n fn get_fields(&self) -> Vec<Self::FieldDefinition> {\n\n self.fields.clone()\n\n }\n", "file_path": "gcs-cxx/src/component/component_definition.rs", "rank": 0, "score": 165139.54146766572 }, { "content": "pub trait ComponentData: Default + Clone {\n\n type EntityIdType: EntityId;\n\n type ComponentValueType: ComponentValue;\n\n\n\n fn new(entity: Self::EntityIdType) -> Self;\n\n fn get_entity(&self) -> Self::EntityIdType;\n\n fn get_field(&self, field: String) -> &Self::ComponentValueType;\n\n fn set_field(&mut self, field: String, value: &Self::ComponentValueType);\n\n}\n", "file_path": "gcs/src/component/component_data.rs", "rank": 3, "score": 119191.17208660653 }, { "content": "pub fn create_component_definition() -> Box<CXXComponentDefinition> {\n\n Box::new(CXXComponentDefinition::default())\n\n}\n", "file_path": "gcs-cxx/src/component/component_definition.rs", "rank": 5, "score": 104857.54399169398 }, { "content": "fn component_value_from_variant(value: &Variant) -> Box<CXXComponentValue> {\n\n let variant_type: CXXVariantType = value.get_type();\n\n\n\n match variant_type.0 {\n\n VariantType::Nil => Box::new(CXXComponentValue::Nil),\n\n VariantType::Bool => Box::new(CXXComponentValue::Bool(variant_as_bool(value))),\n\n VariantType::Int => Box::new(CXXComponentValue::Int(variant_as_i64(value))),\n\n VariantType::Real => Box::new(CXXComponentValue::Real(variant_as_f64(value))),\n\n VariantType::String => Box::new(CXXComponentValue::String(variant_as_string(value))),\n\n VariantType::Vector2 => {\n\n unimplemented!()\n\n }\n\n VariantType::Rect2 => {\n\n unimplemented!()\n\n }\n\n VariantType::Vector3 => {\n\n unimplemented!()\n\n }\n\n VariantType::Transform2D => {\n\n unimplemented!()\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 6, "score": 103934.38302215458 }, { "content": "fn variant_from_component_value(value: &CXXComponentValue) -> &'static Variant {\n\n match value.clone() {\n\n CXXComponentValue::Nil => empty_variant(),\n\n CXXComponentValue::Int(value) => variant_from_i64(value),\n\n CXXComponentValue::String(value) => variant_from_string(value.clone()),\n\n CXXComponentValue::Bool(value) => variant_from_bool(value),\n\n CXXComponentValue::Real(value) => variant_from_f64(value),\n\n }\n\n}\n\n\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 7, "score": 103934.38302215458 }, { "content": "pub fn create_ecs_world<\n\n TComponentDefinition: ComponentDefinition,\n\n TComponentData: ComponentData,\n\n TComponentInfo: ComponentInfo,\n\n>() -> ECSWorld<TComponentDefinition, TComponentData, TComponentInfo> {\n\n ECSWorld::default()\n\n}\n\n\n\nimpl<\n\n TComponentDefinition: ComponentDefinition,\n\n TComponentData: ComponentData,\n\n TComponentInfo: ComponentInfo,\n\n > ECSWorld<TComponentDefinition, TComponentData, TComponentInfo>\n\n{\n\n pub fn register_component(\n\n &mut self,\n\n name: String,\n\n component_definition: TComponentDefinition,\n\n ) -> 
Result<TComponentInfo, String> {\n\n let mut hasher = DefaultHasher::default();\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 8, "score": 102458.28289028088 }, { "content": "pub trait ComponentValue: Clone + PartialEq + Debug {\n\n fn get_type(&self) -> VariantType;\n\n fn set_nil(&mut self);\n\n fn get_nil(&self) -> ();\n\n fn set_int(&mut self, value: i64);\n\n fn get_int(&self) -> i64;\n\n fn set_string(&mut self, value: String);\n\n fn get_string(&self) -> String;\n\n fn set_bool(&mut self, value: bool);\n\n fn get_bool(&self) -> bool;\n\n fn set_real(&mut self, value: f64);\n\n fn get_real(&self) -> f64;\n\n}\n", "file_path": "gcs/src/component/component_value.rs", "rank": 9, "score": 97956.05384568578 }, { "content": "const StringName &string_name_from_rust_string(rust::string string);\n", "file_path": "gcs-cxx/include/godot/string.h", "rank": 10, "score": 97158.17249574502 }, { "content": "pub trait ComponentFieldDefinition: Default + Hash + Clone + Eq {\n\n fn get_type(&self) -> VariantType;\n\n fn get_name(&self) -> String;\n\n}\n", "file_path": "gcs/src/component/component_definition.rs", "rank": 11, "score": 86788.16403257052 }, { "content": "pub trait ComponentInfo: Hash + Default + Eq + Copy + Clone {\n\n fn get_hash(&self) -> u64;\n\n fn create(hash: u64) -> Self\n\n where\n\n Self: Sized;\n\n}\n", "file_path": "gcs/src/component/component_info.rs", "rank": 12, "score": 77189.87680618539 }, { "content": "pub trait ComponentDefinition: Default + Hash + Clone {\n\n type FieldDefinition: ComponentFieldDefinition + Hash + Eq + PartialEq + Clone + Default;\n\n fn get_fields(&self) -> Vec<Self::FieldDefinition>;\n\n fn add_field(&mut self, field_definition: Self::FieldDefinition);\n\n}\n\n\n", "file_path": "gcs/src/component/component_definition.rs", "rank": 13, "score": 68967.28710993541 }, { "content": "pub trait EntityId: Default + PartialEq + Eq + Hash + Copy + Clone {\n\n fn create() -> Self\n\n where\n\n Self: Sized;\n\n fn as_string(&self) -> String;\n\n fn parse_str(input: &str) -> Result<Self, String>\n\n where\n\n Self: Sized;\n\n}\n", "file_path": "gcs/src/entity.rs", "rank": 14, "score": 65180.35367511671 }, { "content": "class ComponentFieldDefinition : public Reference {\n\nGDCLASS(ComponentFieldDefinition, Reference);\n\nprivate:\n\n gcs::ffi::ComponentFieldDefinition componentFieldDefinition;\n\nprotected:\n\n static void _bind_methods();\n\n\n\npublic:\n\n ComponentFieldDefinition();\n\n\n\n StringName get_name() const;\n\n void set_name(const StringName& name);\n\n\n\n Variant::Type get_type() const;\n\n void set_type(Variant::Type type);\n\n\n\n gcs::ffi::ComponentFieldDefinition get_definition();\n\n};\n\n\n\n\n\n#endif //GODOT_COMPONENT_SYSTEM_COMPONENT_FIELD_DEFINITION_H\n", "file_path": "include/component_field_definition.h", "rank": 15, "score": 64482.4794425959 }, { "content": "use crate::component::component_value::ComponentValue;\n\nuse crate::entity::EntityId;\n\n\n", "file_path": "gcs/src/component/component_data.rs", "rank": 16, "score": 61454.653408888385 }, { "content": "use crate::variant::VariantType;\n\nuse std::fmt::Debug;\n\n\n", "file_path": "gcs/src/component/component_value.rs", "rank": 17, "score": 61092.124975564715 }, { "content": "void register_godot_component_system_types();\n", "file_path": "register_types.h", "rank": 18, "score": 60150.71766331209 }, { "content": "void unregister_godot_component_system_types();", "file_path": "register_types.h", "rank": 19, "score": 60150.71766331209 }, { "content": "impl CXXComponentData 
{\n\n pub(crate) fn get_field(&self, field: String) -> &CXXComponentValue {\n\n ComponentData::get_field(self, field)\n\n }\n\n\n\n pub(crate) fn set_field(&mut self, field: String, value: &CXXComponentValue) {\n\n ComponentData::set_field(self, field, &value)\n\n }\n\n}\n\n\n\npub(crate) fn create_component_data(entity: &CXXEntityId) -> Box<CXXComponentData> {\n\n Box::new(CXXComponentData::new(*entity))\n\n}\n\n\n\nunsafe impl ExternType for CXXComponentData {\n\n type Id = type_id!(\"gcs::ffi::ComponentData\");\n\n type Kind = cxx::kind::Opaque;\n\n}\n", "file_path": "gcs-cxx/src/component/component_data.rs", "rank": 20, "score": 59544.086367865726 }, { "content": "use crate::component::component_value::CXXComponentValue;\n\nuse crate::entity::CXXEntityId;\n\nuse cxx::{type_id, ExternType};\n\nuse gcs::component::component_data::ComponentData;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Default, Clone)]\n\npub struct CXXComponentData {\n\n entity: CXXEntityId,\n\n fields: HashMap<String, CXXComponentValue>,\n\n}\n\n\n\nimpl ComponentData for CXXComponentData {\n\n type EntityIdType = CXXEntityId;\n\n type ComponentValueType = CXXComponentValue;\n\n\n\n fn new(entity: CXXEntityId) -> Self {\n\n Self {\n\n entity,\n\n fields: HashMap::new(),\n", "file_path": "gcs-cxx/src/component/component_data.rs", "rank": 21, "score": 59542.09488490822 }, { "content": " }\n\n }\n\n\n\n fn get_entity(&self) -> CXXEntityId {\n\n self.entity\n\n }\n\n\n\n fn get_field(&self, field: String) -> &Self::ComponentValueType {\n\n if self.fields.contains_key(&field) {\n\n &self.fields.get(&field).unwrap()\n\n } else {\n\n &Self::ComponentValueType::Nil\n\n }\n\n }\n\n\n\n fn set_field(&mut self, field: String, value: &Self::ComponentValueType) {\n\n self.fields.insert(field, value.clone());\n\n }\n\n}\n\n\n", "file_path": "gcs-cxx/src/component/component_data.rs", "rank": 22, "score": 59537.28962358109 }, { "content": "use crate::component::component_value::ffi::{\n\n empty_variant, variant_from_bool, variant_from_f64, variant_from_i64, variant_from_string,\n\n};\n\nuse crate::godot::variant::ffi::{\n\n variant_as_bool, variant_as_f64, variant_as_i64, variant_as_string, CXXVariantType, Variant,\n\n};\n\nuse cxx::{type_id, ExternType};\n\nuse gcs::component::component_value::ComponentValue;\n\nuse gcs::variant::VariantType;\n\n\n\n#[cxx::bridge(namespace = gcs::ffi)]\n\npub mod ffi {\n\n extern \"Rust\" {\n\n #[cxx_name = \"ComponentValue\"]\n\n type CXXComponentValue;\n\n fn variant_from_component_value(value: &CXXComponentValue) -> &'static Variant;\n\n fn component_value_from_variant(value: &Variant) -> Box<CXXComponentValue>;\n\n }\n\n\n\n unsafe extern \"C++\" {\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 23, "score": 59203.227804083355 }, { "content": "}\n\n\n\nimpl Default for CXXComponentValue {\n\n fn default() -> Self {\n\n CXXComponentValue::Nil\n\n }\n\n}\n\n\n\nimpl ComponentValue for CXXComponentValue {\n\n fn get_type(&self) -> VariantType {\n\n match self {\n\n CXXComponentValue::Nil => VariantType::Nil,\n\n CXXComponentValue::Int(_) => VariantType::Int,\n\n CXXComponentValue::String(_) => VariantType::String,\n\n CXXComponentValue::Bool(_) => VariantType::Bool,\n\n CXXComponentValue::Real(_) => VariantType::Real,\n\n }\n\n }\n\n\n\n fn set_nil(&mut self) {\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 24, "score": 59193.96736563092 }, { "content": " }\n\n CXXComponentValue::Real(value) => *value,\n\n }\n\n }\n\n}\n\n\n\nunsafe impl ExternType 
for CXXComponentValue {\n\n type Id = type_id!(\"gcs::ffi::ComponentValue\");\n\n type Kind = cxx::kind::Opaque;\n\n}\n\n\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 25, "score": 59192.937570142996 }, { "content": " include!(\"gcs-cxx/include/godot/variant.h\");\n\n include!(\"rust/cxx.h\");\n\n pub type Variant = crate::godot::variant::ffi::Variant;\n\n\n\n pub(crate) fn empty_variant() -> &'static Variant;\n\n pub(crate) fn variant_from_i64(value: i64) -> &'static Variant;\n\n pub(crate) fn variant_from_string(value: String) -> &'static Variant;\n\n pub(crate) fn variant_from_bool(value: bool) -> &'static Variant;\n\n pub(crate) fn variant_from_f64(value: f64) -> &'static Variant;\n\n\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum CXXComponentValue {\n\n Nil,\n\n Int(i64),\n\n String(String),\n\n Bool(bool),\n\n Real(f64),\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 26, "score": 59189.90622076336 }, { "content": " CXXComponentValue::Bool(value) => value.to_string(),\n\n CXXComponentValue::Real(value) => value.to_string(),\n\n }\n\n }\n\n\n\n fn set_bool(&mut self, value: bool) {\n\n *self = CXXComponentValue::Bool(value)\n\n }\n\n\n\n fn get_bool(&self) -> bool {\n\n match self {\n\n CXXComponentValue::Nil => false,\n\n CXXComponentValue::Int(value) => *value != 0,\n\n CXXComponentValue::String(value) => value.is_empty(),\n\n CXXComponentValue::Bool(value) => *value,\n\n CXXComponentValue::Real(value) => *value != 0.0,\n\n }\n\n }\n\n\n\n fn set_real(&mut self, value: f64) {\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 27, "score": 59188.68089024759 }, { "content": " CXXComponentValue::Bool(value) => {\n\n if *value {\n\n 1\n\n } else {\n\n 0\n\n }\n\n }\n\n CXXComponentValue::Real(value) => *value as i64,\n\n }\n\n }\n\n\n\n fn set_string(&mut self, value: String) {\n\n *self = CXXComponentValue::String(value)\n\n }\n\n\n\n fn get_string(&self) -> String {\n\n match self {\n\n CXXComponentValue::Nil => \"\".to_string(),\n\n CXXComponentValue::Int(value) => value.to_string(),\n\n CXXComponentValue::String(value) => value.clone(),\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 28, "score": 59187.23780172556 }, { "content": " *self = CXXComponentValue::Real(value)\n\n }\n\n\n\n fn get_real(&self) -> f64 {\n\n match self {\n\n CXXComponentValue::Nil => 0.0,\n\n CXXComponentValue::Int(value) => *value as f64,\n\n CXXComponentValue::String(value) => {\n\n let result = str::parse::<f64>(value);\n\n match result {\n\n Ok(value) => value,\n\n Err(_) => 0.0,\n\n }\n\n }\n\n CXXComponentValue::Bool(value) => {\n\n if *value {\n\n 1.0\n\n } else {\n\n 0.0\n\n }\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 29, "score": 59186.5543962405 }, { "content": " *self = CXXComponentValue::Nil;\n\n }\n\n\n\n fn get_nil(&self) -> () {}\n\n\n\n fn set_int(&mut self, value: i64) {\n\n *self = CXXComponentValue::Int(value)\n\n }\n\n\n\n fn get_int(&self) -> i64 {\n\n match self {\n\n CXXComponentValue::Nil => 0,\n\n CXXComponentValue::Int(value) => *value,\n\n CXXComponentValue::String(value) => {\n\n let result = str::parse::<i64>(value);\n\n match result {\n\n Ok(value) => value,\n\n Err(_) => 0,\n\n }\n\n }\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 30, "score": 59186.12361692515 }, { "content": " VariantType::PoolRealArray => {\n\n unimplemented!()\n\n }\n\n VariantType::PoolStringArray => {\n\n unimplemented!()\n\n }\n\n 
VariantType::PoolVector2Array => {\n\n unimplemented!()\n\n }\n\n VariantType::PoolVector3Array => {\n\n unimplemented!()\n\n }\n\n VariantType::PoolColorArray => {\n\n unimplemented!()\n\n }\n\n VariantType::VariantMax => {\n\n unimplemented!()\n\n }\n\n }\n\n}\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 31, "score": 59178.77935967958 }, { "content": " }\n\n VariantType::Plan => {\n\n unimplemented!()\n\n }\n\n VariantType::Quat => {\n\n unimplemented!()\n\n }\n\n VariantType::Aaab => {\n\n unimplemented!()\n\n }\n\n VariantType::Basis => {\n\n unimplemented!()\n\n }\n\n VariantType::Transform => {\n\n unimplemented!()\n\n }\n\n VariantType::Color => {\n\n unimplemented!()\n\n }\n\n VariantType::NodePath => {\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 32, "score": 59177.14052555491 }, { "content": " unimplemented!()\n\n }\n\n VariantType::_RID => {\n\n unimplemented!()\n\n }\n\n VariantType::Object => {\n\n unimplemented!()\n\n }\n\n VariantType::Dictionary => {\n\n unimplemented!()\n\n }\n\n VariantType::Array => {\n\n unimplemented!()\n\n }\n\n VariantType::PoolByteArray => {\n\n unimplemented!()\n\n }\n\n VariantType::PoolIntArray => {\n\n unimplemented!()\n\n }\n", "file_path": "gcs-cxx/src/component/component_value.rs", "rank": 33, "score": 59177.051629974245 }, { "content": "#ifndef GODOT_COMPONENT_SYSTEM_COMPONENT_FIELD_DEFINITION_H\n\n#define GODOT_COMPONENT_SYSTEM_COMPONENT_FIELD_DEFINITION_H\n\n#include \"core/reference.h\"\n\n#include \"gcs-cxx/src/component/component_definition.rs.h\"\n\n\n\n\n", "file_path": "include/component_field_definition.h", "rank": 34, "score": 58673.17867797248 }, { "content": "#include \"component_field_definition.h\"\n\n#include \"gcs-cxx/include/godot/string.h\" // NOLINT(modernize-deprecated-headers)\n\n\n\nStringName ComponentFieldDefinition::get_name() const {\n\n return string_name_from_rust_string(componentFieldDefinition.name);\n\n}\n\n\n\nvoid ComponentFieldDefinition::set_name(const StringName& name) {\n\n componentFieldDefinition.name = godot_string_to_rust_string(name);\n\n}\n\n\n\nVariant::Type ComponentFieldDefinition::get_type() const {\n\n return componentFieldDefinition.field_type;\n\n}\n\n\n\nvoid ComponentFieldDefinition::set_type(Variant::Type type) {\n\n componentFieldDefinition.field_type = type;\n\n}\n\n\n\nvoid ComponentFieldDefinition::_bind_methods() {\n", "file_path": "src/component_field_definition.cpp", "rank": 35, "score": 56230.342794720695 }, { "content": " ClassDB::bind_method(D_METHOD(\"set_name\", \"name\"), &ComponentFieldDefinition::set_name);\n\n ClassDB::bind_method(D_METHOD(\"get_name\"), &ComponentFieldDefinition::get_name);\n\n ClassDB::bind_method(D_METHOD(\"set_field_type\", \"type\"), &ComponentFieldDefinition::set_type);\n\n ClassDB::bind_method(D_METHOD(\"get_field_type\"), &ComponentFieldDefinition::get_type);\n\n\n\n ADD_PROPERTY(PropertyInfo(Variant::STRING, \"name\"), \"set_name\", \"get_name\");\n\n ADD_PROPERTY(PropertyInfo(Variant::INT, \"field_type\", PROPERTY_HINT_ENUM, \"NIL,BOOL,INT,REAL,STRING\"), \"set_field_type\", \"get_field_type\");\n\n}\n\n\n\ngcs::ffi::ComponentFieldDefinition ComponentFieldDefinition::get_definition() {\n\n return componentFieldDefinition;\n\n}\n\n\n\nComponentFieldDefinition::ComponentFieldDefinition() : componentFieldDefinition(\n\n gcs::ffi::create_component_field_definition()) {\n\n\n\n}\n", "file_path": "src/component_field_definition.cpp", "rank": 36, "score": 56229.102176377855 }, { "content": "class 
ComponentInfo : public Reference {\n\n GDCLASS(ComponentInfo, Reference)\n\n friend class ECSWorldBase;\n\nprivate:\n\n rust::box<gcs::ffi::ComponentInfo> componentInfo;\n\n\n\n void set_component_info(gcs::ffi::ComponentInfo* info);\n\n gcs::ffi::ComponentInfo& getComponentInfo();\n\n\n\nprotected:\n\n static void _bind_methods();\n\npublic:\n\n ComponentInfo();\n\n};\n\n\n\n\n\n#endif //GODOT_COMPONENT_SYSTEM_COMPONENT_INFO_H\n", "file_path": "include/component_info.h", "rank": 37, "score": 49500.26897223393 }, { "content": "fn main() {\n\n let platform = env::var(\"GODOT_PLATFORM\");\n\n\n\n let platform = match platform {\n\n Ok(platform) => platform,\n\n Err(err) => {\n\n panic!(\"Could not read GODOT_PLATFORM: {}\", err)\n\n }\n\n };\n\n\n\n let godot_path = env::var(\"GODOT_PATH\");\n\n let godot_path = match godot_path {\n\n Ok(path) => path,\n\n Err(err) => {\n\n panic!(\"Could not read GODOT_PATH: {}\", err)\n\n }\n\n };\n\n\n\n let platform_include_win = format!(\"{godot_path}/platform/{}\", platform);\n\n cxx_build::bridges(vec![\n", "file_path": "gcs-cxx/build.rs", "rank": 38, "score": 49193.9693776529 }, { "content": "use crate::variant::VariantType;\n\nuse std::hash::Hash;\n\n\n", "file_path": "gcs/src/component/component_definition.rs", "rank": 39, "score": 33349.120137064674 }, { "content": "use std::hash::Hash;\n\n\n", "file_path": "gcs/src/component/component_info.rs", "rank": 40, "score": 33345.808275288866 }, { "content": "class GodotComponent : public Reference {\n\n GDCLASS(GodotComponent, Reference);\n\n\n\npublic:\n\n void set_field(const rust::string& name, const Variant &value);\n\n\n\n std::unique_ptr<Variant> get_field(const rust::string& name) const;\n\n\n\nprotected:\n\n static void _bind_methods();\n\n};\n\n\n\n\n\n#endif\n", "file_path": "include/component.h", "rank": 41, "score": 33342.40765722693 }, { "content": "use crate::component::component_definition::ffi::CXXComponentFieldDefinition;\n\nuse cxx::{type_id, ExternType};\n\nuse gcs::component::component_definition::{ComponentDefinition, ComponentFieldDefinition};\n\nuse gcs::variant::VariantType;\n\n\n\n#[cxx::bridge(namespace = gcs::ffi)]\n\npub mod ffi {\n\n #[derive(Hash, Eq, PartialEq, Clone, Default)]\n\n #[cxx_name = \"ComponentFieldDefinition\"]\n\n pub struct CXXComponentFieldDefinition {\n\n pub name: String,\n\n pub field_type: VariantType,\n\n }\n\n\n\n extern \"Rust\" {\n\n include!(\"gcs-cxx/src/godot/variant.rs.h\");\n\n #[cxx_name = \"ComponentDefinition\"]\n\n type CXXComponentDefinition;\n\n\n\n pub fn create_component_definition() -> Box<CXXComponentDefinition>;\n", "file_path": "gcs-cxx/src/component/component_definition.rs", "rank": 42, "score": 32528.822772565778 }, { "content": "use cxx::{type_id, ExternType};\n\nuse gcs::component::component_info::ComponentInfo;\n\n\n\n#[derive(Hash, Eq, PartialEq, Clone, Copy, Default)]\n\npub struct CXXComponentInfo {\n\n hash: u64,\n\n}\n\n\n\nunsafe impl ExternType for CXXComponentInfo {\n\n type Id = type_id!(\"gcs::ffi::ComponentInfo\");\n\n type Kind = cxx::kind::Trivial;\n\n}\n\n\n\nimpl ComponentInfo for CXXComponentInfo {\n\n fn get_hash(&self) -> u64 {\n\n self.hash\n\n }\n\n\n\n fn create(hash: u64) -> Self\n\n where\n", "file_path": "gcs-cxx/src/component/component_info.rs", "rank": 43, "score": 32525.96011329651 }, { "content": "\n\n pub fn add_field(\n\n self: &mut CXXComponentDefinition,\n\n field_definition: CXXComponentFieldDefinition,\n\n );\n\n\n\n pub fn create_component_field_definition() -> 
CXXComponentFieldDefinition;\n\n\n\n }\n\n\n\n extern \"C++\" {\n\n type VariantType = crate::godot::variant::CXXVariantType;\n\n }\n\n}\n\n\n\nimpl ComponentFieldDefinition for CXXComponentFieldDefinition {\n\n fn get_type(&self) -> VariantType {\n\n self.field_type.0\n\n }\n\n\n\n fn get_name(&self) -> String {\n\n self.name.clone()\n\n }\n\n}\n\n\n", "file_path": "gcs-cxx/src/component/component_definition.rs", "rank": 44, "score": 32525.486621900312 }, { "content": "\n\n fn add_field(&mut self, field_definition: Self::FieldDefinition) {\n\n self.fields.push(field_definition);\n\n }\n\n}\n\n\n\nimpl CXXComponentDefinition {\n\n fn add_field(&mut self, field_definition: CXXComponentFieldDefinition) {\n\n ComponentDefinition::add_field(self, field_definition);\n\n }\n\n}\n\n\n", "file_path": "gcs-cxx/src/component/component_definition.rs", "rank": 45, "score": 32515.505943331347 }, { "content": " Self: Sized,\n\n {\n\n CXXComponentInfo { hash }\n\n }\n\n}\n\n\n\npub(crate) fn create_component_info(hash: u64) -> Box<CXXComponentInfo> {\n\n Box::new(CXXComponentInfo::create(hash))\n\n}\n", "file_path": "gcs-cxx/src/component/component_info.rs", "rank": 46, "score": 32511.956083583624 }, { "content": "class ComponentDefinition : public Reference {\n\nGDCLASS(ComponentDefinition, Reference);\n\n\n\nfriend class ECSWorldBase;\n\nprivate:\n\n ::rust::box<gcs::ffi::ComponentDefinition> componentDefinition;\n\n\n\npublic:\n\n ComponentDefinition();\n\n\n\n void add_field(Ref<ComponentFieldDefinition> field_definition);\n\n\n\nprotected:\n\n static void _bind_methods();\n\n};\n\n\n\n\n\n#endif //GODOT_COMPONENT_SYSTEM_COMPONENT_DEFINITION_H\n", "file_path": "include/component_definition.h", "rank": 47, "score": 32501.65720944145 }, { "content": "#include \"register_types.h\"\n\n#include \"core/class_db.h\"\n\n#include \"component.h\"\n\n#include \"ecs_world_2d.h\"\n\n#include \"component_field_definition.h\"\n\n#include \"component_definition.h\"\n\n#include \"component_info.h\"\n\n#include \"ecs_world_base.h\"\n\n\n\nvoid register_godot_component_system_types() {\n\n ClassDB::register_class<GodotComponent>();\n\n ClassDB::register_class<ECSWorld2D>();\n\n ClassDB::register_class<ComponentFieldDefinition>();\n\n ClassDB::register_class<ComponentDefinition>();\n\n ClassDB::register_class<Entity>();\n\n ClassDB::register_class<ComponentInfo>();\n\n}\n\nvoid unregister_godot_component_system_types() {\n\n}", "file_path": "register_types.cpp", "rank": 48, "score": 31917.888574375636 }, { "content": "#ifndef GODOT_COMPONENT_SYSTEM_COMPONENT_H\n\n#define GODOT_COMPONENT_SYSTEM_COMPONENT_H\n\n#include <vector>\n\n#include \"core/reference.h\"\n\n#include \"rust/cxx.h\"\n\n#include \"gcs-cxx/include/godot/variant.h\"\n\n\n", "file_path": "include/component.h", "rank": 49, "score": 30850.72721815114 }, { "content": "#include <memory>\n\n#include \"core/class_db.h\"\n\n#include \"core/script_language.h\"\n\n#include \"gcs-cxx/include/godot/string.h\" // NOLINT(modernize-deprecated-headers)\n\n#include \"component.h\"\n\n\n\nvoid GodotComponent::set_field(const rust::string& name, const Variant &value) {\n\n this->set(string_name_from_rust_string(name), value);\n\n}\n\n\n\nstd::unique_ptr<Variant> GodotComponent::get_field(const rust::string& name) const {\n\n return std::make_unique<Variant>(this->get(string_name_from_rust_string(name)));\n\n}\n\n\n\nvoid GodotComponent::_bind_methods() {\n\n}", "file_path": "src/component.cpp", "rank": 50, "score": 29453.440609217843 }, { "content": "#ifndef 
GODOT_COMPONENT_SYSTEM_COMPONENT_DEFINITION_H\n\n#define GODOT_COMPONENT_SYSTEM_COMPONENT_DEFINITION_H\n\n\n\n#include \"core/reference.h\"\n\n#include \"gcs-cxx/src/component/component_definition.rs.h\"\n\n#include \"component_field_definition.h\"\n\n\n", "file_path": "include/component_definition.h", "rank": 51, "score": 29441.88662735072 }, { "content": "#ifndef GODOT_COMPONENT_SYSTEM_COMPONENT_INFO_H\n\n#define GODOT_COMPONENT_SYSTEM_COMPONENT_INFO_H\n\n#include \"core/reference.h\"\n\n#include \"rust/cxx.h\"\n\n#include \"gcs-cxx/src/ecs_world.rs.h\"\n\n\n", "file_path": "include/component_info.h", "rank": 52, "score": 29441.80202496917 }, { "content": "#include <core/ustring.h>\n\n#include <string>\n\n#include \"rust/cxx.h\"\n\n#include \"gcs-cxx/include/godot/string.h\" // NOLINT(modernize-deprecated-headers)\n\n#include <core/string_name.h>\n\n\n\nrust::string godot_string_to_rust_string(const String &value) {\n\n auto as_wstring = std::wstring(value.c_str());\n\n auto as_string = std::string(as_wstring.begin(), as_wstring.end());\n\n return as_string;\n\n}\n\n\n\nrust::string string_name_to_string(const StringName &string_name) {\n\n String string = string_name;\n\n auto as_wstring = std::wstring(string.c_str());\n\n auto as_string = std::string(as_wstring.begin(), as_wstring.end());\n\n return as_string;\n\n}\n\n\n\nconst StringName &string_name_from_rust_string(rust::string string) {\n\n return *(new StringName(string.c_str()));\n\n}\n", "file_path": "gcs-cxx/src/godot/string.cpp", "rank": 53, "score": 28181.92473443448 }, { "content": "pub mod component_data;\n\npub mod component_definition;\n\npub mod component_info;\n\npub mod component_value;\n", "file_path": "gcs/src/component.rs", "rank": 54, "score": 28166.934100374598 }, { "content": "#include \"component_definition.h\"\n\n#include \"component_field_definition.h\"\n\n\n\nComponentDefinition::ComponentDefinition() : componentDefinition(gcs::ffi::create_component_definition()) {\n\n\n\n}\n\n\n\nvoid ComponentDefinition::add_field(Ref<ComponentFieldDefinition> field_definition) {\n\n componentDefinition->add_field(field_definition->get_definition());\n\n}\n\n\n\nvoid ComponentDefinition::_bind_methods() {\n\n ClassDB::bind_method(D_METHOD(\"add_field\", \"field_definition\"), &ComponentDefinition::add_field);\n\n\n\n}\n", "file_path": "src/component_definition.cpp", "rank": 55, "score": 28159.171801400527 }, { "content": "#include \"component_info.h\"\n\n\n\n\n\nComponentInfo::ComponentInfo() : componentInfo(gcs::ffi::create_component_info(0)){\n\n\n\n}\n\n\n\nvoid ComponentInfo::set_component_info(gcs::ffi::ComponentInfo* argComponentInfo) {\n\n componentInfo = rust::box<gcs::ffi::ComponentInfo>::from_raw(argComponentInfo);\n\n}\n\n\n\ngcs::ffi::ComponentInfo &ComponentInfo::getComponentInfo() {\n\n return componentInfo.operator*();\n\n}\n\n\n\nvoid ComponentInfo::_bind_methods() {\n\n}\n", "file_path": "src/component_info.cpp", "rank": 56, "score": 28159.097200187924 }, { "content": "pub(crate) mod component_data;\n\npub(crate) mod component_definition;\n\npub(crate) mod component_info;\n\npub(crate) mod component_value;\n", "file_path": "gcs-cxx/src/component.rs", "rank": 57, "score": 26988.268980798755 }, { "content": "class ECSWorldBase;\n\n\n", "file_path": "include/component_info.h", "rank": 58, "score": 25889.13891279285 }, { "content": "class ECSWorldBase;\n\n\n", "file_path": "include/component_definition.h", "rank": 59, "score": 25889.13891279285 }, { "content": "import subprocess\n\nimport sys\n\n\n\ndef 
can_build(env, platform):\n\n try:\n\n process = subprocess.run(\"rustup toolchain list\", capture_output=True, text=True, shell=True)\n\n process.check_returncode()\n\n return True\n\n except Exception as err:\n\n print(\"Rustup does not seem to be installed, or is not found in the current path. Please check \"\n\n \"https://rustup.rs/ for how to install rustup.\")\n\n return False\n\n\n\ndef configure(env):\n\n print(\"Checking for installed rust toolchain\")\n\n if sys.platform == 'win32':\n\n os = 'pc-windows'\n\n elif sys.platform == 'darwin':\n\n os = 'osx' #todo: check\n\n else:\n\n os = 'unknown-linux'\n\n try:\n\n process = subprocess.run(\"rustup toolchain list\", capture_output=True, text=True, shell=True)\n\n process.check_returncode()\n\n lines = process.stdout.splitlines()\n\n channel = None\n\n for line in lines:\n\n if line.endswith('(default)'):\n\n if line.startswith('stable'):\n\n channel = 'stable'\n\n break\n\n elif line.startswith('nightly'):\n\n channel = 'nightly'\n\n break\n\n if channel is None:\n\n print('summator_rust: No toolchain detected. Installing stable toolchain')\n\n channel = 'stable'\n\n\n\n if env['platform'] == 'javascript':\n\n print('summator_rust: installing emscripen target for rust')\n\n process = subprocess.run('rustup target add wasm32-unknown-emscripten', shell=True)\n\n process.check_returncode()\n\n elif env.msvc:\n\n print('summator_rust: Installing and/or selecting rust msvc toolchain')\n\n process = subprocess.run('rustup target add x86_64-' + os + '-msvc', shell=True)\n\n process.check_returncode()\n\n process = subprocess.run('rustup default ' + channel + '-x86_64-' + os + '-msvc', shell=True)\n\n process.check_returncode()\n\n else:\n\n print('summator_rust: Installing and/or selecting rust gnu toolchain')\n\n process = subprocess.run('rustup target add x86_64-' + os + '-gnu', shell=True)\n\n process.check_returncode()\n\n process = subprocess.run('rustup default ' + channel + '-x86_64-' + os + '-gnu', shell=True)\n\n process.check_returncode()\n\n\n\n except Exception:\n\n raise RuntimeError(\"Rustup does not seem to be installed, or is not found in the current path. Please check \"\n\n \"https://rustup.rs/ for how to install rustup.\")\n\n pass\n", "file_path": "config.py", "rank": 60, "score": 24889.311302122467 }, { "content": "def configure(env):\n\n print(\"Checking for installed rust toolchain\")\n\n if sys.platform == 'win32':\n\n os = 'pc-windows'\n\n elif sys.platform == 'darwin':\n\n os = 'osx' #todo: check\n\n else:\n\n os = 'unknown-linux'\n\n try:\n\n process = subprocess.run(\"rustup toolchain list\", capture_output=True, text=True, shell=True)\n\n process.check_returncode()\n\n lines = process.stdout.splitlines()\n\n channel = None\n\n for line in lines:\n\n if line.endswith('(default)'):\n\n if line.startswith('stable'):\n\n channel = 'stable'\n\n break\n\n elif line.startswith('nightly'):\n\n channel = 'nightly'\n\n break\n\n if channel is None:\n\n print('summator_rust: No toolchain detected. 
Installing stable toolchain')\n\n channel = 'stable'\n\n\n\n if env['platform'] == 'javascript':\n\n print('summator_rust: installing emscripen target for rust')\n\n process = subprocess.run('rustup target add wasm32-unknown-emscripten', shell=True)\n\n process.check_returncode()\n\n elif env.msvc:\n\n print('summator_rust: Installing and/or selecting rust msvc toolchain')\n\n process = subprocess.run('rustup target add x86_64-' + os + '-msvc', shell=True)\n\n process.check_returncode()\n\n process = subprocess.run('rustup default ' + channel + '-x86_64-' + os + '-msvc', shell=True)\n\n process.check_returncode()\n\n else:\n\n print('summator_rust: Installing and/or selecting rust gnu toolchain')\n\n process = subprocess.run('rustup target add x86_64-' + os + '-gnu', shell=True)\n\n process.check_returncode()\n\n process = subprocess.run('rustup default ' + channel + '-x86_64-' + os + '-gnu', shell=True)\n\n process.check_returncode()\n\n\n\n except Exception:\n\n raise RuntimeError(\"Rustup does not seem to be installed, or is not found in the current path. Please check \"\n\n \"https://rustup.rs/ for how to install rustup.\")\n", "file_path": "config.py", "rank": 61, "score": 23963.83799957967 }, { "content": "def can_build(env, platform):\n\n try:\n\n process = subprocess.run(\"rustup toolchain list\", capture_output=True, text=True, shell=True)\n\n process.check_returncode()\n\n return True\n\n except Exception as err:\n\n print(\"Rustup does not seem to be installed, or is not found in the current path. Please check \"\n\n \"https://rustup.rs/ for how to install rustup.\")\n", "file_path": "config.py", "rank": 62, "score": 23104.722084622015 }, { "content": "namespace gcs {\n\n namespace ffi {\n\n using Variant = ::Variant;\n\n using VariantType = Variant::Type;\n\n\n\n int64_t variant_as_i64(const Variant &variant);\n\n\n\n rust::string variant_as_string(const Variant &variant);\n\n\n\n bool variant_as_bool(const Variant &variant);\n\n\n\n double variant_as_f64(const Variant &variant);\n\n\n\n const Variant &empty_variant();\n\n\n\n const Variant &variant_from_i64(int64_t value);\n\n\n\n const Variant &variant_from_string(rust::string value);\n\n\n\n const Variant &variant_from_bool(bool value);\n\n\n\n const Variant &variant_from_f64(double value);\n\n }\n", "file_path": "gcs-cxx/include/godot/variant.h", "rank": 63, "score": 21558.924990746375 }, { "content": "godot-component-system\n", "file_path": "README.md", "rank": 64, "score": 19017.077595958035 }, { "content": "use crate::godot::variant::ffi::Variant;\n\nuse cxx::{type_id, ExternType};\n\nuse gcs::variant::VariantType;\n\n\n\n#[cxx::bridge(namespace = gcs::ffi)]\n\npub mod ffi {\n\n unsafe extern \"C++\" {\n\n include!(\"gcs-cxx/include/godot/variant.h\");\n\n include!(\"rust/cxx.h\");\n\n pub type Variant;\n\n #[cxx_name = \"VariantType\"]\n\n pub type CXXVariantType = crate::godot::variant::CXXVariantType;\n\n\n\n pub fn get_type(self: &Variant) -> CXXVariantType;\n\n\n\n pub fn variant_as_i64(variant: &Variant) -> i64;\n\n pub fn variant_as_string(variant: &Variant) -> String;\n\n pub fn variant_as_bool(variant: &Variant) -> bool;\n\n pub fn variant_as_f64(variant: &Variant) -> f64;\n\n }\n", "file_path": "gcs-cxx/src/godot/variant.rs", "rank": 67, "score": 25.641046630768738 }, { "content": " let mut entity_id = TestEntityId::create();\n\n entity_id.id = 1;\n\n\n\n world.register_entity(&entity_id).unwrap();\n\n let mut data = TestComponentData::new(entity_id);\n\n data.set_field(field_name.to_string(), 
&TestComponentValue::Int(27));\n\n world\n\n .set_component_data(&entity_id, component_name.to_string(), &data)\n\n .unwrap();\n\n let result = world.get_component_data(component_name.to_string());\n\n assert!(result.is_ok(), \"Should have returned Ok result\");\n\n let components = result.unwrap();\n\n assert_eq!(2, components.len());\n\n\n\n for data in components {\n\n let _value = data.get_field(field_name.to_string());\n\n match data.entity.id {\n\n 0 => {\n\n assert!(\n\n matches!(TestComponentValue::Int(9), _value),\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 69, "score": 24.685425178966554 }, { "content": " }\n\n\n\n pub fn has_component(&self, name: String) -> bool {\n\n self.component_names.contains_key(&name)\n\n }\n\n\n\n pub fn create_entity(&mut self) -> Box<TComponentData::EntityIdType> {\n\n let id = EntityId::create();\n\n self.entities.push(id);\n\n Box::new(*self.entities.last().unwrap())\n\n }\n\n\n\n pub fn register_entity(\n\n &mut self,\n\n id: &TComponentData::EntityIdType,\n\n ) -> Result<(), RegisterEntityError> {\n\n if self.entities.contains(id) {\n\n Err(AlreadyRegistered)\n\n } else {\n\n self.entities.push(*id);\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 70, "score": 23.92565342697277 }, { "content": "\n\n assert!(data.fields.contains_key(field_name));\n\n\n\n let result = world.set_component_data(&entity_id, component_name.to_string(), &data);\n\n assert!(result.is_ok());\n\n\n\n let stored_data = world\n\n .get_component_of_entity(&entity_id, component_name.to_string())\n\n .unwrap();\n\n\n\n let field_data = stored_data.get_field(field_name.to_string());\n\n assert_eq!(\n\n TestComponentValue::Int(value),\n\n *field_data,\n\n \"Stored value should be the same as the one supplied\"\n\n );\n\n }\n\n #[test]\n\n pub fn set_component_adds_initializes_entity_components_if_not_present() {\n\n let mut world =\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 71, "score": 23.63129332877054 }, { "content": " fn parse_str(_input: &str) -> Result<Self, String>\n\n where\n\n Self: Sized,\n\n {\n\n Result::Err(\"Not Implemented\".to_string())\n\n }\n\n }\n\n\n\n #[derive(Clone, PartialEq, Debug)]\n\n pub enum TestComponentValue {\n\n Nil,\n\n Int(i64),\n\n String(String),\n\n Bool(bool),\n\n Real(f64),\n\n }\n\n\n\n impl ComponentValue for TestComponentValue {\n\n fn get_type(&self) -> VariantType {\n\n match self {\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 72, "score": 23.613904179726475 }, { "content": " self.fields.get(&field).unwrap()\n\n }\n\n\n\n fn set_field(&mut self, field: String, value: &Self::ComponentValueType) {\n\n self.fields.insert(field, value.clone());\n\n }\n\n }\n\n\n\n #[derive(Default, Hash, Eq, PartialEq, Copy, Clone)]\n\n pub struct TestComponentInfo {\n\n pub hash: u64,\n\n }\n\n\n\n impl ComponentInfo for TestComponentInfo {\n\n fn get_hash(&self) -> u64 {\n\n self.hash\n\n }\n\n\n\n fn create(hash: u64) -> Self\n\n where\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 73, "score": 23.256959011777443 }, { "content": " let field_name = \"TestField\";\n\n\n\n let mut definition = TestComponentDefinition::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n field_type: VariantType::Int,\n\n name: field_name.to_string(),\n\n };\n\n definition.add_field(field_definition);\n\n world\n\n .register_component(component_name.to_string(), definition)\n\n .unwrap();\n\n\n\n let entity_id = world.create_entity();\n\n let mut data = TestComponentData::new(*entity_id);\n\n 
data.set_field(field_name.to_string(), &TestComponentValue::Int(9));\n\n\n\n let field_name = \"TestField\";\n\n world\n\n .set_component_data(&entity_id, component_name.to_string(), &data)\n\n .unwrap();\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 74, "score": 23.25540850507489 }, { "content": " <ecs_world::tests::TestComponentData as ComponentData>::ComponentValueType,\n\n >,\n\n }\n\n\n\n impl ComponentData for TestComponentData {\n\n type EntityIdType = TestEntityId;\n\n type ComponentValueType = TestComponentValue;\n\n\n\n fn new(entity: TestEntityId) -> Self {\n\n TestComponentData {\n\n entity,\n\n fields: HashMap::new(),\n\n }\n\n }\n\n\n\n fn get_entity(&self) -> TestEntityId {\n\n self.entity\n\n }\n\n\n\n fn get_field(&self, field: String) -> &Self::ComponentValueType {\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 75, "score": 23.187949718177187 }, { "content": " ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_name = \"Integer\";\n\n let field_definition = TestComponentFieldDefinition {\n\n name: field_name.to_string(),\n\n field_type: VariantType::Int,\n\n };\n\n\n\n let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n\n let component_name = \"Test\";\n\n world\n\n .register_component(component_name.to_string(), component_definition)\n\n .unwrap();\n\n\n\n let entity_id = *world.create_entity();\n\n let mut data = TestComponentData::new(entity_id);\n\n let value = 2;\n\n data.set_field(field_name.to_string(), &TestComponentValue::Int(value));\n\n\n\n world\n\n .set_component_data(&entity_id, component_name.to_string(), &data)\n\n .unwrap();\n\n\n\n assert_eq!(1, world.components_of_entity.get(&entity_id).unwrap().len())\n\n }\n\n}\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 76, "score": 22.61361381674674 }, { "content": "#[derive(PartialEq, Debug)]\n\npub enum SetComponentDataError {\n\n EntityNotFound,\n\n ComponentNotFound,\n\n DataInUse,\n\n}\n\n\n\nimpl Display for SetComponentDataError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::EntityNotFound => {\n\n write!(f, \"Entity with that id was not found\")\n\n }\n\n Self::ComponentNotFound => {\n\n write!(f, \"Component with that name is already registered\")\n\n }\n\n Self::DataInUse => {\n\n write!(f, \"The data is already exclusively borrowed\")\n\n }\n\n }\n", "file_path": "gcs/src/world/errors.rs", "rank": 77, "score": 22.602707565232425 }, { "content": " .add_component_to_entity(&entity_id, component_name.to_string())\n\n .unwrap();\n\n\n\n let mut data = TestComponentData::new(entity_id);\n\n let value = 2;\n\n data.set_field(field_name.to_string(), &TestComponentValue::Int(value));\n\n\n\n assert!(\n\n world\n\n .set_component_data(&entity_id, component_name.to_string(), &data)\n\n .is_ok(),\n\n \"set_component_data should have returned Ok\"\n\n );\n\n\n\n let stored_data = world\n\n .get_component_of_entity(&entity_id, component_name.to_string())\n\n .unwrap();\n\n\n\n let field_data = stored_data.get_field(field_name.to_string());\n\n assert_eq!(\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 78, "score": 21.861561197325962 }, { "content": "}\n\n\n\n#[derive(Hash, Eq, PartialEq, Clone, Default)]\n\npub struct CXXVariantType(pub(crate) VariantType);\n\n\n\nunsafe impl ExternType for CXXVariantType {\n\n type Id = type_id!(\"gcs::ffi::VariantType\");\n\n type Kind = cxx::kind::Trivial;\n\n}\n\n\n\nimpl 
From<&ffi::Variant> for i64 {\n\n fn from(variant: &Variant) -> Self {\n\n ffi::variant_as_i64(variant)\n\n }\n\n}\n\n\n\nimpl From<&ffi::Variant> for String {\n\n fn from(variant: &Variant) -> Self {\n\n ffi::variant_as_string(variant)\n\n }\n", "file_path": "gcs-cxx/src/godot/variant.rs", "rank": 79, "score": 21.63923477818666 }, { "content": " use crate::variant::VariantType;\n\n use crate::world::ecs_world;\n\n use crate::world::ecs_world::ECSWorld;\n\n use crate::world::errors::SetComponentDataError::{ComponentNotFound, EntityNotFound};\n\n use crate::world::errors::{GetComponentDataError, GetComponentOfEntityError};\n\n use std::borrow::Borrow;\n\n use std::collections::HashMap;\n\n\n\n #[derive(Default, Clone, Hash, Debug, PartialEq, Eq)]\n\n pub struct TestComponentFieldDefinition {\n\n pub name: String,\n\n pub field_type: VariantType,\n\n }\n\n\n\n impl ComponentFieldDefinition for TestComponentFieldDefinition {\n\n fn get_type(&self) -> VariantType {\n\n self.field_type\n\n }\n\n\n\n fn get_name(&self) -> String {\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 80, "score": 21.45564682298891 }, { "content": "\n\n let mut component_definition_2 = TestComponentDefinition::default();\n\n component_definition_2.add_field(field_definition_2);\n\n\n\n let component_name = \"Test_2\";\n\n world\n\n .register_component(\"Test_1\".to_string(), component_definition)\n\n .unwrap();\n\n world\n\n .register_component(component_name.to_string(), component_definition_2)\n\n .unwrap();\n\n\n\n let entity_id = *world.create_entity();\n\n world\n\n .add_component_to_entity(&entity_id, \"Test_1\".to_string())\n\n .unwrap();\n\n\n\n let mut data = TestComponentData::new(entity_id);\n\n let value = 2;\n\n data.set_field(field_name.to_string(), &TestComponentValue::Int(value));\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 82, "score": 21.026201068930977 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n pub fn set_component_adds_component_to_entity_if_it_is_not_added() {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n name: \"Boolean\".to_string(),\n\n field_type: VariantType::Bool,\n\n };\n\n\n\n let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n\n let field_name = \"Integer\";\n\n let field_definition_2 = TestComponentFieldDefinition {\n\n name: field_name.to_string(),\n\n field_type: VariantType::Int,\n\n };\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 83, "score": 20.622108269744544 }, { "content": " pub fn set_component_data_adds_component_data() {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_name = \"Integer\";\n\n let field_definition = TestComponentFieldDefinition {\n\n name: field_name.to_string(),\n\n field_type: VariantType::Int,\n\n };\n\n\n\n let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n\n let component_name = \"Test\";\n\n world\n\n .register_component(component_name.to_string(), component_definition)\n\n .unwrap();\n\n\n\n let entity_id = *world.create_entity();\n\n\n\n world\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 84, "score": 20.48865697796268 }, { "content": "\n\n let component_information = self.component_names.get(&component).unwrap();\n\n\n\n let component_fields = self\n\n 
.component_definitions\n\n .get(component_information)\n\n .unwrap();\n\n\n\n for component_field in &component_fields.get_fields() {\n\n let new_data = data.get_field(component_field.get_name());\n\n stored_data.set_field(component_field.get_name(), &new_data.clone());\n\n }\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn is_component_added_to_entity(\n\n &self,\n\n entity_id: &TComponentData::EntityIdType,\n\n component: String,\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 85, "score": 20.207107499169446 }, { "content": " } else {\n\n 0.0\n\n }\n\n }\n\n TestComponentValue::Real(value) => *value,\n\n }\n\n }\n\n }\n\n\n\n impl Default for TestComponentValue {\n\n fn default() -> Self {\n\n TestComponentValue::Nil\n\n }\n\n }\n\n\n\n #[derive(Default, Clone)]\n\n pub struct TestComponentData {\n\n pub entity: TestEntityId,\n\n pub fields: HashMap<\n\n String,\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 86, "score": 20.079345649723326 }, { "content": " let mut component_definition_2 = TestComponentDefinition::default();\n\n component_definition_2.add_field(field_definition_2);\n\n\n\n world\n\n .register_component(\"Test\".to_string(), component_definition)\n\n .unwrap();\n\n let result = world.register_component(\"Test\".to_string(), component_definition_2);\n\n assert!(result.is_err())\n\n }\n\n\n\n #[test]\n\n pub fn register_component_adds_a_component_with_an_existing_definition_under_a_different_name_with_a_unique_hash(\n\n ) {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n name: \"Field\".to_string(),\n\n field_type: VariantType::Nil,\n\n };\n\n\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 87, "score": 19.83207544098956 }, { "content": "use cxx::{type_id, ExternType};\n\nuse uuid::Uuid;\n\n\n\nuse gcs::entity::EntityId;\n\n\n\nuse crate::godot::error::GCSResult;\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]\n\npub struct CXXEntityId(uuid::Uuid);\n\n\n\npub(crate) type EntityIdResult = GCSResult<Box<CXXEntityId>>;\n\n\n\nimpl EntityId for CXXEntityId {\n\n fn create() -> Self\n\n where\n\n Self: Sized,\n\n {\n\n CXXEntityId(Uuid::new_v4())\n\n }\n\n\n", "file_path": "gcs-cxx/src/entity.rs", "rank": 88, "score": 19.749187605425078 }, { "content": " Ok(())\n\n }\n\n }\n\n\n\n fn add_component_to_entity(\n\n &mut self,\n\n entity_id: &TComponentData::EntityIdType,\n\n component: String,\n\n ) -> Result<(), String> {\n\n if !self.has_component(component.clone()) {\n\n Err(\"Component is not registered\".to_string())\n\n } else if self.is_component_added_to_entity(&entity_id, component.clone()) {\n\n Err(\"Component was already added for that entity\".to_string())\n\n } else {\n\n let components = self.components.get_mut(&component).unwrap();\n\n let value = RefCell::new(TComponentData::new(*entity_id));\n\n let data = Rc::new(value);\n\n components.push(data.clone());\n\n\n\n let entity_components = self\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 89, "score": 19.69783238915548 }, { "content": "}\n\n\n\nunsafe impl ExternType for CXXEntityId {\n\n type Id = type_id!(\"gcs::ffi::EntityId\");\n\n type Kind = cxx::kind::Trivial;\n\n}\n\n\n\npub(crate) fn create_entity() -> Box<CXXEntityId> {\n\n Box::new(CXXEntityId::create())\n\n}\n\n\n\npub(crate) fn entity_id_from_string(id: String) -> Box<EntityIdResult> {\n\n Box::new(match CXXEntityId::parse_str(id.as_str()) {\n\n Ok(value) => 
EntityIdResult::new_result(Box::new(value)),\n\n Err(err) => EntityIdResult::new_error(err),\n\n })\n\n}\n", "file_path": "gcs-cxx/src/entity.rs", "rank": 90, "score": 19.64989545879327 }, { "content": " TestComponentValue::Int(value),\n\n *field_data,\n\n \"Stored value should be the same as the one supplied\"\n\n );\n\n\n\n let components = world\n\n .get_component_data(component_name.to_string())\n\n .unwrap();\n\n let stored_data = components.first().unwrap();\n\n let field_data = stored_data.get_field(field_name.to_string());\n\n assert_eq!(\n\n TestComponentValue::Int(value),\n\n *field_data,\n\n \"Stored value should be the same as the one supplied\"\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn set_component_checks_that_entity_exists() {\n\n let mut world =\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 91, "score": 19.449459020019624 }, { "content": " }\n\n\n\n #[test]\n\n pub fn register_component_does_not_allow_adding_of_a_component_with_a_name_that_already_exists()\n\n {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n name: \"Field\".to_string(),\n\n field_type: VariantType::Nil,\n\n };\n\n\n\n let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n\n let field_definition_2 = TestComponentFieldDefinition {\n\n name: \"Field\".to_string(),\n\n field_type: VariantType::Int,\n\n };\n\n\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 92, "score": 19.390320027759216 }, { "content": " entity->set_entity_id(rust_entity.into_raw());\n\n return {entity};\n\n}\n\n\n\nvoid ECSWorldBase::register_entity(Ref<Entity> entity) {\n\n auto result= world->register_entity(entity->get_entity_id());\n\n\n\n ERR_FAIL_COND_MSG(result->is_error(), string_name_from_rust_string(result->get_error()));\n\n}\n\n\n\nRef<ComponentInfo> ECSWorldBase::register_script_component(const StringName &name, Ref<Script> resource) {\n\n auto property_list = List<PropertyInfo>();\n\n resource->get_script_property_list(&property_list);\n\n auto definition = gcs::ffi::create_component_definition();\n\n for (int i=0; i < property_list.size(); ++i) {\n\n auto field_definition = gcs::ffi::create_component_field_definition();\n\n auto property_info = property_list[i];\n\n field_definition.name = godot_string_to_rust_string(property_info.name);\n\n field_definition.field_type = property_info.type;\n\n definition->add_field(field_definition);\n", "file_path": "src/ecs_world_base.cpp", "rank": 94, "score": 18.402902316978338 }, { "content": " let stored_component = world\n\n .get_component_of_entity(&entity_id, component_name.to_string())\n\n .unwrap();\n\n\n\n assert_eq!(\n\n entity_id,\n\n stored_component.get_entity(),\n\n \"Added component should belong to the entity that was passed\"\n\n );\n\n }\n\n\n\n #[test]\n\n pub fn add_component_to_entity_does_not_allow_adding_the_same_component_twice_to_an_entity() {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n name: \"Integer\".to_string(),\n\n field_type: VariantType::Int,\n\n };\n\n\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 95, "score": 18.310949550242285 }, { "content": " .components_of_entity\n\n .entry(*entity_id)\n\n .or_insert_with(HashMap::new);\n\n\n\n entity_components.insert(component, data);\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn 
set_component_data(\n\n &mut self,\n\n entity_id: &TComponentData::EntityIdType,\n\n component: String,\n\n data: &TComponentData,\n\n ) -> Result<(), SetComponentDataError> {\n\n if !self.entities.contains(entity_id) {\n\n Err(EntityNotFound)\n\n } else if !self.components.contains_key(&component) {\n\n Err(ComponentNotFound)\n\n } else {\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 96, "score": 18.10965374712334 }, { "content": " let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n\n let component_name = \"Test\";\n\n world\n\n .register_component(component_name.to_string(), component_definition)\n\n .unwrap();\n\n\n\n let entity_id = *world.create_entity();\n\n\n\n world\n\n .add_component_to_entity(&entity_id, component_name.to_string())\n\n .unwrap();\n\n\n\n let result = world.add_component_to_entity(&entity_id, component_name.to_string());\n\n\n\n assert!(result.is_err(), \"Result should have been Err\");\n\n }\n\n\n\n #[test]\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 97, "score": 17.723846231502208 }, { "content": "\n\n let mut uuid_1 = *world.create_entity();\n\n uuid_1.id = 1;\n\n let uuid_2 = *world.create_entity();\n\n\n\n assert_ne!(uuid_1, uuid_2, \"Should have created unique entities\");\n\n }\n\n\n\n #[test]\n\n pub fn add_component_to_entity_adds_a_new_component_to_an_entity() {\n\n let mut world =\n\n ECSWorld::<TestComponentDefinition, TestComponentData, TestComponentInfo>::default();\n\n let field_definition = TestComponentFieldDefinition {\n\n name: \"Integer\".to_string(),\n\n field_type: VariantType::Int,\n\n };\n\n\n\n let mut component_definition = TestComponentDefinition::default();\n\n component_definition.add_field(field_definition);\n\n\n", "file_path": "gcs/src/world/ecs_world.rs", "rank": 99, "score": 17.51046591000148 } ]
Rust
rust/src/eddsa/utils.rs
hermeznetwork/hermez_flutter_sdk
c165ba3cdb6ecfc5f7cc476e658ff3265e178312
extern crate num; extern crate num_bigint; extern crate num_traits; use num_bigint::{BigInt, ToBigInt}; use num_traits::{One, Zero}; pub fn modulus(a: &BigInt, m: &BigInt) -> BigInt { ((a % m) + m) % m } pub fn modinv(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let big_zero: BigInt = Zero::zero(); if a == &big_zero { return Err("no mod inv of Zero".to_string()); } let mut mn = (q.clone(), a.clone()); let mut xy: (BigInt, BigInt) = (Zero::zero(), One::one()); while mn.1 != big_zero { xy = (xy.1.clone(), xy.0 - (mn.0.clone() / mn.1.clone()) * xy.1); mn = (mn.1.clone(), modulus(&mn.0, &mn.1)); } while xy.0 < Zero::zero() { xy.0 = modulus(&xy.0, q); } Ok(xy.0) } /* pub fn modinv_v2(a0: &BigInt, m0: &BigInt) -> BigInt { if m0 == &One::one() { return One::one(); } let (mut a, mut m, mut x0, mut inv): (BigInt, BigInt, BigInt, BigInt) = (a0.clone(), m0.clone(), Zero::zero(), One::one()); while a > One::one() { inv = inv - (&a / m.clone()) * x0.clone(); a = a % m.clone(); std::mem::swap(&mut a, &mut m); std::mem::swap(&mut x0, &mut inv); } if inv < Zero::zero() { inv += m0.clone() } inv } pub fn modinv_v3(a: &BigInt, q: &BigInt) -> BigInt { let mut aa: BigInt = a.clone(); let mut qq: BigInt = q.clone(); if qq < Zero::zero() { qq = -qq; } if aa < Zero::zero() { aa = -aa; } let d = num::Integer::gcd(&aa, &qq); if d != One::one() { println!("ERR no mod_inv"); } let res: BigInt; if d < Zero::zero() { res = d + qq; } else { res = d; } res } pub fn modinv_v4(x: &BigInt, q: &BigInt) -> BigInt { let (gcd, inverse, _) = extended_gcd(x.clone(), q.clone()); let one: BigInt = One::one(); if gcd == one { modulus(&inverse, q) } else { panic!("error: gcd!=one") } } pub fn extended_gcd(a: BigInt, b: BigInt) -> (BigInt, BigInt, BigInt) { let (mut s, mut old_s) = (BigInt::zero(), BigInt::one()); let (mut t, mut old_t) = (BigInt::one(), BigInt::zero()); let (mut r, mut old_r) = (b, a); while r != BigInt::zero() { let quotient = &old_r / &r; old_r -= &quotient * &r; std::mem::swap(&mut old_r, &mut r); old_s -= &quotient * &s; std::mem::swap(&mut old_s, &mut s); old_t -= quotient * &t; std::mem::swap(&mut old_t, &mut t); } let _quotients = (t, s); // == (a, b) / gcd (old_r, old_s, old_t) } */ pub fn concatenate_arrays<T: Clone>(x: &[T], y: &[T]) -> Vec<T> { x.iter().chain(y).cloned().collect() } pub fn modsqrt(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut s = q - &one; let mut e: BigInt = Zero::zero(); while &s % 2 == zero { s = s >> 1; e = e + &one; } let mut n: BigInt = 2.to_bigint().unwrap(); while legendre_symbol(&n, q) != -1 { n = &n + &one; } let mut y = a.modpow(&((&s + &one) >> 1), q); let mut b = a.modpow(&s, q); let mut g = n.modpow(&s, q); let mut r = e; loop { let mut t = b.clone(); let mut m: BigInt = Zero::zero(); while &t != &one { t = modulus(&(&t * &t), q); m = m + &one; } if m == zero { return Ok(y.clone()); } t = g.modpow(&(2.to_bigint().unwrap().modpow(&(&r - &m - 1), q)), q); g = g.modpow(&(2.to_bigint().unwrap().modpow(&(r - &m), q)), q); y = modulus(&(y * t), q); b = modulus(&(b * &g), q); r = m.clone(); } } #[allow(dead_code)] pub fn modsqrt_v2(a: &BigInt, q: 
&BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut p = q - &one; let mut s: BigInt = Zero::zero(); while &p % 2.to_bigint().unwrap() == zero { s = s + &one; p = p >> 1; } let mut z: BigInt = One::one(); while legendre_symbol(&z, q) != -1 { z = &z + &one; } let mut c = z.modpow(&p, q); let mut x = a.modpow(&((&p + &one) >> 1), q); let mut t = a.modpow(&p, q); let mut m = s; while &t != &one { let mut i: BigInt = One::one(); let mut e: BigInt = 2.to_bigint().unwrap(); while i < m { if t.modpow(&e, q) == one { break; } e = e * 2.to_bigint().unwrap(); i = i + &one; } let b = c.modpow(&(2.to_bigint().unwrap().modpow(&(&m - &i - 1), q)), q); x = modulus(&(x * &b), q); t = modulus(&(t * &b * &b), q); c = modulus(&(&b * &b), q); m = i.clone(); } return Ok(x); } pub fn legendre_symbol(a: &BigInt, q: &BigInt) -> i32 { let one: BigInt = One::one(); let ls: BigInt = a.modpow(&((q - &one) >> 1), &q); if &(ls) == &(q - one) { return -1; } 1 } #[cfg(test)] mod tests { use super::*; #[test] fn test_mod_inverse() { let a = BigInt::parse_bytes(b"123456789123456789123456789123456789123456789", 10).unwrap(); let b = BigInt::parse_bytes(b"12345678", 10).unwrap(); assert_eq!( modinv(&a, &b).unwrap(), BigInt::parse_bytes(b"641883", 10).unwrap() ); } #[test] fn test_sqrtmod() { let a = BigInt::parse_bytes( b"6536923810004159332831702809452452174451353762940761092345538667656658715568", 10, ) .unwrap(); let q = BigInt::parse_bytes( b"7237005577332262213973186563042994240857116359379907606001950938285454250989", 10, ) .unwrap(); assert_eq!( (modsqrt(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); assert_eq!( (modsqrt_v2(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); } }
extern crate num; extern crate num_bigint; extern crate num_traits; use num_bigint::{BigInt, ToBigInt}; use num_traits::{One, Zero}; pub fn modulus(a: &BigInt, m: &BigInt) -> BigInt { ((a % m) + m) % m } pub fn modinv(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let big_zero: BigInt = Zero::zero(); if a == &big_zero { return Err("no mod inv of Zero".to_string()); } let mut mn = (q.clone(), a.clone()); let mut xy: (BigInt, BigInt) = (Zero::zero(), One::one()); while mn.1 != big_zero { xy = (xy.1.clone(), xy.0 - (mn.0.clone() / mn.1.clone()) * xy.1); mn = (mn.1.clone(), modulus(&mn.0, &mn.1)); } while xy.0 < Zero::zero() { xy.0 = modulus(&xy.0, q); } Ok(xy.0) } /* pub fn modinv_v2(a0: &BigInt, m0: &BigInt) -> BigInt { if m0 == &One::one() { return One::one(); } let (mut a, mut m, mut x0, mut inv): (BigInt, BigInt, BigInt, BigInt) = (a0.clone(), m0.clone(), Zero::zero(), One::one()); while a > One::one() { inv = inv - (&a / m.clone()) * x0.clone(); a = a % m.clone(); std::mem::swap(&mut a, &mut m); std::mem::swap(&mut x0, &mut inv); } if inv < Zero::zero() { inv += m0.clone() } inv } pub fn modinv_v3(a: &BigInt, q: &BigInt) -> BigInt { let mut aa: BigInt = a.clone(); let mut qq: BigInt = q.clone(); if qq < Zero::zero() { qq = -qq; } if aa < Zero::zero() { aa = -aa; } let d = num::Integer::gcd(&aa, &qq); if d != One::one() { println!("ERR no mod_inv"); } let res: BigInt; if d < Zero::zero() { res = d + qq; } else { res = d; } res } pub fn modinv_v4(x: &BigInt, q: &BigInt) -> BigInt { let (gcd, inverse, _) = extended_gcd(x.clone(), q.clone()); let one: BigInt = One::one(); if gcd == one { modulus(&inverse, q) } else { panic!("error: gcd!=one") } } pub fn extended_gcd(a: BigInt, b: BigInt) -> (BigInt, BigInt, BigInt) { let (mut s, mut old_s) = (BigInt::zero(), BigInt::one()); let (mut t, mut old_t) = (BigInt::one(), BigInt::zero()); let (mut r, mut old_r) = (b, a); while r != BigInt::zero() { let quotient = &old_r / &r; old_r -= &quotient * &r; std::mem::swap(&mut
egendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut s = q - &one; let mut e: BigInt = Zero::zero(); while &s % 2 == zero { s = s >> 1; e = e + &one; } let mut n: BigInt = 2.to_bigint().unwrap(); while legendre_symbol(&n, q) != -1 { n = &n + &one; } let mut y = a.modpow(&((&s + &one) >> 1), q); let mut b = a.modpow(&s, q); let mut g = n.modpow(&s, q); let mut r = e; loop { let mut t = b.clone(); let mut m: BigInt = Zero::zero(); while &t != &one { t = modulus(&(&t * &t), q); m = m + &one; } if m == zero { return Ok(y.clone()); } t = g.modpow(&(2.to_bigint().unwrap().modpow(&(&r - &m - 1), q)), q); g = g.modpow(&(2.to_bigint().unwrap().modpow(&(r - &m), q)), q); y = modulus(&(y * t), q); b = modulus(&(b * &g), q); r = m.clone(); } } #[allow(dead_code)] pub fn modsqrt_v2(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if legendre_symbol(&a, q) != 1 { return Err("not a mod p square".to_string()); } else if a == &zero { return Err("not a mod p square".to_string()); } else if q == &2.to_bigint().unwrap() { return Err("not a mod p square".to_string()); } else if q % 4.to_bigint().unwrap() == 3.to_bigint().unwrap() { let r = a.modpow(&((q + one) / 4), &q); return Ok(r); } let mut p = q - &one; let mut s: BigInt = Zero::zero(); while &p % 2.to_bigint().unwrap() == zero { s = s + &one; p = p >> 1; } let mut z: BigInt = One::one(); while legendre_symbol(&z, q) != -1 { z = &z + &one; } let mut c = z.modpow(&p, q); let mut x = a.modpow(&((&p + &one) >> 1), q); let mut t = a.modpow(&p, q); let mut m = s; while &t != &one { let mut i: BigInt = One::one(); let mut e: BigInt = 2.to_bigint().unwrap(); while i < m { if t.modpow(&e, q) == one { break; } e = e * 2.to_bigint().unwrap(); i = i + &one; } let b = c.modpow(&(2.to_bigint().unwrap().modpow(&(&m - &i - 1), q)), q); x = modulus(&(x * &b), q); t = modulus(&(t * &b * &b), q); c = modulus(&(&b * &b), q); m = i.clone(); } return Ok(x); } pub fn legendre_symbol(a: &BigInt, q: &BigInt) -> i32 { let one: BigInt = One::one(); let ls: BigInt = a.modpow(&((q - &one) >> 1), &q); if &(ls) == &(q - one) { return -1; } 1 } #[cfg(test)] mod tests { use super::*; #[test] fn test_mod_inverse() { let a = BigInt::parse_bytes(b"123456789123456789123456789123456789123456789", 10).unwrap(); let b = BigInt::parse_bytes(b"12345678", 10).unwrap(); assert_eq!( modinv(&a, &b).unwrap(), BigInt::parse_bytes(b"641883", 10).unwrap() ); } #[test] fn test_sqrtmod() { let a = BigInt::parse_bytes( b"6536923810004159332831702809452452174451353762940761092345538667656658715568", 10, ) .unwrap(); let q = BigInt::parse_bytes( b"7237005577332262213973186563042994240857116359379907606001950938285454250989", 10, ) .unwrap(); assert_eq!( (modsqrt(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); assert_eq!( (modsqrt_v2(&a, &q).unwrap()).to_string(), "5464794816676661649783249706827271879994893912039750480019443499440603127256" ); } }
old_r, &mut r); old_s -= &quotient * &s; std::mem::swap(&mut old_s, &mut s); old_t -= quotient * &t; std::mem::swap(&mut old_t, &mut t); } let _quotients = (t, s); // == (a, b) / gcd (old_r, old_s, old_t) } */ pub fn concatenate_arrays<T: Clone>(x: &[T], y: &[T]) -> Vec<T> { x.iter().chain(y).cloned().collect() } pub fn modsqrt(a: &BigInt, q: &BigInt) -> Result<BigInt, String> { let zero: BigInt = Zero::zero(); let one: BigInt = One::one(); if l
random
[ { "content": "pub fn decompress_signature(b: &[u8; 64]) -> Result<Signature, String> {\n\n let r_b8_bytes: [u8; 32] = *array_ref!(b[..32], 0, 32);\n\n let s: BigInt = BigInt::from_bytes_le(Sign::Plus, &b[32..]);\n\n let r_b8 = decompress_point(r_b8_bytes);\n\n match r_b8 {\n\n Result::Err(err) => return Err(err.to_string()),\n\n Result::Ok(res) => Ok(Signature {\n\n r_b8: res.clone(),\n\n s: s,\n\n }),\n\n }\n\n}\n\n\n\npub struct PrivateKey {\n\n pub key: [u8; 32],\n\n}\n\n\n\nimpl PrivateKey {\n\n pub fn import(b: Vec<u8>) -> Result<PrivateKey, String> {\n\n if b.len() != 32 {\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 0, "score": 130150.43659043944 }, { "content": "pub fn verify_schnorr(pk: Point, m: BigInt, r: Point, s: BigInt) -> Result<bool, String> {\n\n // sG = s·G\n\n let sg = B8.mul_scalar(&s);\n\n\n\n // r + h · x\n\n let h = schnorr_hash(&pk, m, &r)?;\n\n let pk_h = pk.mul_scalar(&h);\n\n let right = r.projective().add(&pk_h.projective());\n\n\n\n Ok(sg.equals(right.affine()))\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 4, "score": 118156.15997979333 }, { "content": "pub fn decompress_point(bb: [u8; 32]) -> Result<Point, String> {\n\n // https://tools.ietf.org/html/rfc8032#section-5.2.3\n\n let mut sign: bool = false;\n\n let mut b = bb.clone();\n\n if b[31] & 0x80 != 0x00 {\n\n sign = true;\n\n b[31] = b[31] & 0x7F;\n\n }\n\n let y: BigInt = BigInt::from_bytes_le(Sign::Plus, &b[..]);\n\n if y >= Q.clone() {\n\n return Err(\"y outside the Finite Field over R\".to_string());\n\n }\n\n let one: BigInt = One::one();\n\n\n\n // x^2 = (1 - y^2) / (a - d * y^2) (mod p)\n\n let den = utils::modinv(\n\n &utils::modulus(\n\n &(&A_big.clone() - utils::modulus(&(&D_big.clone() * (&y * &y)), &Q)),\n\n &Q,\n\n ),\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 6, "score": 116219.75112991959 }, { "content": "pub fn to_hex_string(bytes: Vec<u8>) -> String {\n\n let strs: Vec<String> = bytes.iter()\n\n .map(|b| format!(\"{:02X}\", b))\n\n .collect();\n\n strs.join(\"\")\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn unpack_point(compressed_point: *const c_char) -> *mut c_char {\n\n let compressed_point_str = unsafe { CStr::from_ptr(compressed_point) }.to_str().unwrap();\n\n let y_bytes_raw = compressed_point_str.from_hex().unwrap();\n\n let mut y_bytes: [u8; 32] = [0; 32];\n\n y_bytes.copy_from_slice(&y_bytes_raw);\n\n let p = decompress_point(y_bytes).unwrap();\n\n let x_big = BigInt::parse_bytes(to_hex(&p.x).as_bytes(), 16).unwrap();\n\n let y_big = BigInt::parse_bytes(to_hex(&p.y).as_bytes(), 16).unwrap();\n\n let mut result_string: String = \"\".to_owned();\n\n result_string.push_str(&x_big.to_string());\n\n result_string.push_str(\",\");\n\n result_string.push_str(&y_big.to_string());\n", "file_path": "rust/src/lib.rs", "rank": 7, "score": 114060.05298965443 }, { "content": "pub fn schnorr_hash(pk: &Point, msg: BigInt, c: &Point) -> Result<BigInt, String> {\n\n if msg > Q.clone() {\n\n return Err(\"msg outside the Finite Field\".to_string());\n\n }\n\n let msg_fr: Fr = Fr::from_str(&msg.to_string()).unwrap();\n\n let hm_input = vec![pk.x.clone(), pk.y.clone(), c.x.clone(), c.y.clone(), msg_fr];\n\n let h = poseidon.hash(hm_input)?;\n\n let h_b = BigInt::parse_bytes(to_hex(&h).as_bytes(), 16).unwrap();\n\n Ok(h_b)\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 8, "score": 114051.15629314492 }, { "content": "pub fn new_key() -> PrivateKey {\n\n // https://tools.ietf.org/html/rfc8032#section-5.1.5\n\n let mut rng = rand6::thread_rng();\n\n let sk_raw = 
rng.gen_biguint(1024).to_bigint().unwrap();\n\n let (_, sk_raw_bytes) = sk_raw.to_bytes_be();\n\n PrivateKey::import(sk_raw_bytes[..32].to_vec()).unwrap()\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 10, "score": 102383.46588309195 }, { "content": "pub fn test_bit(b: &Vec<u8>, i: usize) -> bool {\n\n return b[i / 8] & (1 << (i % 8)) != 0;\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 13, "score": 96911.79586202512 }, { "content": "pub fn verify(pk: Point, sig: Signature, msg: BigInt) -> bool {\n\n if msg > Q.clone() {\n\n return false;\n\n }\n\n let msg_fr: Fr = Fr::from_str(&msg.to_string()).unwrap();\n\n let hm_input = vec![\n\n sig.r_b8.x.clone(),\n\n sig.r_b8.y.clone(),\n\n pk.x.clone(),\n\n pk.y.clone(),\n\n msg_fr,\n\n ];\n\n let hm = match poseidon.hash(hm_input) {\n\n Result::Err(_) => return false,\n\n Result::Ok(hm) => hm,\n\n };\n\n let l = B8.mul_scalar(&sig.s);\n\n let hm_b = BigInt::parse_bytes(to_hex(&hm).as_bytes(), 16).unwrap();\n\n let r = sig\n\n .r_b8\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 15, "score": 88814.95735606081 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let p: Point = Point {\n\n x: babyjubjub_rs::Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: babyjubjub_rs::Fr::from_str(\n\n \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n\n )\n\n .unwrap(),\n\n };\n\n let q = p.clone();\n\n\n\n let p_projective = p.projective();\n\n let q_projective = q.projective();\n\n\n\n c.bench_function(\"add\", |b| b.iter(|| p_projective.add(&q_projective)));\n\n let r: BigInt = BigInt::parse_bytes(b\"3\", 10).unwrap();\n\n c.bench_function(\"mul_scalar_small\", |b| b.iter(|| p.mul_scalar(&r)));\n\n let r: BigInt = BigInt::parse_bytes(\n", "file_path": "rust/benches/bench_babyjubjub.rs", "rank": 17, "score": 79842.03118440544 }, { "content": "fn vector_as_u8_64_array(vector: Vec<u8>) -> [u8; 64] {\n\n let mut arr = [0u8;64];\n\n for (place, element) in arr.iter_mut().zip(vector.iter()) {\n\n *place = *element;\n\n }\n\n arr\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn pack_point(point_x: *const c_char, point_y: *const c_char) -> *mut c_char {\n\n let point_x_cstr = unsafe { CStr::from_ptr(point_x) };\n\n let point_x_str = match point_x_cstr.to_str() {\n\n Err(_) => \"there\",\n\n Ok(string) => string,\n\n };\n\n let point_y_cstr = unsafe { CStr::from_ptr(point_y) };\n\n let point_y_str = match point_y_cstr.to_str() {\n\n Err(_) => \"there\",\n\n Ok(string) => string,\n\n };\n\n let p: Point = Point {\n\n x: Fr::from_str(point_x_str).unwrap(),\n\n y: Fr::from_str(point_y_str).unwrap(),\n\n };\n\n\n\n let compressed_point = p.compress();\n\n let hex_string = to_hex_string(compressed_point.to_vec());\n\n CString::new(hex_string.as_str()).unwrap().into_raw()\n\n}\n\n\n", "file_path": "rust/src/lib.rs", "rank": 19, "score": 34868.945138495765 }, { "content": "//extern crate rand;\n\n//#[macro_use]\n\n//extern crate ff;\n\nuse ff::*;\n\n\n\nuse poseidon_rs::Poseidon;\n\npub type Fr = poseidon_rs::Fr; // alias\n\n\n\n//#[macro_use]\n\n//extern crate arrayref;\n\n//extern crate generic_array;\n\n//extern crate num;\n\n//extern crate num_bigint;\n\n//extern crate num_traits;\n\n\n\n//extern crate rand6;\n\nuse rand6::Rng;\n\n\n\n// use blake2::{Blake2b, Digest};\n\n//extern crate blake; // compatible version with Blake used at circomlib\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 20, "score": 26721.762679852374 }, { 
"content": " &Q,\n\n )?;\n\n let mut x: BigInt = utils::modulus(&((one - utils::modulus(&(&y * &y), &Q)) * den), &Q);\n\n x = utils::modsqrt(&x, &Q)?;\n\n\n\n if sign && !(&x > &(&Q.clone() >> 1)) || (!sign && (&x > &(&Q.clone() >> 1))) {\n\n x = x * -1.to_bigint().unwrap();\n\n }\n\n x = utils::modulus(&x, &Q);\n\n let x_fr: Fr = Fr::from_str(&x.to_string()).unwrap();\n\n let y_fr: Fr = Fr::from_str(&y.to_string()).unwrap();\n\n Ok(Point { x: x_fr, y: y_fr })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Signature {\n\n pub r_b8: Point,\n\n pub s: BigInt,\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 21, "score": 26718.718706242733 }, { "content": "//#[macro_use]\n\n//use blake_hash::Digest;\n\nuse blake::Blake;\n\n\n\nuse std::cmp::min;\n\n\n\nuse num_bigint::{BigInt, RandBigInt, Sign, ToBigInt};\n\nuse num_traits::One;\n\n\n\nuse generic_array::GenericArray;\n\n\n\npub mod utils;\n\n\n\nlazy_static! {\n\n static ref D: Fr = Fr::from_str(\"168696\").unwrap();\n\n static ref D_big: BigInt = BigInt::parse_bytes(b\"168696\", 10).unwrap();\n\n static ref A: Fr = Fr::from_str(\"168700\").unwrap();\n\n static ref A_big: BigInt = BigInt::parse_bytes(b\"168700\", 10).unwrap();\n\n pub static ref Q: BigInt = BigInt::parse_bytes(\n\n b\"21888242871839275222246405745257275088548364400416034343698204186575808495617\",10\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 22, "score": 26717.627445463055 }, { "content": " .projective()\n\n .add(&pk.mul_scalar(&(8.to_bigint().unwrap() * hm_b)).projective());\n\n l.equals(r.affine())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n extern crate rustc_hex;\n\n use rustc_hex::{FromHex, ToHex};\n\n\n\n #[test]\n\n fn test_add_same_point() {\n\n let p: PointProjective = PointProjective {\n\n x: Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 23, "score": 26715.23824193941 }, { "content": "#[derive(Clone, Debug)]\n\npub struct Point {\n\n pub x: Fr,\n\n pub y: Fr,\n\n}\n\n\n\nimpl Point {\n\n pub fn projective(&self) -> PointProjective {\n\n PointProjective {\n\n x: self.x.clone(),\n\n y: self.y.clone(),\n\n z: Fr::one(),\n\n }\n\n }\n\n\n\n pub fn mul_scalar(&self, n: &BigInt) -> Point {\n\n let mut r: PointProjective = PointProjective {\n\n x: Fr::zero(),\n\n y: Fr::one(),\n\n z: Fr::one(),\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 24, "score": 26715.013216628224 }, { "content": " h[31] = h[31] & 0x7F;\n\n h[31] = h[31] | 0x40;\n\n\n\n let sk = BigInt::from_bytes_le(Sign::Plus, &h[..]);\n\n sk >> 3\n\n }\n\n\n\n pub fn public(&self) -> Point {\n\n // https://tools.ietf.org/html/rfc8032#section-5.1.5\n\n let pk = B8.mul_scalar(&self.scalar_key());\n\n pk.clone()\n\n }\n\n\n\n pub fn sign(&self, msg: BigInt) -> Result<Signature, String> {\n\n if msg > Q.clone() {\n\n return Err(\"msg outside the Finite Field\".to_string());\n\n }\n\n // let (_, sk_bytes) = self.key.to_bytes_le();\n\n // let mut hasher = Blake2b::new();\n\n // hasher.update(sk_bytes);\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 25, "score": 26712.38495977256 }, { "content": " return Err(String::from(\"imported key can not be bigger than 32 bytes\"));\n\n }\n\n let mut sk: [u8; 32] = [0; 32];\n\n sk.copy_from_slice(&b[..32]);\n\n Ok(PrivateKey { key: sk })\n\n }\n\n\n\n pub fn scalar_key(&self) -> BigInt {\n\n // 
not-compatible with circomlib implementation, but using Blake2b\n\n // let mut hasher = Blake2b::new();\n\n // hasher.update(sk_raw_bytes);\n\n // let mut h = hasher.finalize();\n\n\n\n // compatible with circomlib implementation\n\n //let hash = blake_hash::Blake512::digest(&self.key.to_vec());\n\n let mut hash = [0; 64];\n\n blake::hash(512,&self.key,&mut hash).unwrap();\n\n let mut h: Vec<u8> = hash[..32].to_vec();\n\n\n\n h[0] = h[0] & 0xF8;\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 26, "score": 26712.255939604664 }, { "content": "\n\n let hm_input = vec![r8.x.clone(), r8.y.clone(), a.x.clone(), a.y.clone(), msg_fr];\n\n let hm = poseidon.hash(hm_input)?;\n\n\n\n let mut s = &self.scalar_key() << 3;\n\n let hm_b = BigInt::parse_bytes(to_hex(&hm).as_bytes(), 16).unwrap();\n\n s = hm_b * s;\n\n s = r + s;\n\n s = s % &SUBORDER.clone();\n\n\n\n Ok(Signature {\n\n r_b8: r8.clone(),\n\n s: s,\n\n })\n\n }\n\n\n\n pub fn sign_schnorr(&self, m: BigInt) -> Result<(Point, BigInt), String> {\n\n // random r\n\n let mut rng = rand6::thread_rng();\n\n let k = rng.gen_biguint(1024).to_bigint().unwrap();\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 27, "score": 26711.6182046414 }, { "content": " 10,\n\n )\n\n .unwrap()\n\n >> 3;\n\n static ref poseidon: poseidon_rs::Poseidon = Poseidon::new();\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct PointProjective {\n\n pub x: Fr,\n\n pub y: Fr,\n\n pub z: Fr,\n\n}\n\n\n\nimpl PointProjective {\n\n pub fn affine(&self) -> Point {\n\n if self.z.is_zero() {\n\n return Point {\n\n x: Fr::zero(),\n\n y: Fr::zero(),\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 28, "score": 26710.999456142323 }, { "content": " // let mut h = hasher.finalize(); // h: hash(sk), s: h[32:64]\n\n let mut h = [0; 64];\n\n blake::hash(512,&self.key,&mut h).unwrap();\n\n //let mut h = blake_hash::Blake512::digest(&self.key);\n\n\n\n let (_, msg_bytes) = msg.to_bytes_le();\n\n let mut msg32: [u8; 32] = [0; 32];\n\n msg32[..msg_bytes.len()].copy_from_slice(&msg_bytes[..]);\n\n let msg_fr: Fr = Fr::from_str(&msg.to_string()).unwrap();\n\n\n\n // https://tools.ietf.org/html/rfc8032#section-5.1.6\n\n let s = GenericArray::<u8, generic_array::typenum::U32>::from_mut_slice(&mut h[32..64]);\n\n let r_bytes = utils::concatenate_arrays(s, &msg32);\n\n let mut r_hashed = [0; 64];\n\n blake::hash(512,&r_bytes,&mut r_hashed).unwrap();\n\n //let r_hashed = blake_hash::Blake512::digest(&r_bytes);\n\n let mut r = BigInt::from_bytes_le(Sign::Plus, &r_hashed[..]);\n\n r = utils::modulus(&r, &SUBORDER);\n\n let r8: Point = B8.mul_scalar(&r);\n\n let a = &self.public();\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 29, "score": 26710.131699551966 }, { "content": " if &x_big > &(&Q.clone() >> 1) {\n\n r[31] = r[31] | 0x80;\n\n }\n\n r\n\n }\n\n\n\n pub fn equals(&self, p: Point) -> bool {\n\n if self.x == p.x && self.y == p.y {\n\n return true;\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 30, "score": 26709.85634921739 }, { "content": "impl Signature {\n\n pub fn compress(&self) -> [u8; 64] {\n\n let mut b: Vec<u8> = Vec::new();\n\n b.append(&mut self.r_b8.compress().to_vec());\n\n let (_, s_bytes) = self.s.to_bytes_le();\n\n let mut s_32bytes: [u8; 32] = [0; 32];\n\n let len = min(s_bytes.len(), s_32bytes.len());\n\n s_32bytes[..len].copy_from_slice(&s_bytes[..len]);\n\n b.append(&mut s_32bytes.to_vec());\n\n let mut r: [u8; 64] = [0; 64];\n\n r[..].copy_from_slice(&b[..]);\n\n r\n\n }\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 31, 
"score": 26709.45056172218 }, { "content": " };\n\n }\n\n\n\n let zinv = self.z.inverse().unwrap();\n\n let mut x = self.x.clone();\n\n x.mul_assign(&zinv);\n\n let mut y = self.y.clone();\n\n y.mul_assign(&zinv);\n\n\n\n Point {\n\n x: x.clone(),\n\n y: y.clone(),\n\n }\n\n }\n\n pub fn add(&self, q: &PointProjective) -> PointProjective {\n\n // add-2008-bbjlp https://hyperelliptic.org/EFD/g1p/auto-twisted-projective.html#doubling-dbl-2008-bbjlp\n\n let mut a = self.z.clone();\n\n a.mul_assign(&q.z);\n\n let mut b = a;\n\n b.square();\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 32, "score": 26709.25528051731 }, { "content": " };\n\n let mut exp: PointProjective = self.projective();\n\n let (_, b) = n.to_bytes_le();\n\n for i in 0..n.bits() {\n\n if test_bit(&b, i) {\n\n r = r.add(&exp);\n\n }\n\n exp = exp.add(&exp);\n\n }\n\n r.affine()\n\n }\n\n\n\n pub fn compress(&self) -> [u8; 32] {\n\n let p = &self;\n\n let mut r: [u8; 32] = [0; 32];\n\n let x_big = BigInt::parse_bytes(to_hex(&p.x).as_bytes(), 16).unwrap();\n\n let y_big = BigInt::parse_bytes(to_hex(&p.y).as_bytes(), 16).unwrap();\n\n let (_, y_bytes) = y_big.to_bytes_le();\n\n let len = min(y_bytes.len(), r.len());\n\n r[..len].copy_from_slice(&y_bytes[..len]);\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 33, "score": 26708.273149566667 }, { "content": " let expected_px: Fr =\n\n Fr::from_str(&BigInt::from_bytes_le(Sign::Plus, &e_px_bytes).to_string()).unwrap();\n\n assert_eq!(&p.x, &expected_px);\n\n }\n\n\n\n #[test]\n\n fn test_point_decompress_loop() {\n\n for _ in 0..5 {\n\n let random_bytes = rand6::thread_rng().gen::<[u8; 32]>();\n\n let sk_raw: BigInt = BigInt::from_bytes_le(Sign::Plus, &random_bytes[..]);\n\n let (_, sk_raw_bytes) = sk_raw.to_bytes_be();\n\n let mut h = [0; 64];\n\n blake::hash(512,&sk_raw_bytes,&mut h).unwrap();\n\n //let mut h = blake_hash::Blake512::digest(&sk_raw_bytes);\n\n\n\n h[0] = h[0] & 0xF8;\n\n h[31] = h[31] & 0x7F;\n\n h[31] = h[31] | 0x40;\n\n\n\n let sk = BigInt::from_bytes_le(Sign::Plus, &h[..]);\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 34, "score": 26708.047454234926 }, { "content": " e_px_bytes.copy_from_slice(&expected_px_raw);\n\n let expected_px: Fr =\n\n Fr::from_str(&BigInt::from_bytes_le(Sign::Plus, &e_px_bytes).to_string()).unwrap();\n\n assert_eq!(&p.x, &expected_px);\n\n }\n\n\n\n #[test]\n\n fn test_point_decompress1() {\n\n let y_bytes_raw = \"70552d3ff548e09266ded29b33ce75139672b062b02aa66bb0d9247ffecf1d0b\"\n\n .from_hex()\n\n .unwrap();\n\n let mut y_bytes: [u8; 32] = [0; 32];\n\n y_bytes.copy_from_slice(&y_bytes_raw);\n\n let p = decompress_point(y_bytes).unwrap();\n\n\n\n let expected_px_raw = \"30f1635ba7d56f9cb32c3ffbe6dca508a68c7f43936af11a23c785ce98cb3404\"\n\n .from_hex()\n\n .unwrap();\n\n let mut e_px_bytes: [u8; 32] = [0; 32];\n\n e_px_bytes.copy_from_slice(&expected_px_raw);\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 35, "score": 26707.912706433995 }, { "content": " let mut c = self.x.clone();\n\n c.mul_assign(&q.x);\n\n let mut d = self.y.clone();\n\n d.mul_assign(&q.y);\n\n let mut e = D.clone();\n\n e.mul_assign(&c);\n\n e.mul_assign(&d);\n\n let mut f = b;\n\n f.sub_assign(&e);\n\n let mut g = b;\n\n g.add_assign(&e);\n\n let mut x1y1 = self.x.clone();\n\n x1y1.add_assign(&self.y);\n\n let mut x2y2 = q.clone().x;\n\n x2y2.add_assign(&q.y);\n\n let mut aux = x1y1;\n\n aux.mul_assign(&x2y2);\n\n aux.sub_assign(&c);\n\n aux.sub_assign(&d);\n\n let mut x3 = a;\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 36, "score": 
26707.701081614265 }, { "content": " sig.r_b8.x.to_string(),\n\n \"Fr(0x192b4e51adf302c8139d356d0e08e2404b5ace440ef41fc78f5c4f2428df0765)\"\n\n );\n\n assert_eq!(\n\n sig.r_b8.y.to_string(),\n\n \"Fr(0x2202bebcf57b820863e0acc88970b6ca7d987a0d513c2ddeb42e3f5d31b4eddf)\"\n\n );\n\n assert_eq!(\n\n sig.s.to_string(),\n\n \"1672775540645840396591609181675628451599263765380031905495115170613215233181\"\n\n );\n\n let v = verify(pk, sig, msg);\n\n assert_eq!(v, true);\n\n }\n\n}\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 37, "score": 26707.378627076305 }, { "content": " x3.mul_assign(&f);\n\n x3.mul_assign(&aux);\n\n let mut ac = A.clone();\n\n ac.mul_assign(&c);\n\n let mut dac = d;\n\n dac.sub_assign(&ac);\n\n let mut y3 = a;\n\n y3.mul_assign(&g);\n\n y3.mul_assign(&dac);\n\n let mut z3 = f;\n\n z3.mul_assign(&g);\n\n\n\n PointProjective {\n\n x: x3.clone(),\n\n y: y3.clone(),\n\n z: z3.clone(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 38, "score": 26707.267134215814 }, { "content": " sk.scalar_key().to_string(),\n\n \"6466070937662820620902051049739362987537906109895538826186780010858059362905\"\n\n );\n\n\n\n // test public key\n\n let pk = sk.public();\n\n assert_eq!(\n\n pk.x.to_string(),\n\n \"Fr(0x1d5ac1f31407018b7d413a4f52c8f74463b30e6ac2238220ad8b254de4eaa3a2)\"\n\n );\n\n assert_eq!(\n\n pk.y.to_string(),\n\n \"Fr(0x1e1de8a908826c3f9ac2e0ceee929ecd0caf3b99b3ef24523aaab796a6f733c4)\"\n\n );\n\n\n\n // test signature & verification\n\n let msg = BigInt::from_bytes_le(Sign::Plus, &hex::decode(\"00010203040506070809\").unwrap());\n\n println!(\"msg {:?}\", msg.to_string());\n\n let sig = sk.sign(msg.clone()).unwrap();\n\n assert_eq!(\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 39, "score": 26707.24018780104 }, { "content": " )\n\n .unwrap(),\n\n z: Fr::one(),\n\n };\n\n let q: PointProjective = PointProjective {\n\n x: Fr::from_str(\n\n \"16540640123574156134436876038791482806971768689494387082833631921987005038935\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"20819045374670962167435360035096875258406992893633759881276124905556507972311\",\n\n )\n\n .unwrap(),\n\n z: Fr::one(),\n\n };\n\n let res = p.add(&q).affine();\n\n assert_eq!(\n\n res.x,\n\n Fr::from_str(\n\n \"7916061937171219682591368294088513039687205273691143098332585753343424131937\"\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 40, "score": 26707.12249192744 }, { "content": " )\n\n .unwrap(),\n\n z: Fr::one(),\n\n };\n\n let q: PointProjective = PointProjective {\n\n x: Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n\n )\n\n .unwrap(),\n\n z: Fr::one(),\n\n };\n\n let res = p.add(&q).affine();\n\n assert_eq!(\n\n res.x,\n\n Fr::from_str(\n\n \"6890855772600357754907169075114257697580319025794532037257385534741338397365\"\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 41, "score": 26707.12249192744 }, { "content": " \"53b81ed5bffe9545b54016234682e7b2f699bd42a5e9eae27ff4051bc698ce85\"\n\n );\n\n let p2 = decompress_point(p_comp).unwrap();\n\n assert_eq!(p.x, p2.x);\n\n assert_eq!(p.y, p2.y);\n\n }\n\n\n\n #[test]\n\n fn test_point_decompress0() {\n\n let y_bytes_raw = \"b5328f8791d48f20bec6e481d91c7ada235f1facf22547901c18656b6c3e042f\"\n\n .from_hex()\n\n .unwrap();\n\n let mut y_bytes: [u8; 32] = [0; 32];\n\n y_bytes.copy_from_slice(&y_bytes_raw);\n\n let p = 
decompress_point(y_bytes).unwrap();\n\n\n\n let expected_px_raw = \"b86cc8d9c97daef0afe1a4753c54fb2d8a530dc74c7eee4e72b3fdf2496d2113\"\n\n .from_hex()\n\n .unwrap();\n\n let mut e_px_bytes: [u8; 32] = [0; 32];\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 42, "score": 26706.60319796539 }, { "content": "\n\n #[test]\n\n fn test_circomlib_testvector() {\n\n let sk_raw_bytes =\n\n hex::decode(\"0001020304050607080900010203040506070809000102030405060708090001\")\n\n .unwrap();\n\n\n\n // test blake compatible with circomlib implementation\n\n let mut h = [0; 64];\n\n blake::hash(512,&sk_raw_bytes,&mut h).unwrap();\n\n //let h = blake_hash::Blake512::digest(&sk_raw_bytes);\n\n assert_eq!(h.to_hex(), \"c992db23d6290c70ffcc02f7abeb00b9d00fa8b43e55d7949c28ba6be7545d3253882a61bd004a236ef1cdba01b27ba0aedfb08eefdbfb7c19657c880b43ddf1\");\n\n\n\n // test private key\n\n let sk = PrivateKey::import(\n\n hex::decode(\"0001020304050607080900010203040506070809000102030405060708090001\")\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n assert_eq!(\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 43, "score": 26706.549945166855 }, { "content": " \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n\n )\n\n .unwrap(),\n\n };\n\n let res_m = p.mul_scalar(&3.to_bigint().unwrap());\n\n let res_a = p.projective().add(&p.projective());\n\n let res_a = res_a.add(&p.projective()).affine();\n\n assert_eq!(res_m.x, res_a.x);\n\n assert_eq!(\n\n res_m.x,\n\n Fr::from_str(\n\n \"19372461775513343691590086534037741906533799473648040012278229434133483800898\"\n\n )\n\n .unwrap()\n\n );\n\n assert_eq!(\n\n res_m.y,\n\n Fr::from_str(\n\n \"9458658722007214007257525444427903161243386465067105737478306991484593958249\"\n\n )\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 44, "score": 26706.18328091801 }, { "content": " )\n\n .unwrap();\n\n pub static ref B8: Point = Point {\n\n x: Fr::from_str(\n\n \"5299619240641551281634865583518297030282874472190772894086521144482721001553\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"16950150798460657717958625567821834550301663161624707787222815936182638968203\",\n\n )\n\n .unwrap(),\n\n };\n\n static ref ORDER: Fr = Fr::from_str(\n\n \"21888242871839275222246405745257275088614511777268538073601725287587578984328\",\n\n )\n\n .unwrap();\n\n\n\n // SUBORDER = ORDER >> 3\n\n static ref SUBORDER: BigInt = &BigInt::parse_bytes(\n\n b\"21888242871839275222246405745257275088614511777268538073601725287587578984328\",\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 45, "score": 26706.140161397165 }, { "content": " )\n\n .unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_new_key_sign_verify_0() {\n\n let sk = new_key();\n\n let pk = sk.public();\n\n let msg = 5.to_bigint().unwrap();\n\n let sig = sk.sign(msg.clone()).unwrap();\n\n let v = verify(pk, sig, msg);\n\n assert_eq!(v, true);\n\n }\n\n\n\n #[test]\n\n fn test_new_key_sign_verify_1() {\n\n let sk = new_key();\n\n let pk = sk.public();\n\n let msg = BigInt::parse_bytes(b\"123456789012345678901234567890\", 10).unwrap();\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 46, "score": 26705.85717442024 }, { "content": " let point = B8.mul_scalar(&sk);\n\n let cmp_point = point.compress();\n\n let dcmp_point = decompress_point(cmp_point).unwrap();\n\n\n\n assert_eq!(&point.x, &dcmp_point.x);\n\n assert_eq!(&point.y, &dcmp_point.y);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_signature_compress_decompress() {\n\n let sk = new_key();\n\n let pk = sk.public();\n\n\n\n for i in 0..5 {\n\n let msg_raw = 
\"123456\".to_owned() + &i.to_string();\n\n let msg = BigInt::parse_bytes(msg_raw.as_bytes(), 10).unwrap();\n\n let sig = sk.sign(msg.clone()).unwrap();\n\n\n\n let compressed_sig = sig.compress();\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 47, "score": 26705.60835222439 }, { "content": " let decompressed_sig = decompress_signature(&compressed_sig).unwrap();\n\n assert_eq!(&sig.r_b8.x, &decompressed_sig.r_b8.x);\n\n assert_eq!(&sig.r_b8.y, &decompressed_sig.r_b8.y);\n\n assert_eq!(&sig.s, &decompressed_sig.s);\n\n\n\n let v = verify(pk.clone(), decompressed_sig, msg);\n\n assert_eq!(v, true);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_schnorr_signature() {\n\n let sk = new_key();\n\n let pk = sk.public();\n\n\n\n let msg = BigInt::parse_bytes(b\"123456789012345678901234567890\", 10).unwrap();\n\n let (s, e) = sk.sign_schnorr(msg.clone()).unwrap();\n\n let verification = verify_schnorr(pk, msg, s, e).unwrap();\n\n assert_eq!(true, verification);\n\n }\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 48, "score": 26703.89650942749 }, { "content": " let sig = sk.sign(msg.clone()).unwrap();\n\n let v = verify(pk, sig, msg);\n\n assert_eq!(v, true);\n\n }\n\n\n\n #[test]\n\n fn test_point_compress_decompress() {\n\n let p: Point = Point {\n\n x: Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n\n )\n\n .unwrap(),\n\n };\n\n let p_comp = p.compress();\n\n assert_eq!(\n\n p_comp[..].to_hex(),\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 49, "score": 26703.89650942749 }, { "content": " )\n\n .unwrap()\n\n );\n\n assert_eq!(\n\n res.y,\n\n Fr::from_str(\n\n \"4338620300185947561074059802482547481416142213883829469920100239455078257889\"\n\n )\n\n .unwrap()\n\n );\n\n }\n\n #[test]\n\n fn test_add_different_points() {\n\n let p: PointProjective = PointProjective {\n\n x: Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n \"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 50, "score": 26703.89650942749 }, { "content": "\n\n // r = k·G\n\n let r = B8.mul_scalar(&k);\n\n\n\n // h = H(x, r, m)\n\n let pk = &self.public();\n\n let h = schnorr_hash(&pk, m, &r)?;\n\n\n\n // s= k+x·h\n\n let sk_scalar = self.scalar_key();\n\n let s = k + &sk_scalar * &h;\n\n Ok((r, s))\n\n }\n\n}\n\n\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 51, "score": 26703.89650942749 }, { "content": " )\n\n .unwrap()\n\n );\n\n assert_eq!(\n\n res.y,\n\n Fr::from_str(\n\n \"14035240266687799601661095864649209771790948434046947201833777492504781204499\"\n\n )\n\n .unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_mul_scalar() {\n\n let p: Point = Point {\n\n x: Fr::from_str(\n\n \"17777552123799933955779906779655732241715742912184938656739573121738514868268\",\n\n )\n\n .unwrap(),\n\n y: Fr::from_str(\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 52, "score": 26703.89650942749 }, { "content": " .unwrap()\n\n );\n\n\n\n let n = BigInt::parse_bytes(\n\n b\"14035240266687799601661095864649209771790948434046947201833777492504781204499\",\n\n 10,\n\n )\n\n .unwrap();\n\n let res2 = p.mul_scalar(&n);\n\n assert_eq!(\n\n res2.x,\n\n Fr::from_str(\n\n \"17070357974431721403481313912716834497662307308519659060910483826664480189605\"\n\n )\n\n .unwrap()\n\n );\n\n 
assert_eq!(\n\n res2.y,\n\n Fr::from_str(\n\n \"4014745322800118607127020275658861516666525056516280575712425373174125159339\"\n", "file_path": "rust/src/eddsa/mod.rs", "rank": 53, "score": 26703.89650942749 }, { "content": "// WARNING still updating the code, it works, but is still in process the refactor.\n\n\n\npub mod eddsa;\n\n\n\nuse poseidon_rs::Poseidon;\n\npub type Fr = poseidon_rs::Fr;\n\n\n\n#[macro_use]\n\nextern crate ff;\n\n\n\n#[macro_use]\n\nextern crate arrayref;\n\nextern crate generic_array;\n\n//extern crate mimc_rs;\n\nextern crate num;\n\nextern crate num_bigint;\n\nextern crate num_traits;\n\nextern crate rand6;\n\nextern crate rand;\n\nextern crate blake; // compatible version with Blake used at circomlib\n", "file_path": "rust/src/lib.rs", "rank": 55, "score": 19.698877312953947 }, { "content": "use criterion::{criterion_group, criterion_main, Criterion};\n\n\n\nextern crate rand;\n\n#[macro_use]\n\nextern crate ff;\n\nuse ff::*;\n\n\n\nextern crate num;\n\nextern crate num_bigint;\n\nuse num_bigint::{BigInt, ToBigInt};\n\n\n\nuse babyjubjub_rs::{utils, Point};\n\nuse babyjubjub::eddsa::Point;\n\n\n", "file_path": "rust/benches/bench_babyjubjub.rs", "rank": 56, "score": 16.850098250975726 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\nuse ff::*;\n\nuse std::str;\n\n\n\nuse crate::eddsa::{Signature, decompress_point, Point, PrivateKey, verify, decompress_signature, /*compress_point,*/ PointProjective, Q, B8, new_key};\n\nuse num_bigint::{Sign, BigInt, ToBigInt};\n\nuse std::os::raw::{c_char};\n\nuse std::ffi::{CStr, CString};\n\nuse std::cmp::min;\n\nuse std::str::FromStr;\n\nuse num_traits::{Num, ToPrimitive};\n\nuse rustc_hex::{FromHex, ToHex};\n\nuse num::Zero;\n\n\n\n/*lazy_static! {\n\n static ref B8: Point = Point {\n\n x: Fr::from_str(\n\n \"5299619240641551281634865583518297030282874472190772894086521144482721001553\",\n", "file_path": "rust/src/lib.rs", "rank": 57, "score": 16.4432337194356 }, { "content": "\n\n if verify(pk.public(), sig.clone(), message_bigint.clone()) {\n\n CString::new(\"1\".to_owned()).unwrap().into_raw()\n\n } else {\n\n CString::new(\"0\".to_owned()).unwrap().into_raw()\n\n }\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn cstring_free(str: *mut c_char) {\n\n unsafe {\n\n if str.is_null() { return }\n\n CString::from_raw(str)\n\n };\n\n}", "file_path": "rust/src/lib.rs", "rank": 60, "score": 12.020223788309524 }, { "content": "}\n\n\n\n#[no_mangle]\n\npub extern fn sign_poseidon(private_key: *const c_char, msg: *const c_char) -> *mut c_char {\n\n let private_key_str = unsafe { CStr::from_ptr(private_key) }.to_str().unwrap();\n\n //let pk_bigint = BigInt::from_str(private_key_str).unwrap();\n\n let pk_bytes_raw = private_key_str.from_hex().unwrap();\n\n let mut pk_bytes: [u8; 32] = [0; 32];\n\n pk_bytes.copy_from_slice(&pk_bytes_raw);\n\n let pk = PrivateKey { key: pk_bytes };\n\n let message_str = unsafe { CStr::from_ptr(msg) }.to_str().unwrap();\n\n let message_bigint = BigInt::from_str(message_str).unwrap();\n\n let sig = pk.sign(message_bigint.clone()).unwrap();\n\n let compressed_signature = sig.compress();\n\n let hex_string = compressed_signature.to_hex();\n\n CString::new(hex_string.as_str()).unwrap().into_raw()\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn verify_poseidon(private_key: *const c_char, compressed_signature: *const c_char, message: *const c_char) -> *mut c_char {\n", "file_path": "rust/src/lib.rs", "rank": 61, "score": 10.961448016153879 }, { "content": " )\n\n .unwrap(),\n\n y: Fr::from_str(\n\n 
\"16950150798460657717958625567821834550301663161624707787222815936182638968203\",\n\n )\n\n .unwrap(),\n\n // z: Fr::one(),\n\n };\n\n}*/\n\n\n\n#[no_mangle]\n\npub extern fn pack_signature(signature: *const c_char) -> *mut c_char {\n\n let signature_cstr = unsafe { CStr::from_ptr(signature) };\n\n let signature_str = match signature_cstr.to_str() {\n\n Err(_) => \"there\",\n\n Ok(string) => string,\n\n };\n\n let signature_bytes_raw = signature_str.from_hex().unwrap();\n\n let mut signature_bytes: [u8; 64] = [0; 64];\n\n signature_bytes.copy_from_slice(&signature_bytes_raw);\n", "file_path": "rust/src/lib.rs", "rank": 62, "score": 10.854639292250011 }, { "content": " CString::new(result_string.as_str()).unwrap().into_raw()\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn prv2pub(private_key: *const c_char) -> *mut c_char {\n\n /*let private_key_bytes: [u8; 32] = *array_ref!(private_key[..32], 0, 32);\n\n let private_key = PrivateKey::import(private_key_bytes.to_vec()).unwrap();*/\n\n let private_key_str = unsafe { CStr::from_ptr(private_key) }.to_str().unwrap();\n\n //let pk_bigint = BigInt::from_str(private_key_str).unwrap();\n\n let pk_bytes_raw = private_key_str.from_hex().unwrap();\n\n let mut pk_bytes: [u8; 32] = [0; 32];\n\n pk_bytes.copy_from_slice(&pk_bytes_raw);\n\n let pk = PrivateKey { key: pk_bytes };\n\n let public_key = pk.public();\n\n let mut result_string: String = \"\".to_owned();\n\n result_string.push_str(&public_key.x.to_string());\n\n result_string.push_str(\",\");\n\n result_string.push_str(&public_key.y.to_string());\n\n CString::new(result_string.as_str()).unwrap().into_raw()\n\n}\n", "file_path": "rust/src/lib.rs", "rank": 63, "score": 9.802141515999786 }, { "content": "\n\n let sig = Signature { r_b8 : r_b8.clone(), s };\n\n let res = sig.compress();\n\n\n\n let hex_string = to_hex_string(res.to_vec());\n\n CString::new(hex_string.as_str()).unwrap().into_raw()\n\n}\n\n\n\n#[no_mangle]\n\npub extern fn unpack_signature(compressed_signature: *const c_char) -> *mut c_char {\n\n let compressed_signature_cstr = unsafe { CStr::from_ptr(compressed_signature) };\n\n let compressed_signature_str = match compressed_signature_cstr.to_str() {\n\n Err(_) => \"there\",\n\n Ok(string) => string,\n\n };\n\n let compressed_signature_bytes_raw = compressed_signature_str.from_hex().unwrap();\n\n let mut compressed_signature_bytes: [u8; 64] = [0; 64];\n\n compressed_signature_bytes.copy_from_slice(&compressed_signature_bytes_raw);\n\n let decompressed_sig = decompress_signature(&compressed_signature_bytes).unwrap();\n\n\n", "file_path": "rust/src/lib.rs", "rank": 65, "score": 8.260580907379968 }, { "content": " let private_key_str = unsafe { CStr::from_ptr(private_key) }.to_str().unwrap();\n\n // let pk_bigint = BigInt::from_str(private_key_str).unwrap();\n\n let pk_bytes_raw = private_key_str.from_hex().unwrap();\n\n let mut pk_bytes: [u8; 32] = [0; 32];\n\n pk_bytes.copy_from_slice(&pk_bytes_raw);\n\n let pk = PrivateKey { key: pk_bytes };\n\n let compressed_signature_str = unsafe { CStr::from_ptr(compressed_signature) }.to_str().unwrap();\n\n let signature_bytes_raw = compressed_signature_str.from_hex().unwrap();\n\n let mut signature_bytes: [u8; 64] = [0; 64];\n\n signature_bytes.copy_from_slice(&signature_bytes_raw);\n\n let sig = decompress_signature(&signature_bytes).unwrap();\n\n let message_c_str = unsafe { CStr::from_ptr(message) };\n\n let message_str = match message_c_str.to_str() {\n\n Err(_) => \"there\",\n\n Ok(string) => string,\n\n };\n\n let message_bigint = match 
message_str.parse::<i32>() {\n\n Ok(n) => BigInt::from(n),\n\n Err(e) => BigInt::zero(),\n\n };\n", "file_path": "rust/src/lib.rs", "rank": 66, "score": 6.9742681543511456 }, { "content": " s_32bytes[..lens].copy_from_slice(&s_bytes[..lens]);\n\n b.append(&mut s_32bytes.to_vec());\n\n\n\n let mut r: [u8; 64] = [0; 64];\n\n let res_len = min(r.len(), b.len());\n\n r[..res_len].copy_from_slice(&b[..res_len]);\n\n\n\n let hex_string = to_hex_string(r.to_vec());\n\n CString::new(hex_string.as_str()).unwrap().into_raw()\n\n}\n\n\n", "file_path": "rust/src/lib.rs", "rank": 67, "score": 6.311359747077326 }, { "content": "```\n\n\n\nWe can see that the field accountIndex is formed by the token symbol it holds and an index. A Hermez account can only hold one type of token. Account indexes start at 256. Indexes 0-255 are reserved for internal use. Note that the balances do not match with the ammount deposited of 1 ETH because accounts already existed in Hermez Network before the deposit, so we performed a deposit on top instead.\n\n\n\nAlternatively, an account query can be filtered using the assigned accountIndex\n\n\n\n```dart\n\n final account1ByIdx = coordinatorApi.getAccount(infoAccountSender.accountIndex);\n\n\n\n final account2ByIdx = coordinatorApi.getAccount(infoAccountReceiver.accountIndex);\n\n\n\n```\n\n\n\n```json\n\n[\n\n {\n\n \"accountIndex\": \"hez:ETH:4253\",\n\n \"balance\": \"1099600000000000000\",\n\n \"bjj\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"hezEthereumAddress\": \"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"itemId\": 4342,\n\n \"nonce\": 1,\n\n \"token\": {\n\n \"USD\": 1789,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n },\n\n {\n\n \"accountIndex\": \"hez:ETH:4254\",\n\n \"balance\": \"1097100000000000000\",\n\n \"bjj\": \"hez:HESLP_6Kp_nn5ANmSGiOnhhYvF3wF5Davf7xGi6lwh3U\",\n\n \"hezEthereumAddress\": \"hez:0x12FfCe7D5d6d09564768d0FFC0774218458162d4\",\n\n \"itemId\": 4343,\n\n \"nonce\": 6,\n\n \"token\": {\n\n \"USD\": 1789,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n }\n\n]\n\n```\n\n\n", "file_path": "README.md", "rank": 69, "score": 3.6723924601833837 }, { "content": " //let x_string = to_hex_string(r_b8_bytes[..16].to_vec());\n\n //let x_str = x_string.as_str();\n\n //let y_string = to_hex_string(r_b8_bytes[16..].to_vec());\n\n\n\n //let r_b8 = decompress_point(r_b8_bytes).unwrap();\n\n //let y_str = y_string.as_str();\n\n //let x_big = BigInt::parse_bytes(&r_b8_bytes[0..15], 16).unwrap();\n\n //let y_big = BigInt::parse_bytes(&r_b8_bytes[15..32], 16).unwrap();\n\n let x_big: BigInt = BigInt::from_bytes_le(Sign::Plus, &r_b8_bytes[0..15]);\n\n let y_big: BigInt = BigInt::from_bytes_le(Sign::Plus, &r_b8_bytes[15..32]);\n\n //let y_big = x_big.clone();\n\n\n\n let r_b8: Point = Point {\n\n x: Fr::from_str(\n\n &x_big.to_string(),\n\n ).unwrap(),\n\n y: Fr::from_str(\n\n &y_big.to_string(),\n\n ).unwrap(),\n\n };\n", "file_path": "rust/src/lib.rs", "rank": 70, "score": 3.530941508163268 }, { "content": " 
pack_signature(\"16727755406458403965916091816756284515992637653800319054951151706132152331811672775540645840396591609181675628451599263765380031\");\n\n unpack_signature(\"16727755406458403965916091816756284515992637653800319054951151706132152331811672775540645840396591609181675628451599263765380031\");\n\n pack_point(\"17777552123799933955779906779655732241715742912184938656739573121738514868268\", \"2626589144620713026669568689430873010625803728049924121243784502389097019475\");\n\n unpack_point(\"53b81ed5bffe9545b54016234682e7b2f699bd42a5e9eae27ff4051bc698ce85\");\n\n prv2pub(\"0001020304050607080900010203040506070809000102030405060708090001\");\n\n hash_poseidon(\"\", \"\", \"\", \"\", \"\", \"\");\n\n sign_poseidon(\"\", \"\");\n\n verify_poseidon(\"\", \"\", \"\");\n\n let str = \"string\"\n\n let unsafePointer = UnsafeMutablePointer<Int8>(mutating: (str as NSString).utf8String)\n\n cstring_free(unsafePointer);\n\n }\n\n}\n", "file_path": "ios/Classes/SwiftHermezPlugin.swift", "rank": 71, "score": 3.2770434981696717 }, { "content": "\n\n#[no_mangle]\n\npub extern fn hash_poseidon(tx_compressed_data: *const c_char, to_eth_addr: *const c_char, to_bjj_ay: *const c_char, rq_txcompressed_data_v2: *const c_char, rq_to_eth_addr: *const c_char, rq_to_bjj_ay: *const c_char) -> *mut c_char {\n\n let tx_compressed_data_str = unsafe { CStr::from_ptr(tx_compressed_data) }.to_str().unwrap();\n\n let b0: Fr = Fr::from_str(tx_compressed_data_str).unwrap();\n\n let to_eth_addr_str = unsafe { CStr::from_ptr(to_eth_addr) }.to_str().unwrap();\n\n let b1: Fr = Fr::from_str(to_eth_addr_str).unwrap();\n\n let to_bjj_ay_str = unsafe { CStr::from_ptr(to_bjj_ay) }.to_str().unwrap();\n\n let b2: Fr = Fr::from_str(to_bjj_ay_str).unwrap();\n\n let rq_txcompressed_data_v2_str = unsafe { CStr::from_ptr(rq_txcompressed_data_v2) }.to_str().unwrap();\n\n let b3: Fr = Fr::from_str(rq_txcompressed_data_v2_str).unwrap();\n\n let rq_to_eth_addr_str = unsafe { CStr::from_ptr(rq_to_eth_addr) }.to_str().unwrap();\n\n let b4: Fr = Fr::from_str(rq_to_eth_addr_str).unwrap();\n\n let rq_to_bjj_ay_str = unsafe { CStr::from_ptr(rq_to_bjj_ay) }.to_str().unwrap();\n\n let b5: Fr = Fr::from_str(rq_to_bjj_ay_str).unwrap();\n\n\n\n let hm_input = vec![b0.clone(), b1.clone(), b2.clone(), b3.clone(), b4.clone(), b5.clone()];\n\n let poseidon = Poseidon::new();\n\n let hm = poseidon.hash(hm_input).unwrap();\n\n return CString::new(to_hex(&hm).as_str()).unwrap().into_raw();\n", "file_path": "rust/src/lib.rs", "rank": 72, "score": 3.269585523272513 }, { "content": "# hermez_sdk\n\n\n\n[![pub package](https://img.shields.io/badge/pub-1.0.0-orange)](https://pub.dev/packages/hermez_sdk)\n\n[![build](https://github.com/hermeznetwork/hermez_flutter_sdk/workflows/hermez_sdk/badge.svg)](https://github.com/hermeznetwork/hermez_flutter_sdk/actions?query=workflow%3Ahermez_sdk)\n\n[![license](https://img.shields.io/badge/License-Apache-blue.svg)](https://github.com/hermeznetwork/hermez_mobile_library/blob/master/LICENSE)\n\n\n\n## Description\n\n\n\nThis is a flutter Plugin for Hermez Mobile SDK (https://hermez.io). 
This plugin provides a cross-platform tool (iOS, Android) to communicate with the Hermez API and network.\n\n\n\n## Installation\n\n\n\nTo use this plugin, add `hermez_sdk` as a [dependency](https://flutter.io/using-packages/) in your `pubspec.yaml` file like this\n\n\n\n```yaml\n\ndependencies:\n\n hermez_sdk: ^x.y.z\n\n```\n\nThis will get you the latest version.\n\n\n\nIf you want to test a specific branch of the repository, pull `hermez_sdk` like this\n\n\n\n```yaml\n\ndependencies:\n\n hermez_sdk:\n\n git:\n\n url: ssh://[email protected]/hermeznetwork/hermez-flutter-sdk.git\n\n ref: branchPathName\n\n```\n\n\n\nAlso, add the abi contracts in json files to the assets folder of your project and to your `pubspec.yaml` file like this\n\n\n\n```yaml\n\nassets:\n\n - HermezABI.json\n\n - ERC20ABI.json\n\n - WithdrawalDelayerABI.json\n\n```\n\n\n\n## Features and bugs\n\n\n\nPlease file feature requests and bugs at the [issue tracker][tracker].\n\n\n\n[tracker]: https://github.com/hermeznetwork/hermez_flutter_sdk/issues\n\n\n\n## Setup\n\n\n\nNOTE: In order to interact with Hermez, you will need to supply your own Ethereum node. You can check these links to help you set up a node (https://blog.infura.io/getting-started-with-infura-28e41844cc89, https://blog.infura.io/getting-started-with-infuras-ethereum-api).\n\n\n\n## Usage\n\n\n\nTo start using this package first import it in your Dart file.\n\n\n\n```dart\n\nimport 'package:hermez_sdk/hermez_sdk.dart';\n\n```\n\n\n", "file_path": "README.md", "rank": 73, "score": 3.249494623186226 }, { "content": " let mut b: Vec<u8> = Vec::new();\n\n\n\n let x_big = BigInt::parse_bytes(to_hex(&decompressed_sig.r_b8.x).as_bytes(), 16).unwrap();\n\n let y_big = BigInt::parse_bytes(to_hex(&decompressed_sig.r_b8.y).as_bytes(), 16).unwrap();\n\n let (_, x_bytes) = x_big.to_bytes_le();\n\n let (_, y_bytes) = y_big.to_bytes_le();\n\n\n\n let mut x_16bytes: [u8; 16] = [0; 16];\n\n let lenx = min(x_bytes.len(), x_16bytes.len());\n\n x_16bytes[..lenx].copy_from_slice(&x_bytes[..lenx]);\n\n b.append(&mut x_16bytes.to_vec());\n\n\n\n let mut y_16bytes: [u8; 16] = [0; 16];\n\n let leny = min(y_bytes.len(), y_16bytes.len());\n\n y_16bytes[..leny].copy_from_slice(&y_bytes[..leny]);\n\n b.append(&mut y_16bytes.to_vec());\n\n\n\n let (_, s_bytes) = decompressed_sig.s.to_bytes_le();\n\n let mut s_32bytes: [u8; 32] = [0; 32];\n\n let lens = min(s_bytes.len(), s_32bytes.len());\n", "file_path": "rust/src/lib.rs", "rank": 74, "score": 3.2054562614318542 }, { "content": "\n\n let r_b8_bytes: [u8; 32] = *array_ref!(signature_bytes[..32], 0, 32);\n\n let s: BigInt = BigInt::from_bytes_le(Sign::Plus, &signature_bytes[32..]);\n\n\n\n //let x_big = BigInt::parse_bytes(&r_b8_bytes[..16], 16).unwrap();\n\n //let y_big = BigInt::parse_bytes(&r_b8_bytes[16..], 16).unwrap();\n\n /*let (_, x_bytes) = x_big.to_bytes_le();\n\n let (_, y_bytes) = y_big.to_bytes_le();\n\n\n\n let mut x_16bytes: [u8; 16] = [0; 16];\n\n let lenx = min(x_bytes.len(), x_16bytes.len());\n\n x_16bytes[..lenx].copy_from_slice(&x_bytes[..lenx]);\n\n b.append(&mut x_16bytes.to_vec());\n\n\n\n let mut y_16bytes: [u8; 16] = [0; 16];\n\n let leny = min(y_bytes.len(), y_16bytes.len());\n\n y_16bytes[..leny].copy_from_slice(&y_bytes[..leny]);\n\n b.append(&mut y_16bytes.to_vec());*/\n\n\n\n\n", "file_path": "rust/src/lib.rs", "rank": 76, "score": 2.9100146347788383 }, { "content": "### Initialization\n\n\n\nTo initialize the Hermez SDK you can call the init method with one of the supported environments as a parameter, 
or setup all the different parameters passing the environment 'custom'.\n\n\n\n```dart\n\nHermezSDK.init(\n\n 'rinkeby',\n\n web3ApiKey: EXAMPLES_WEB3_API_KEY\n\n);\n\n```\n\n\n\nor \n\n\n\n```dart\n\nHermezSDK.init(\n\n 'custom',\n\n envParams: EnvParams(\n\n EXAMPLES_WEB3_CHAIN_ID,\n\n {\n\n ContractName.hermez: EXAMPLES_HERMEZ_ROLLUP_ADDRESS, // Hermez\n\n ContractName.withdrawalDelayer:\n\n EXAMPLES_HERMEZ_WDELAYER_ADDRESS, // WithdrawalDelayer\n\n },\n\n EXAMPLES_HERMEZ_API_URL,\n\n EXAMPLES_HERMEZ_EXPLORER_URL,\n\n EXAMPLES_WEB3_URL + EXAMPLES_WEB3_API_KEY,\n\n EXAMPLES_WEB3_RDP_URL + EXAMPLES_WEB3_API_KEY),\n\n);\n\n```\n\n\n\n### Supported Tokens\n\n\n\nBefore being able to operate on the Hermez Network, we must ensure that the token we want to operate with is listed. For that we make a call to the Hermez Coordinator API that will list all available tokens. All tokens in Hermez Network must be ERC20.\n\n\n\nWe can see there are 2 tokens registered. ETH will always be configured at index 0. The second token is HEZ. For the rest of the examples we will work with ETH. In the future, more tokens will be included in Hermez.\n\n\n\n```dart\n\nimport 'package:hermez_sdk/api.dart' as coordinatorApi;\n\nimport 'package:hermez_sdk/model/tokens_response.dart';\n\n\n\n...\n\n\n\nFuture<TokensResponse> getHermezSupportedTokens() async {\n\n TokensResponse tokensResponse = await coordinatorApi.getTokens();\n\n return tokensResponse;\n\n}\n\n```\n\n\n\n```json\n\n{\n\n \"tokens\": [\n\n {\n\n \"itemId\": 1,\n\n \"id\": 0,\n\n \"ethereumBlockNum\": 0,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\",\n\n \"decimals\": 18,\n\n \"USD\": 1787,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\"\n\n },\n\n {\n\n \"itemId\": 2,\n\n \"id\": 1,\n\n \"ethereumBlockNum\": 8153596,\n\n \"ethereumAddress\": \"0x2521bc90b4f5fb9a8d61278197e5ff5cdbc4fbf2\",\n\n \"name\": \"Hermez Network Token\",\n\n \"symbol\": \"HEZ\",\n\n \"decimals\": 18,\n\n \"USD\": 5.365,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.386805Z\"\n\n }\n\n ],\n\n \"pendingItems\": 0\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 77, "score": 2.849629966703832 }, { "content": "### Create Wallet\n\n\n\nWe can create a new Hermez wallet by providing the Ethereum private key of an Ethereum account. This wallet will store the Ethereum and Baby JubJub keys for the Hermez account. The Ethereum address is used to authorize L1 transactions, and the Baby JubJub key is used to authorize L2 transactions. We will create two wallets.\n\n\n\n> [!NOTE]\n\n> You will need to supply two private keys to test and initialize both accounts. The keys provided here are invalid and are shown as an example.\n\n\n\n```dart\n\nimport 'package:hermez_sdk/hermez_wallet.dart';\n\n\n\n...\n\n\n\nvoid createHermezWallets() async {\n\n // load first account\n\n final wallet =\n\n await HermezWallet.createWalletFromPrivateKey(EXAMPLES_PRIVATE_KEY1);\n\n final HermezWallet hermezWallet = wallet[0];\n\n final String hermezEthereumAddress = wallet[1];\n\n\n\n // load second account\n\n final wallet2 =\n\n await HermezWallet.createWalletFromPrivateKey(EXAMPLES_PRIVATE_KEY2);\n\n final HermezWallet hermezWallet2 = wallet2[0];\n\n final String hermezEthereumAddress2 = wallet2[1];\n\n}\n\n```\n\n\n\n### Move tokens from Ethereum to Hermez Network\n\n\n\nCreating a Hermez account and depositing tokens is done simultaneously as an L1 transaction. 
In this example we are going to deposit 1 ETH tokens into the newly created Hermez accounts.\n\n\n\n```dart\n\nimport 'package:hermez_sdk/tx.dart' as tx;\n\nimport 'package:hermez_sdk/utils.dart';\n\nimport 'package:hermez_sdk/hermez_compressed_amount.dart';\n\n\n\n...\n\n\n\nvoid moveTokensFromEthereumToHermez() async {\n\n \n\n // load account and ethereum token\n\n\n\n ...\n\n\n\n // set amount to transfer\n\n final amount = 1.0;\n\n final amountDeposit = getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedDepositAmount =\n\n HermezCompressedAmount.compressAmount(amountDeposit.toDouble());\n\n\n\n // perform deposit account 1\n\n String txHash = await tx.deposit(compressedDepositAmount, hermezEthereumAddress, tokenERC20,\n\n hermezWallet.publicKeyCompressedHex, EXAMPLES_PRIVATE_KEY1);\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 78, "score": 2.7984049565038047 }, { "content": "#### Withdraw\n\n\n\nAfter doing any type of Exit transaction, which moves the user's funds from their token account to a specific Exit Merkle tree, one needs to do a Withdraw of those funds to an Ethereum L1 account. To do a Withdraw we need to indicate the accountIndex that includes the Ethereum address where the funds will be transferred, the amount and type of tokens, and some information to verify the ownership of those tokens. Additionally, there is one boolean flag. If set to true, the Withdraw will be instantaneous.\n\n\n\n```dart\n\nvoid moveTokensFromHermezToEthereumStep2Withdraw() async {\n\n // load ethereum token and account\n\n\n\n ...\n\n\n\n final exitInfoN = (await coordinatorApi.getExits(\n\n hermezEthereumAddress, true, tokenERC20.id))\n\n .exits;\n\n\n\n if (exitInfoN != null && exitInfoN.length > 0) {\n\n final exitInfo = exitInfoN.last;\n\n // set to perform instant withdraw\n\n final isInstant = true;\n\n\n\n // perform withdraw\n\n tx.withdraw(\n\n double.parse(exitInfo.balance),\n\n exitInfo.accountIndex,\n\n exitInfo.token,\n\n hermezWallet.publicKeyCompressedHex,\n\n exitInfo.batchNum,\n\n exitInfo.merkleProof.siblings,\n\n EXAMPLES_PRIVATE_KEY1,\n\n isInstant: isInstant);\n\n }\n\n}\n\n```\n\n\n\nThe funds should now appear in the Ethereum account that made the withdrawal.\n\n\n", "file_path": "README.md", "rank": 79, "score": 2.7063200838800006 }, { "content": "### Create Internal Accounts\n\n\n\nUntil now we have seen that accounts have an Ethereum address and a Baby JubJub key. This is the case for normal accounts. However, there is a second type of account that only requires a Baby JubJub key. These accounts are called internal accounts.\n\n\n\nThe advantage of these accounts is that they are much more inexpensive to create than a normal account, since these accounts only exist on Hermez. The downside is that one cannot perform deposits or withdrawals from this type of account. However, there are some scenarios where these accounts are useful. For example, in those scenarios where one requires a temporary account. 
(for example, Exchanges could use these accounts to receive a transfer from users).F\n\n\n\n```dart\n\n // Create Internal Account\n\n // create new bjj private key to receive user transactions\n\n final Uint8List pvtBjjKey = Uint8List(32);\n\n pvtBjjKey.fillRange(0, 32, 1);\n\n\n\n // create rollup internal account from bjj private key\n\n final wallet4 = await HermezWallet.createWalletFromBjjPvtKey(pvtBjjKey);\n\n final hermezWallet4 = wallet4[0];\n\n\n\n // fee computation\n\n final state = await coordinatorApi.getState();\n\n final fees = state.recommendedFee;\n\n final usdTokenExchangeRate = tokenERC20.USD;\n\n final fee = fees.createAccountInternal / usdTokenExchangeRate;\n\n\n\n // set amount to transfer\n\n final amount = 0.0001;\n\n final amountTransferInternal =\n\n getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedTransferInternalAmount =\n\n HermezCompressedAmount.compressAmount(\n\n amountTransferInternal.toDouble());\n\n\n\n // generate L2 transaction\n\n final transferToInternal = {\n\n 'from': infoAccountSender.accountIndex,\n\n 'to': hermezWallet4.publicKeyBase64,\n\n 'amount': compressedTransferInternalAmount,\n\n 'fee': fee\n\n };\n\n\n\n final internalAccountResponse = await tx.generateAndSendL2Tx(\n\n transferToInternal, hermezWallet4, tokenERC20);\n", "file_path": "README.md", "rank": 80, "score": 2.5709597866107994 }, { "content": "# hermez_sdk_example\n\n\n\nDemonstrates how to use the hermez_sdk plugin.\n\n\n\n## Getting Started\n\n\n\nThis project is a starting point for a Flutter application.\n\n\n\nA few resources to get you started if this is your first Flutter project:\n\n\n\n- [Lab: Write your first Flutter app](https://flutter.dev/docs/get-started/codelab)\n\n- [Cookbook: Useful Flutter samples](https://flutter.dev/docs/cookbook)\n\n\n\nFor help getting started with Flutter, view our\n\n[online documentation](https://flutter.dev/docs), which offers tutorials,\n\nsamples, guidance on mobile development, and a full API reference.\n", "file_path": "example/README.md", "rank": 81, "score": 2.441704120070608 }, { "content": "### Create Account Authorization\n\n\n\nImagine that Bob wants to send a transfer of Ether to Mary using Hermez, but Mary only has an Ethereum account but no Hermez account. To complete this transfer, Mary could open a Hermez account and proceed as the previous transfer example. Alternatively, Mary could authorize the Coordinator to create a Hermez account on her behalf so that she can receive Bob's transfer.\n\n\n\nFirst we create a wallet for Mary:\n\n\n\n```dart\n\n // load third account\n\n final wallet3 =\n\n await HermezWallet.createWalletFromPrivateKey(EXAMPLES_PRIVATE_KEY3);\n\n final HermezWallet hermezWallet3 = wallet3[0];\n\n final String hermezEthereumAddress3 = wallet3[1];\n\n```\n\n\n\nThe authorization for the creation of a Hermez account is done using the private key stored in the newly created Hermez wallet.\n\n\n\nNOTE: that the account is not created at this moment. The account will be created when Bob performs the transfer. 
Also, it is Bob that pays for the fees associated with the account creation.\n\n\n\n```dart\n\n final signature = await hermezWallet3\n\n .signCreateAccountAuthorization(EXAMPLES_PRIVATE_KEY3);\n\n final res = await coordinatorApi.postCreateAccountAuthorization(\n\n hermezWallet3.hermezEthereumAddress,\n\n hermezWallet3.publicKeyBase64,\n\n signature);\n\n```\n\n\n\nWe can find out if the Coordinator has been authorized to create a Hermez account on behalf of a user by:\n\n\n\n```dart\n\nfinal authResponse = await coordinatorApi.getCreateAccountAuthorization(hermezWallet3.hermezEthereumAddress);\n\n```\n\n\n\n```json\n\n{\n\n \"hezEthereumAddress\": \"hez:0xd3B6DcfCA7Eb3207905Be27Ddfa69453625ffbf9\",\n\n \"bjj\": \"hez:ct0ml6FjdUN6uGUHZ70qOq5-58cZ19SJDeldMH021oOk\",\n\n \"signature\": \"0x22ffc6f8d569a92c48a4e784a11a9e57b840fac21eaa7fedc9dc040c4a45d502744a35eeb0ab173234c0f687b252bd0364647bff8db270ffcdf1830257de28e41c\",\n\n \"timestamp\": \"2021-03-16T14:56:05.295946Z\"\n\n}\n", "file_path": "README.md", "rank": 82, "score": 2.2190432921994034 }, { "content": "## 1.0.0+3\n\n\n\n* changed license file\n\n* minor fixes\n\n\n\n## 1.0.0+2\n\n\n\n* added metadata for pub.dev\n\n* fixed readme file\n\n\n\n## 1.0.0+1\n\n\n\n* adds support for dart null safety\n\n* formatted files\n\n\n\n## 1.0.0\n\n\n\n* adds support for iOS and Android 🎉 \n\n* Hermez wallets creation\n\n* complete communication with coordinators Api\n\n* adds Deposit, Withdraw and Transfers operations\n", "file_path": "CHANGELOG.md", "rank": 83, "score": 2.149366205969357 }, { "content": "# How you can contribute to hermez_sdk \n\n\n\nHi nice to see you here. 🙌🎉\n\n\n\nThank you for taking the time to contribute to this package ! 👍\n\n\n\nThis document should be a set of guidelines that can help you to create a meaningful\n\npull request or issue. Don't see this as any kind of rules, this is a living document,\n\nso if you have an idea how to optimize this propose changes in a pull request.\n\n\n\n## How do I propose a change?\n\n\n\nChanges to the public SDK should be done via an issue. So we can discuss the proposed \n\nchanges before you put any work into them.\n\n\n\nIf you are fixing a bug, you can just submit a pull request. We do recommend filing an issue\n\nas well to get an overview what it is that you are fixing.\n\nThis is helpful in case we don’t accept that specific fix but want to keep\n\ntrack of the issue.\n\n\n\n## What do I do before creating a pull request\n\n\n\n1. Fork the repository and branch out of `master`, prefixing your branch's name with `feature/`, `bug/` or `task/` to indicate the scope of the PR.\n\n1. Install all dependencies (`flutter packages get` or `pub get`)\n\n1. Ensure you have a meaningful PR name using the [imperative mood](https://chris.beams.io/posts/git-commit/#imperative) as all your commits will be squashed upon merge and the PR's name will be used as the merge commit's message.\n\n1. If the PR can be broken down into multiple meaningful PRs please do so so that it is easier to review.\n\n1. If you’ve fixed a bug or added code that should be tested, add tests!\n\n1. If you've changed the public SDK, make sure to update/add documentation (for now that is the [Readme](README.md))\n\n1. If you've made breaking changes give us a heads up in the pull request. Try to provide a compatibility path for the deprecated code and if necessary provide migration instructions in the [Readme](README.md).\n\n1. Format your code (`dartfmt -w .`)\n\n1. Analyze your code (`flutter analyze`)\n\n1. 
Create the Pull Request\n\n1. Verify that all status checks are passing\n\n\n\nWhile the prerequisites above must be satisfied prior to having your\n\npull request reviewed, the reviewer(s) may ask you to complete additional\n\ndesign work, tests, or other changes before your pull request can be ultimately\n\naccepted.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 84, "score": 2.0570742935173048 }, { "content": "### Transfers\n\n\n\nFirst, we compute the fees for the transaction. For this we consult the recommended fees from the Coordinator.\n\n\n\n```dart\n\n // fee computation\n\n final state = await coordinatorApi.getState();\n\n final fees = state.recommendedFee;\n\n```\n\n\n\n```json\n\n{\n\n \"existingAccount\": 96.34567219671051,\n\n \"createAccount\": 192.69134439342102,\n\n \"createAccountInternal\": 240.86418049177627\n\n}\n\n```\n\n\n\nThe returned fees are the suggested fees for different transactions:\n\n\n\n- existingAccount : Make a transfer to an existing account\n\n- createAccount : Make a transfer to a non-existent account, and create a regular account\n\n- createAccountInternal : Make a transfer to an non-existent account and create internal account\n\n\n\nThe fee amounts are given in USD. However, fees are payed in the token of the transaction. So, we need to do a conversion.\n\n\n\n```dart\n\n final usdTokenExchangeRate = tokenERC20.USD;\n\n final fee = fees.existingAccount / usdTokenExchangeRate;\n\n```\n\n\n\nFinally we make the final transfer transaction.\n\n\n\n```dart\n\n // set amount to transfer\n\n final amount = 0.0001;\n\n final amountTransfer = getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedTransferAmount =\n\n HermezCompressedAmount.compressAmount(amountTransfer.toDouble());\n\n // generate L2 transaction\n\n final l2TxTransfer = {\n\n from: infoAccountSender.accountIndex,\n\n to: infoAccountReceiver.accountIndex,\n\n amount: compressedTransferAmount,\n\n fee: fee\n\n };\n\n\n\n final transferResponse = await tx.generateAndSendL2Tx(l2TxTransfer, hermezWallet, infoAccountSender.token);\n\n```\n\n\n\n```json\n\n{\n\n \"status\": 200,\n\n \"id\": \"0x02e7c2c293173f21249058b1d71afd5b1f3c0de4f1a173bac9b9aa4a2d149483a2\",\n\n \"nonce\": 3\n\n}\n\n```\n\n\n\nThe result status 200 shows that transaction has been correctly received. Additionally, we receive the nonce matching the transaction we sent, and an id that we can use to verify the status of the transaction either using getHistoryTransaction() or getPoolTransaction().\n\n\n\nAs we saw with the Exit transaction, every transaction includes a ´nonce´. This nonce is a protection mechanism to avoid replay attacks. 
Every L2 transaction will increase the nonce by 1.\n\n\n", "file_path": "README.md", "rank": 85, "score": 2.027778423625065 }, { "content": "```\n\n\n\n```json\n\n[{\n\n \"accountIndex\": \"hez:ETH:4253\",\n\n \"balance\": \"477700000000000000\",\n\n \"bjj\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"hezEthereumAddress\": \"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"itemId\": 4342,\n\n \"nonce\": 4,\n\n \"token\": {\n\n \"USD\": 1793,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n},\n\n{\n\n \"accountIndex\": \"hez:ETH:256\",\n\n \"balance\": \"1874280899837791518\",\n\n \"bjj\": \"hez:YN2DmRh0QgDrxz3NLDqH947W5oNys7YWqkxsQmFVeI_m\",\n\n \"hezEthereumAddress\": \"hez:0x9F255048EC1141831A28019e497F3f76e559356E\",\n\n \"itemId\": 1,\n\n \"nonce\": 2,\n\n \"token\": {\n\n \"USD\": 1793,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n}]\n\n```\n\n\n", "file_path": "README.md", "rank": 86, "score": 1.8804148717587807 }, { "content": "import Flutter\n\nimport UIKit\n\n\n\npublic class SwiftHermezPlugin: NSObject, FlutterPlugin {\n\n public static func register(with registrar: FlutterPluginRegistrar) {\n\n //let channel = FlutterMethodChannel(name: \"hermez_sdk\", binaryMessenger: registrar.messenger())\n\n //let instance = SwiftHermezPlugin()\n\n // registrar.addMethodCallDelegate(instance, channel: channel)\n\n // We are not using Flutter channels here\n\n }\n\n\n\n public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {\n\n // result(\"iOS \" + UIDevice.current.systemVersion)\n\n // Noop\n\n \n\n result(nil)\n\n }\n\n\n\n\n\n public func dummyMethodToEnforceBundling() {\n", "file_path": "ios/Classes/SwiftHermezPlugin.swift", "rank": 87, "score": 1.6172556846536397 }, { "content": " b\"2626589144620713026669568689430873010625803728049924121243784502389097019475\",\n\n 10,\n\n )\n\n .unwrap();\n\n c.bench_function(\"mul_scalar\", |b| b.iter(|| p.mul_scalar(&r)));\n\n\n\n c.bench_function(\"point compress\", |b| b.iter(|| p.compress()));\n\n let p_comp = p.compress();\n\n c.bench_function(\"point decompress\", |b| {\n\n b.iter(|| babyjubjub_rs::decompress_point(p_comp))\n\n });\n\n\n\n let sk = babyjubjub_rs::new_key();\n\n let pk = sk.public().unwrap();\n\n let msg = 5.to_bigint().unwrap();\n\n c.bench_function(\"sign\", |b| b.iter(|| sk.sign(msg.clone())));\n\n let sig = sk.sign(msg.clone()).unwrap();\n\n c.bench_function(\"verify\", |b| {\n\n b.iter(|| babyjubjub_rs::verify(pk.clone(), sig.clone(), msg.clone()))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "rust/benches/bench_babyjubjub.rs", "rank": 88, "score": 1.5601817258096613 }, { "content": "```\n\n\n\n```json\n\n{\n\n \"L1Info\": null,\n\n \"L1orL2\": \"L2\",\n\n \"L2Info\": { \"fee\": 202, \"historicFeeUSD\": 182.8352, \"nonce\": 3 },\n\n \"amount\": \"100000000000000\",\n\n \"batchNum\": 4724,\n\n \"fromAccountIndex\": \"hez:ETH:4253\",\n\n \"fromBJJ\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"fromHezEthereumAddress\": 
\"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"historicUSD\": 0.17855,\n\n \"id\": \"0x02e7c2c293173f21249058b1d71afd5b1f3c0de4f1a173bac9b9aa4a2d149483a2\",\n\n \"itemId\": 14590,\n\n \"position\": 1,\n\n \"timestamp\": \"2021-03-16T13:24:48Z\",\n\n \"toAccountIndex\": \"hez:ETH:4254\",\n\n \"toBJJ\": \"hez:HESLP_6Kp_nn5ANmSGiOnhhYvF3wF5Davf7xGi6lwh3U\",\n\n \"toHezEthereumAddress\": \"hez:0x12FfCe7D5d6d09564768d0FFC0774218458162d4\",\n\n \"token\": {\n\n \"USD\": 1787.2,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n },\n\n \"type\": \"Transfer\"\n\n}\n\n```\n\n\n\nAt this point, the balances in both accounts will be updated with the result of the transfer\n\n\n\n```dart\n\n // get sender account information\n\n final infoAccountSender = (await coordinatorApi\n\n .getAccounts(hermezEthereumAddress, [tokenERC20.id]))\n\n .accounts[0];\n\n\n\n // get receiver account information\n\n final infoAccountReceiver = (await coordinatorApi\n\n .getAccounts(hermezEthereumAddress2, [tokenERC20.id]))\n\n .accounts[0];\n", "file_path": "README.md", "rank": 89, "score": 1.555817891810983 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 90, "score": 1.5554280972134427 }, { "content": "```\n\n\n\nOnce we verify the receiving Ethereum account has authorized the creation of a Hermez account, 
we can proceed with the transfer from Bob's account to Mary's account. For this, we set the destination address to Mary's Ethereum address and set the fee using the createAccount value.\n\n\n\n```dart\n\n // set amount to transfer\n\n final amount = 0.0001;\n\n final amountTransfer = getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedTransferAmount =\n\n HermezCompressedAmount.compressAmount(amountTransfer.toDouble());\n\n\n\n // fee computation\n\n final state = await coordinatorApi.getState();\n\n final fees = state.recommendedFee;\n\n final usdTokenExchangeRate = tokenERC20.USD;\n\n final fee = fees.createAccount / usdTokenExchangeRate;\n\n \n\n // generate L2 transaction\n\n final l2TransferTx = {\n\n \"from\": infoAccountSender.accountIndex,\n\n \"to\": hermezWallet3.hermezEthereumAddress,\n\n \"amount\": compressedTransferAmount,\n\n \"fee\": fee\n\n };\n\n\n\n final transferResponse = await tx.generateAndSendL2Tx(\n\n l2TransferTx, hermezWallet, infoAccountSender.token);\n\n```\n\n\n\n```json\n\n{\n\n \"status\": 200,\n\n \"id\": \"0x025398af5b69f132d8c2c5b7b225df1436baf7d1774a6b017a233bf273b4675c8f\",\n\n \"nonce\": 0\n\n}\n\n```\n\n\n\nAfter the transfer has been forged, we can check Mary's account on Hermez\n\n\n\n```dart\n\n// get receiver account information\n\n final infoAccountReceiver = (await coordinatorApi\n\n .getAccounts(hermezWallet3.hermezEthereumAddress, [tokenERC20.id]))\n\n .accounts[0];\n\n```\n\n\n\n```json\n\n{\n\n \"accountIndex\": \"hez:ETH:265\",\n\n \"balance\": \"1000000000000000\",\n\n \"bjj\": \"hez:ct0ml6FjdUN6uGUHZ70qOq5-58cZ19SJDeldMH021oOk\",\n\n \"hezEthereumAddress\": \"hez:0xd3B6DcfCA7Eb3207905Be27Ddfa69453625ffbf9\",\n\n \"itemId\": 10,\n\n \"nonce\": 0,\n\n \"token\": {\n\n \"USD\": 1795.94,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-03-16T14:56:57.460862Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 91, "score": 1.4953347038877038 }, { "content": "### Token Balance\n\n\n\nA token balance can be obtained by querying the API and passing the hermezEthereumAddress of the Hermez account.\n\n\n\n```dart\n\nvoid getTokenBalance() async {\n\n\n\n // load accounts and ethereum token\n\n \n\n ...\n\n\n\n // get sender account information\n\n final infoAccountSender = (await coordinatorApi\n\n .getAccounts(hermezEthereumAddress, [tokenERC20.id]))\n\n .accounts[0];\n\n\n\n // get receiver account information\n\n final infoAccountReceiver = (await coordinatorApi\n\n .getAccounts(hermezEthereumAddress2, [tokenERC20.id]))\n\n .accounts[0];\n\n}\n\n```\n\n\n\n```json\n\n[\n\n {\n\n \"accountIndex\": \"hez:ETH:4253\",\n\n \"balance\": \"1099600000000000000\",\n\n \"bjj\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"hezEthereumAddress\": \"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"itemId\": 4342,\n\n \"nonce\": 1,\n\n \"token\": {\n\n \"USD\": 1789,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n },\n\n {\n\n \"accountIndex\": \"hez:ETH:4254\",\n\n \"balance\": \"1097100000000000000\",\n\n \"bjj\": \"hez:HESLP_6Kp_nn5ANmSGiOnhhYvF3wF5Davf7xGi6lwh3U\",\n\n \"hezEthereumAddress\": 
\"hez:0x12FfCe7D5d6d09564768d0FFC0774218458162d4\",\n\n \"itemId\": 4343,\n\n \"nonce\": 6,\n\n \"token\": {\n\n \"USD\": 1789,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n }\n\n]\n", "file_path": "README.md", "rank": 92, "score": 1.467694995618201 }, { "content": "```\n\n\n\n```json\n\n{\n\n \"status\": 200,\n\n \"id\": \"0x02ac000f39eee60b198c85348443002991753de912337720b9ef85d48e9dcfe83e\",\n\n \"nonce\": 0\n\n}\n\n```\n\n\n\nOnce the transaction is forged, we can check the account information\n\n\n\n```dart\n\n // get internal account information\n\n final infoAccountInternal = (await coordinatorApi\n\n .getAccounts(hermezWallet4.publicKeyBase64, [tokenERC20.id]))\n\n .accounts[0];\n\n```\n\n\n\n```json\n\n{\n\n \"accountIndex\": \"hez:ETH:259\",\n\n \"balance\": \"1000000000000000000\",\n\n \"bjj\": \"hez:KmbnR34pOUhSaaPOkeWbaeZVjMqojfyYy8sYIHRSlaKx\",\n\n \"hezEthereumAddress\": \"hez:0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF\",\n\n \"itemId\": 4,\n\n \"nonce\": 0,\n\n \"token\": {\n\n \"USD\": 1798.51,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-03-16T15:44:08.33507Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n }\n\n}\n\n```\n\n\n\nWe can verify it is in fact an internal account because the associated hezEthereumAddress is hez:0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF.\n", "file_path": "README.md", "rank": 93, "score": 1.1969749499123203 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at [email protected]. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. 
The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 94, "score": 1.0484530906363219 }, { "content": "### Transaction Status\n\n\n\nTransactions received by the Coordinator will be stored in its transaction pool while they haven't been processed. To check a transaction in the transaction pool we make a query to the Coordinator node.\n\n\n\n```dart\n\nfinal txTransferPool = await coordinatorApi.getPoolTransaction(transferResponse['id']);\n\n```\n\n\n\n```json\n\n{\n\n \"amount\": \"100000000000000\",\n\n \"fee\": 202,\n\n \"fromAccountIndex\": \"hez:ETH:4253\",\n\n \"fromBJJ\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"fromHezEthereumAddress\": \"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"id\": \"0x02e7c2c293173f21249058b1d71afd5b1f3c0de4f1a173bac9b9aa4a2d149483a2\",\n\n \"info\": null,\n\n \"nonce\": 3,\n\n \"requestAmount\": null,\n\n \"requestFee\": null,\n\n \"requestFromAccountIndex\": null,\n\n \"requestNonce\": null,\n\n \"requestToAccountIndex\": null,\n\n \"requestToBJJ\": null,\n\n \"requestToHezEthereumAddress\": null,\n\n \"requestTokenId\": null,\n\n \"signature\": \"c9e1a61ce2c3c728c6ec970ae646b444a7ab9d30aa6015eb10fb729078c1302978fe9fb0419b4d944d4f11d83582043a48546dff7dda22de7c1e1da004cd5401\",\n\n \"state\": \"pend\",\n\n \"timestamp\": \"2021-03-16T13:20:33.336469Z\",\n\n \"toAccountIndex\": \"hez:ETH:4254\",\n\n \"toBjj\": \"hez:HESLP_6Kp_nn5ANmSGiOnhhYvF3wF5Davf7xGi6lwh3U\",\n\n \"toHezEthereumAddress\": \"hez:0x12FfCe7D5d6d09564768d0FFC0774218458162d4\",\n\n \"token\": {\n\n \"USD\": 1786,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n },\n\n \"type\": \"Transfer\"\n\n}\n\n```\n\n\n\nWe can also check directly with the Coordinator in the database of forged transactions.\n\n\n\n```dart\n\nfinal transferConf = await coordinatorApi.getHistoryTransaction(transferResponse['id']);\n", "file_path": "README.md", "rank": 95, "score": 0.8524299216859594 }, { "content": "### Move tokens from Hermez to Ethereum Network\n\n\n\nWithdrawing funds is a two step process:\n\n\n\n1. Exit\n\n2. Withdrawal\n\n\n\n#### Exit\n\n\n\nThe Exit transaction is used as a first step to retrieve the funds from Hermez Network back to Ethereum. There are two types of Exit transactions:\n\n\n\n- Normal Exit, referred as Exit from now on. This is a L2 transaction type.\n\n- Force Exit, an L1 transaction type which has extended guarantees that will be processed by the Coordinator. 
We will talk more about Force Exit here\n\n \n\nThe Exit is requested as follows:\n\n\n\n```dart\n\nvoid moveTokensFromHermezToEthereumStep1Exit() async {\n\n // load account\n\n\n\n ...\n\n \n\n // set amount to exit\n\n final amount = 0.0001;\n\n final amountExit = getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedExitAmount =\n\n HermezCompressedAmount.compressAmount(amountExit.toDouble());\n\n\n\n // set fee in transaction\n\n final state = await coordinatorApi.getState();\n\n final userFee = state.recommendedFee.existingAccount;\n\n\n\n // generate L2 transaction\n\n final l2ExitTx = {\n\n 'type': 'Exit',\n\n 'from': infoAccountSender.accountIndex,\n\n 'amount': compressedExitAmount,\n\n 'fee': userFee\n\n };\n\n\n\n final exitResponse = await tx.generateAndSendL2Tx(l2ExitTx, hermezWallet, infoAccountSender.token);\n\n }\n\n```\n\n\n\n```json\n\n{\n\n \"status\": 200,\n\n \"id\": \"0x0257305cdc43060a754a5c2ea6b0e0f6e28735ea8e75d841ca4a7377aa099d91b7\",\n\n \"nonce\": 2\n\n}\n\n```\n\n\n\nAfter submitting our Exit request to the Coordinator, we can check the status of the transaction by calling the Coordinator's Transaction Pool. The Coordinator's transaction pool stores all those transactions that are waiting to be forged.\n\n\n\n```dart\n\nfinal txExitPool = await coordinatorApi.getPoolTransaction(exitResponse['id']);\n", "file_path": "README.md", "rank": 96, "score": 0.8287801852112517 }, { "content": "#### Force Exit\n\n\n\nThis is the L1 equivalent of an Exit. With this option, the smart contract forces Coordinators to pick up L1 transactions before they pick up L2 transactions to ensure that L1 transactions will eventually be picked up.\n\n\n\nThis is a security measure. We don't expect users to need to make a Force Exit.\n\n\n\n```dart\n\nvoid moveTokensFromHermezToEthereumStep1ForceExit() async {\n\n // load ethereum token and account info\n\n \n\n ...\n\n\n\n // set amount to force exit\n\n final amount = 0.0001;\n\n final amountForceExit = getTokenAmountBigInt(amount, tokenERC20.decimals);\n\n final compressedForceExitAmount =\n\n HermezCompressedAmount.compressAmount(amountForceExit.toDouble());\n\n\n\n // perform force exit\n\n tx.forceExit(compressedForceExitAmount, infoAccountSender.accountIndex,\n\n tokenERC20, EXAMPLES_PRIVATE_KEY1);\n\n}\n\n```\n\n\n\nThe last step to recover the funds will be to send a new Withdraw request to the smart contract as we did after the regular Exit request.\n\n\n\n```dart\n\nvoid moveTokensFromHermezToEthereumStep2Withdraw() async {\n\n // load ethereum token and account\n\n\n\n ...\n\n\n\n final exitInfoN = (await coordinatorApi.getExits(\n\n hermezEthereumAddress, true, tokenERC20.id))\n\n .exits;\n\n\n\n if (exitInfoN != null && exitInfoN.length > 0) {\n\n final exitInfo = exitInfoN.last;\n\n // set to perform instant withdraw\n\n final isInstant = true;\n\n\n\n // perform withdraw\n\n tx.withdraw(\n\n double.parse(exitInfo.balance),\n\n exitInfo.accountIndex,\n\n exitInfo.token,\n\n hermezWallet.publicKeyCompressedHex,\n\n exitInfo.batchNum,\n\n exitInfo.merkleProof.siblings,\n\n EXAMPLES_PRIVATE_KEY1,\n\n isInstant: isInstant);\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 97, "score": 0.7936833332090201 }, { "content": "```\n\n\n\n```json\n\n{\n\n \"amount\": \"1000000000000000000\",\n\n \"fee\": 204,\n\n \"fromAccountIndex\": \"hez:ETH:4253\",\n\n \"fromBJJ\": \"hez:dMfPJlK_UtFqVByhP3FpvykOg5kAU3jMLD7OTx_4gwzO\",\n\n \"fromHezEthereumAddress\": 
\"hez:0x74d5531A3400f9b9d63729bA9C0E5172Ab0FD0f6\",\n\n \"id\": \"0x0257305cdc43060a754a5c2ea6b0e0f6e28735ea8e75d841ca4a7377aa099d91b7\",\n\n \"info\": null,\n\n \"nonce\": 2,\n\n \"requestAmount\": null,\n\n \"requestFee\": null,\n\n \"requestFromAccountIndex\": null,\n\n \"requestNonce\": null,\n\n \"requestToAccountIndex\": null,\n\n \"requestToBJJ\": null,\n\n \"requestToHezEthereumAddress\": null,\n\n \"requestTokenId\": null,\n\n \"signature\": \"38f23d06826be8ea5a0893ee67f4ede885a831523c0c626c102edb05e1cf890e418b5820e3e6d4b530386d0bc84b3c3933d655527993ad77a55bb735d5a67c03\",\n\n \"state\": \"pend\",\n\n \"timestamp\": \"2021-03-16T12:31:50.407428Z\",\n\n \"toAccountIndex\": \"hez:ETH:1\",\n\n \"toBjj\": null,\n\n \"toHezEthereumAddress\": null,\n\n \"token\": {\n\n \"USD\": 1781.9,\n\n \"decimals\": 18,\n\n \"ethereumAddress\": \"0x0000000000000000000000000000000000000000\",\n\n \"ethereumBlockNum\": 0,\n\n \"fiatUpdate\": \"2021-02-28T18:55:17.372008Z\",\n\n \"id\": 0,\n\n \"itemId\": 1,\n\n \"name\": \"Ether\",\n\n \"symbol\": \"ETH\"\n\n },\n\n \"type\": \"Exit\"\n\n}\n\n```\n\n\n\nWe can see the state field is set to pend (meaning pending). There are 4 possible states:\n\n\n\n1. pend : Pending\n\n2. fging : Forging\n\n3. fged : Forged\n\n4. invl : Invalid\n\n \n\nIf we continue polling the Coordinator about the status of the transaction, the state will eventually be set to fged.\n\n\n\nWe can also query the Coordinator to check whether or not our transaction has been forged. getHistoryTransaction reports those transactions that have been forged by the Coordinator.\n\n\n\n```dart\n\nfinal txExitConf = await coordinatorApi.getHistoryTransaction(txExitPool.id);\n\n```\n\n\n\nAnd we can confirm our account status and check that the correct amount has been transfered out of the account.\n\n\n\n```dart\n\nfinal accountResponse = await coordinatorApi.getAccounts(hermezEthereumAddress, [tokenERC20.id]);\n\nfinal infoAccount = accountResponse.accounts.length > 0 ? accountResponse.accounts[0]: null;\n\n```\n\n\n", "file_path": "README.md", "rank": 98, "score": 0.787651965351817 } ]
Rust
src/packages/string_more.rs
jonnyboyC/rhai
91963d10dc6fb5ab1a0e4ffc62f5ecc2643dfff8
#![allow(non_snake_case)] use crate::any::Dynamic; use crate::def_package; use crate::engine::Engine; use crate::fn_native::FnPtr; use crate::parser::{ImmutableString, INT}; use crate::plugin::*; use crate::utils::StaticVec; #[cfg(not(feature = "unchecked"))] use crate::{result::EvalAltResult, token::Position}; use crate::stdlib::{ any::TypeId, boxed::Box, format, mem, string::String, string::ToString, vec::Vec, }; macro_rules! gen_concat_functions { ($root:ident => $($arg_type:ident),+ ) => { pub mod $root { $(pub mod $arg_type { use super::super::*; #[export_fn] #[inline] pub fn append_func(x: &mut ImmutableString, y: $arg_type) -> String { format!("{}{}", x, y) } #[export_fn] #[inline] pub fn prepend_func(x: &mut $arg_type, y: ImmutableString) -> String { format!("{}{}", x, y) } })* } } } macro_rules! reg_functions { ($mod_name:ident += $root:ident ; $($arg_type:ident),+) => { $( set_exported_fn!($mod_name, "+", $root::$arg_type::append_func); set_exported_fn!($mod_name, "+", $root::$arg_type::prepend_func); )* } } def_package!(crate:MoreStringPackage:"Additional string utilities, including string building.", lib, { reg_functions!(lib += basic; INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] { reg_functions!(lib += numbers; i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(target_arch = "wasm32"))] reg_functions!(lib += num_128; i128, u128); } #[cfg(not(feature = "no_float"))] reg_functions!(lib += float; f32, f64); lib.combine_flatten(exported_module!(string_functions)); lib.set_raw_fn( "pad", &[TypeId::of::<ImmutableString>(), TypeId::of::<INT>(), TypeId::of::<char>()], |_engine: &Engine, _: &Module, args: &mut [&mut Dynamic]| { let len = *args[1].read_lock::<INT>().unwrap(); #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && len > 0 && (len as usize) > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, len as usize, Position::none(), ).into(); } if len > 0 { let ch = mem::take(args[2]).cast::<char>(); let mut s = args[0].write_lock::<ImmutableString>().unwrap(); let orig_len = s.chars().count(); if len as usize > orig_len { let p = s.make_mut(); for _ in 0..(len as usize - orig_len) { p.push(ch); } #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && s.len() > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, s.len(), Position::none(), ).into(); } } } Ok(()) }, ); lib.set_iter( TypeId::of::<ImmutableString>(), |arr| Box::new( arr.cast::<ImmutableString>().chars().collect::<Vec<_>>().into_iter().map(Into::into) ) as Box<dyn Iterator<Item = Dynamic>>, ); }); gen_concat_functions!(basic => INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] gen_concat_functions!(numbers => i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] #[cfg(not(target_arch = "wasm32"))] gen_concat_functions!(num_128 => i128, u128); #[cfg(not(feature = "no_float"))] gen_concat_functions!(float => f32, f64); #[export_module] mod string_functions { #[rhai_fn(name = "+")] #[inline(always)] pub fn add_append_unit(s: ImmutableString, _x: ()) -> ImmutableString { s } #[rhai_fn(name = "+")] #[inline(always)] pub fn add_prepend_unit(_x: (), s: ImmutableString) -> ImmutableString { s } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_char(s: &mut 
ImmutableString, ch: char) { *s += ch; } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_string(s: &mut ImmutableString, add: ImmutableString) { *s += &add; } #[inline(always)] pub fn len(s: &mut ImmutableString) -> INT { s.chars().count() as INT } #[rhai_fn(get = "len")] #[inline(always)] pub fn len_prop(s: &mut ImmutableString) -> INT { len(s) } #[inline(always)] pub fn clear(s: &mut ImmutableString) { s.make_mut().clear(); } pub fn truncate(s: &mut ImmutableString, len: INT) { if len > 0 { let chars: StaticVec<_> = s.chars().collect(); let copy = s.make_mut(); copy.clear(); copy.extend(chars.into_iter().take(len as usize)); } else { s.make_mut().clear(); } } pub fn trim(s: &mut ImmutableString) { let trimmed = s.trim(); if trimmed.len() < s.len() { *s = trimmed.to_string().into(); } } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_char(s: &mut ImmutableString, ch: char) -> bool { s.contains(ch) } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_string(s: &mut ImmutableString, find: ImmutableString) -> bool { s.contains(find.as_str()) } #[rhai_fn(name = "index_of")] pub fn index_of_char_starting_from(s: &mut ImmutableString, ch: char, start: INT) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(ch) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_char(s: &mut ImmutableString, ch: char) -> INT { s.find(ch) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_string_starting_from( s: &mut ImmutableString, find: ImmutableString, start: INT, ) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] 
.find(find.as_str()) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_string(s: &mut ImmutableString, find: ImmutableString) -> INT { s.find(find.as_str()) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "sub_string")] pub fn sub_string(s: ImmutableString, start: INT, len: INT) -> ImmutableString { let offset = if s.is_empty() || len <= 0 { return "".to_string().into(); } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return "".to_string().into(); } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; chars .iter() .skip(offset) .take(len) .cloned() .collect::<String>() .into() } #[rhai_fn(name = "sub_string")] #[inline(always)] pub fn sub_string_starting_from(s: ImmutableString, start: INT) -> ImmutableString { let len = s.len() as INT; sub_string(s, start, len) } #[rhai_fn(name = "crop")] pub fn crop_string(s: &mut ImmutableString, start: INT, len: INT) { let offset = if s.is_empty() || len <= 0 { s.make_mut().clear(); return; } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { s.make_mut().clear(); return; } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; let copy = s.make_mut(); copy.clear(); copy.extend(chars.iter().skip(offset).take(len)); } #[rhai_fn(name = "crop")] #[inline(always)] pub fn crop_string_starting_from(s: &mut ImmutableString, start: INT) { crop_string(s, start, s.len() as INT); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string(s: &mut ImmutableString, find: ImmutableString, sub: ImmutableString) { *s = s.replace(find.as_str(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string_with_char(s: &mut ImmutableString, find: ImmutableString, sub: char) { *s = s.replace(find.as_str(), &sub.to_string()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char_with_string(s: &mut ImmutableString, find: char, sub: ImmutableString) { *s = s.replace(&find.to_string(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char(s: &mut ImmutableString, find: char, sub: char) { *s = s.replace(&find.to_string(), &sub.to_string()).into(); } #[cfg(not(feature = "no_index"))] pub mod arrays { use crate::engine::Array; #[rhai_fn(name = "+")] #[inline] pub fn append(x: &mut ImmutableString, y: Array) -> String { format!("{}{:?}", x, y) } #[rhai_fn(name = "+")] #[inline] pub fn prepend(x: &mut Array, y: ImmutableString) -> String { format!("{:?}{}", x, y) } } }
#![allow(non_snake_case)] use crate::any::Dynamic; use crate::def_package; use crate::engine::Engine; use crate::fn_native::FnPtr; use crate::parser::{ImmutableString, INT}; use crate::plugin::*; use crate::utils::StaticVec; #[cfg(not(feature = "unchecked"))] use crate::{result::EvalAltResult, token::Position}; use crate::stdlib::{ any::TypeId, boxed::Box, format, mem, string::String, string::ToString, vec::Vec, }; macro_rules! gen_concat_functions { ($root:ident => $($arg_type:ident),+ ) => { pub mod $root { $(pub mod $arg_type { use super::super::*; #[export_fn] #[inline] pub fn append_func(x: &mut ImmutableString, y: $arg_type) -> String { format!("{}{}", x, y) } #[export_fn] #[inline] pub fn prepend_func(x: &mut $arg_type, y: ImmutableString) -> String { format!("{}{}", x, y) } })* } } } macro_rules! reg_functions { ($mod_name:ident += $root:ident ; $($arg_type:ident),+) => { $( set_exported_fn!($mod_name, "+", $root::$arg_type::append_func); set_exported_fn!($mod_name, "+", $root::$arg_type::prepend_func); )* } } def_package!(crate:MoreStringPackage:"Additional string utilities, including string building.", lib, { reg_functions!(lib += basic; INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] { reg_functions!(lib += numbers; i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(target_arch = "wasm32"))] reg_functions!(lib += num_128; i128, u128); } #[cfg(not(feature = "no_float"))] reg_functions!(lib += float; f32, f64); lib.combine_flatten(exported_module!(string_functions)); lib.set_raw_fn( "pad", &[TypeId::of::<ImmutableString>(), TypeId::of::<INT>(), TypeId::of::<char>()], |_engine: &Engine, _: &Module, args: &mut [&mut Dynamic]| { let len = *args[1].read_lock::<INT>().unwrap(); #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && len > 0 && (len as usize) > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, len as usize, Position::none(), ).into(); } if len > 0 { let ch = mem::take(args[2]).cast::<char>(); let mut s = args[0].write_lock::<ImmutableString>().unwrap(); let orig_len = s.chars().count(); if len as usize > orig_len { let p = s.make_mut(); for _ in 0..(len as usize - orig_len) { p.push(ch); } #[cfg(not(feature = "unchecked"))] if _engine.limits.max_string_size > 0 && s.len() > _engine.limits.max_string_size { return EvalAltResult::ErrorDataTooLarge( "Length of string".to_string(), _engine.limits.max_string_size, s.len(), Position::none(), ).into(); } } } Ok(()) }, ); lib.set_iter( TypeId::of::<ImmutableString>(), |arr| Box::new( arr.cast::<ImmutableString>().chars().collect::<Vec<_>>().into_iter().map(Into::into) ) as Box<dyn Iterator<Item = Dynamic>>, ); }); gen_concat_functions!(basic => INT, bool, char, FnPtr); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] gen_concat_functions!(numbers => i8, u8, i16, u16, i32, i64, u32, u64); #[cfg(not(feature = "only_i32"))] #[cfg(not(feature = "only_i64"))] #[cfg(not(target_arch = "wasm32"))] gen_concat_functions!(num_128 => i128, u128); #[cfg(not(feature = "no_float"))] gen_concat_functions!(float => f32, f64); #[export_module] mod string_functions { #[rhai_fn(name = "+")] #[inline(always)] pub fn add_append_unit(s: ImmutableString, _x: ()) -> ImmutableString { s } #[rhai_fn(name = "+")] #[inline(always)] pub fn add_prepend_unit(_x: (), s: ImmutableString) -> ImmutableString { s } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_char(s: &mut 
ImmutableString, ch: char) { *s += ch; } #[rhai_fn(name = "+=")] #[inline(always)] pub fn append_string(s: &mut ImmutableString, add: ImmutableString) { *s += &add; } #[inline(always)] pub fn len(s: &mut ImmutableString) -> INT { s.chars().count() as INT } #[rhai_fn(get = "len")] #[inline(always)] pub fn len_prop(s: &mut ImmutableString) -> INT { len(s) } #[inline(always)] pub fn clear(s: &mut ImmutableString) { s.make_mut().clear(); } pub fn truncate(s: &mut ImmutableString, len: INT) { if len > 0 { let chars: StaticVec<_> = s.chars().collect(); let copy = s.make_mut(); copy.clear(); copy.extend(chars.into_iter().take(len as usize)); } else { s.make_mut().clear(); } } pub fn trim(s: &mut ImmutableString) { let trimmed = s.trim(); if trimmed.len() < s.len() { *s = trimmed.to_string().into(); } } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_char(s: &mut ImmutableString, ch: char) -> bool { s.contains(ch) } #[rhai_fn(name = "contains")] #[inline(always)] pub fn contains_string(s: &mut ImmutableString, find: ImmutableString) -> bool { s.contains(find.as_str()) } #[rhai_fn(name = "index_of")] pub fn index_of_char_starting_from(s: &mut ImmutableString, ch: char, start: INT) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(ch) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")] pub fn index_of_char(s: &mut ImmutableString, ch: char) -> INT { s.find(ch) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "index_of")]
#[rhai_fn(name = "index_of")] pub fn index_of_string(s: &mut ImmutableString, find: ImmutableString) -> INT { s.find(find.as_str()) .map(|index| s[0..index].chars().count() as INT) .unwrap_or(-1 as INT) } #[rhai_fn(name = "sub_string")] pub fn sub_string(s: ImmutableString, start: INT, len: INT) -> ImmutableString { let offset = if s.is_empty() || len <= 0 { return "".to_string().into(); } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return "".to_string().into(); } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; chars .iter() .skip(offset) .take(len) .cloned() .collect::<String>() .into() } #[rhai_fn(name = "sub_string")] #[inline(always)] pub fn sub_string_starting_from(s: ImmutableString, start: INT) -> ImmutableString { let len = s.len() as INT; sub_string(s, start, len) } #[rhai_fn(name = "crop")] pub fn crop_string(s: &mut ImmutableString, start: INT, len: INT) { let offset = if s.is_empty() || len <= 0 { s.make_mut().clear(); return; } else if start < 0 { 0 } else if (start as usize) >= s.chars().count() { s.make_mut().clear(); return; } else { start as usize }; let chars: StaticVec<_> = s.chars().collect(); let len = if offset + (len as usize) > chars.len() { chars.len() - offset } else { len as usize }; let copy = s.make_mut(); copy.clear(); copy.extend(chars.iter().skip(offset).take(len)); } #[rhai_fn(name = "crop")] #[inline(always)] pub fn crop_string_starting_from(s: &mut ImmutableString, start: INT) { crop_string(s, start, s.len() as INT); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string(s: &mut ImmutableString, find: ImmutableString, sub: ImmutableString) { *s = s.replace(find.as_str(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_string_with_char(s: &mut ImmutableString, find: ImmutableString, sub: char) { *s = s.replace(find.as_str(), &sub.to_string()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char_with_string(s: &mut ImmutableString, find: char, sub: ImmutableString) { *s = s.replace(&find.to_string(), sub.as_str()).into(); } #[rhai_fn(name = "replace")] #[inline(always)] pub fn replace_char(s: &mut ImmutableString, find: char, sub: char) { *s = s.replace(&find.to_string(), &sub.to_string()).into(); } #[cfg(not(feature = "no_index"))] pub mod arrays { use crate::engine::Array; #[rhai_fn(name = "+")] #[inline] pub fn append(x: &mut ImmutableString, y: Array) -> String { format!("{}{:?}", x, y) } #[rhai_fn(name = "+")] #[inline] pub fn prepend(x: &mut Array, y: ImmutableString) -> String { format!("{:?}{}", x, y) } } }
pub fn index_of_string_starting_from( s: &mut ImmutableString, find: ImmutableString, start: INT, ) -> INT { let start = if start < 0 { 0 } else if (start as usize) >= s.chars().count() { return -1 as INT; } else { s.chars().take(start as usize).collect::<String>().len() }; s[start..] .find(find.as_str()) .map(|index| s[0..start + index].chars().count() as INT) .unwrap_or(-1 as INT) }
function_block-full_function
[ { "content": "#[export_fn]\n\npub fn test_fn(input: &mut Clonable) -> &mut bool {\n\n &mut input.d\n\n}\n\n\n", "file_path": "codegen/ui_tests/return_mut_ref.rs", "rank": 0, "score": 353556.9496434691 }, { "content": "#[export_fn]\n\npub fn add_together(x: INT, y: INT) -> INT {\n\n x + y\n\n}\n\n\n", "file_path": "tests/macro_register.rs", "rank": 1, "score": 322877.21621564624 }, { "content": "fn reg_step<T>(lib: &mut Module)\n\nwhere\n\n for<'a> &'a T: Add<&'a T, Output = T>,\n\n T: Variant + Clone + PartialOrd,\n\n StepRange<T>: Iterator<Item = T>,\n\n{\n\n lib.set_iter(TypeId::of::<StepRange<T>>(), |source| {\n\n Box::new(source.cast::<StepRange<T>>().map(|x| x.into_dynamic()))\n\n as Box<dyn Iterator<Item = Dynamic>>\n\n });\n\n}\n\n\n", "file_path": "src/packages/iter_basic.rs", "rank": 2, "score": 308594.27675628074 }, { "content": "#[rhai_fn(\"wheeeee\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_junk_arg.rs", "rank": 3, "score": 307023.9836132859 }, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_junk_arg.rs", "rank": 4, "score": 307018.567089524 }, { "content": "#[rhai_fn(return_raw)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_raw_return.rs", "rank": 5, "score": 307015.8267103115 }, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_return_raw.rs", "rank": 6, "score": 307004.8522916564 }, { "content": "// Register range function\n\nfn reg_range<T: Variant + Clone>(lib: &mut Module)\n\nwhere\n\n Range<T>: Iterator<Item = T>,\n\n{\n\n lib.set_iter(TypeId::of::<Range<T>>(), |source| {\n\n Box::new(source.cast::<Range<T>>().map(|x| x.into_dynamic()))\n\n as Box<dyn Iterator<Item = Dynamic>>\n\n });\n\n}\n\n\n", "file_path": "src/packages/iter_basic.rs", "rank": 7, "score": 292116.30471212143 }, { "content": "#[export_fn]\n\npub fn test_fn(a: u32, b: NonClonable) -> bool {\n\n a == 0 && b.d\n\n}\n\n\n", "file_path": "codegen/ui_tests/non_clonable_second.rs", "rank": 8, "score": 288150.5893049446 }, { "content": "#[export_fn]\n\n#[inline(always)]\n\nfn debug_fn_ptr(f: &mut FnPtr) -> ImmutableString {\n\n to_string(f)\n\n}\n", "file_path": "src/packages/string_basic.rs", "rank": 9, "score": 286876.7200716092 }, { "content": "pub fn is_valid_identifier(name: impl Iterator<Item = char>) -> bool {\n\n let mut first_alphabetic = false;\n\n\n\n for ch in name {\n\n match ch {\n\n '_' => (),\n\n _ if is_id_first_alphabetic(ch) => first_alphabetic = true,\n\n _ if !first_alphabetic => return false,\n\n _ if char::is_ascii_alphanumeric(&ch) => (),\n\n _ => return false,\n\n }\n\n }\n\n\n\n first_alphabetic\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 10, "score": 278188.77372809406 }, { "content": "#[cfg(not(feature = \"unicode-xid-ident\"))]\n\n#[inline(always)]\n\nfn is_id_continue(x: char) -> bool {\n\n x.is_ascii_alphanumeric() || x == '_'\n\n}\n\n\n\n/// A type that implements the `InputStream` trait.\n\n/// Multiple character streams are jointed together to form one single stream.\n\npub struct MultiInputsStream<'a> {\n\n /// The input character streams.\n\n streams: StaticVec<Peekable<Chars<'a>>>,\n\n /// The current stream index.\n\n index: usize,\n\n}\n\n\n\nimpl InputStream for MultiInputsStream<'_> {\n\n /// Get the next character\n\n fn 
get_next(&mut self) -> Option<char> {\n\n loop {\n\n if self.index >= self.streams.len() {\n\n // No more streams\n\n return None;\n", "file_path": "src/token.rs", "rank": 11, "score": 272133.67816693225 }, { "content": "#[cfg(not(feature = \"no_object\"))]\n\n#[inline(always)]\n\npub fn make_getter(id: &str) -> String {\n\n format!(\"{}{}\", FN_GET, id)\n\n}\n\n\n\n/// Make setter function\n", "file_path": "src/engine.rs", "rank": 12, "score": 271204.10229585227 }, { "content": "#[cfg(not(feature = \"no_object\"))]\n\n#[inline(always)]\n\npub fn make_setter(id: &str) -> String {\n\n format!(\"{}{}\", FN_SET, id)\n\n}\n\n\n", "file_path": "src/engine.rs", "rank": 13, "score": 271204.10229585227 }, { "content": "#[export_fn(\"wheeeee\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_junk_arg.rs", "rank": 14, "score": 270916.09275127563 }, { "content": "#[rhai_fn(\"wheeeee\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_junk_arg.rs", "rank": 15, "score": 270916.09275127563 }, { "content": "#[export_fn(return_raw)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_raw_return.rs", "rank": 16, "score": 270907.9358483014 }, { "content": "#[inline(always)]\n\nfn to_string<T: Display>(x: &mut T) -> ImmutableString {\n\n x.to_string().into()\n\n}\n", "file_path": "src/packages/string_basic.rs", "rank": 17, "score": 270555.8541342007 }, { "content": "#[cfg(not(feature = \"unicode-xid-ident\"))]\n\n#[inline(always)]\n\nfn is_id_first_alphabetic(x: char) -> bool {\n\n x.is_ascii_alphabetic()\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 18, "score": 269068.9307726931 }, { "content": "#[cfg(not(feature = \"foo\"))]\n\n#[rhai_fn]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_cfg.rs", "rank": 19, "score": 268305.7771512418 }, { "content": "#[rhai_fn(return_raw = \"yes\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_extra_value.rs", "rank": 20, "score": 265243.1322145045 }, { "content": "#[rhai_fn(name)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_missing_value.rs", "rank": 21, "score": 265237.65353921225 }, { "content": "#[rhai_fn(name = true)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_bad_value.rs", "rank": 22, "score": 265237.6141553654 }, { "content": "#[rhai_fn(unknown = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_bad_attr.rs", "rank": 23, "score": 265237.6141553654 }, { "content": "#[rhai_fn(rhai::name = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_path_attr.rs", "rank": 24, "score": 265237.5753401085 }, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_path_attr.rs", "rank": 25, "score": 265232.2370154504 }, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_missing_value.rs", "rank": 26, "score": 265232.2370154504 
}, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_bad_attr.rs", "rank": 27, "score": 265232.2370154504 }, { "content": "pub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_mod_bad_value.rs", "rank": 28, "score": 265232.2370154504 }, { "content": "#[inline]\n\nfn to_debug<T: Debug>(x: &mut T) -> ImmutableString {\n\n format!(\"{:?}\", x).into()\n\n}\n\n#[cfg(not(feature = \"no_object\"))]\n\nmod format_map {\n\n use super::*;\n\n #[inline]\n\n #[export_fn]\n\n pub fn format_map(x: &mut Map) -> ImmutableString {\n\n format!(\"#{:?}\", x).into()\n\n }\n\n}\n", "file_path": "src/packages/string_basic.rs", "rank": 29, "score": 265037.67858254554 }, { "content": "#[rhai_fn(return_raw)]\n\npub fn test_fn(input: &mut Point) {\n\n input.x += 1.0;\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_mod_raw_noreturn.rs", "rank": 30, "score": 265010.53598208964 }, { "content": "/// Trim whitespace from a string. The original string argument is changed.\n\n///\n\n/// This version uses `&mut ImmutableString`\n\nfn trim_string(s: &mut ImmutableString) {\n\n *s = s.trim().into();\n\n}\n\n\n", "file_path": "examples/strings.rs", "rank": 31, "score": 261489.0060727892 }, { "content": "#[proc_macro_attribute]\n\npub fn export_fn(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let mut output = proc_macro2::TokenStream::from(input.clone());\n\n\n\n let parsed_params = match crate::attrs::outer_item_attributes(args.into(), \"export_fn\") {\n\n Ok(args) => args,\n\n Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),\n\n };\n\n let mut function_def = parse_macro_input!(input as function::ExportedFn);\n\n if let Err(e) = function_def.set_params(parsed_params) {\n\n return e.to_compile_error().into();\n\n }\n\n\n\n output.extend(function_def.generate());\n\n proc_macro::TokenStream::from(output)\n\n}\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 32, "score": 257196.53520195116 }, { "content": "#[proc_macro_attribute]\n\npub fn export_module(\n\n _args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let module_def = parse_macro_input!(input as module::Module);\n\n let tokens = module_def.generate();\n\n proc_macro::TokenStream::from(tokens)\n\n}\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 33, "score": 255072.51902789966 }, { "content": "#[inline(always)]\n\npub fn by_ref<T: Variant + Clone>(data: &mut Dynamic) -> DynamicWriteLock<T> {\n\n // Directly cast the &mut Dynamic into DynamicWriteLock to access the underlying data.\n\n data.write_lock::<T>().unwrap()\n\n}\n\n\n\n/// Dereference into value.\n", "file_path": "src/fn_register.rs", "rank": 34, "score": 254394.88218230542 }, { "content": "#[inline(always)]\n\npub fn by_value<T: Variant + Clone>(data: &mut Dynamic) -> T {\n\n if TypeId::of::<T>() == TypeId::of::<&str>() {\n\n // If T is &str, data must be ImmutableString, so map directly to it\n\n let ref_str = data.as_str().unwrap();\n\n let ref_T = unsafe { mem::transmute::<_, &T>(&ref_str) };\n\n ref_T.clone()\n\n } else if TypeId::of::<T>() == TypeId::of::<String>() {\n\n // If T is String, data must be ImmutableString, so map directly to it\n\n *unsafe_cast_box(Box::new(data.clone().take_string().unwrap())).unwrap()\n\n } else {\n\n // We consume the argument and then replace it with () - the argument 
is not supposed to be used again.\n\n // This way, we avoid having to clone the argument again, because it is already a clone when passed here.\n\n mem::take(data).cast::<T>()\n\n }\n\n}\n\n\n\nimpl<PL: Plugin> RegisterPlugin<PL> for Engine {\n\n fn register_plugin(&mut self, plugin: PL) {\n\n plugin.register_contents(self);\n\n }\n", "file_path": "src/fn_register.rs", "rank": 35, "score": 253798.29322836938 }, { "content": "/// Search for a module within an imports stack.\n\n/// Position in `EvalAltResult` is `None` and must be set afterwards.\n\npub fn search_imports_mut<'s>(\n\n mods: &'s mut Imports,\n\n state: &mut State,\n\n modules: &Box<ModuleRef>,\n\n) -> Result<&'s mut Module, Box<EvalAltResult>> {\n\n let (root, root_pos) = &modules[0];\n\n\n\n // Qualified - check if the root module is directly indexed\n\n let index = if state.always_search {\n\n None\n\n } else {\n\n modules.index()\n\n };\n\n\n\n Ok(if let Some(index) = index {\n\n let offset = mods.len() - index.get();\n\n &mut mods.get_mut(offset).unwrap().1\n\n } else {\n\n mods.iter_mut()\n\n .rev()\n\n .find(|(n, _)| n == root)\n\n .map(|(_, m)| m)\n\n .ok_or_else(|| EvalAltResult::ErrorModuleNotFound(root.to_string(), *root_pos))?\n\n })\n\n}\n\n\n", "file_path": "src/engine.rs", "rank": 36, "score": 251690.42446284485 }, { "content": "/// Test if the given character is an octal character.\n\nfn is_octal_char(c: char) -> bool {\n\n match c {\n\n '0'..='7' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 37, "score": 247753.76604631834 }, { "content": "/// Test if the given character is a hex character.\n\nfn is_hex_char(c: char) -> bool {\n\n match c {\n\n 'a'..='f' => true,\n\n 'A'..='F' => true,\n\n '0'..='9' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 38, "score": 247753.76604631834 }, { "content": "/// Test if the given character is a binary character.\n\nfn is_binary_char(c: char) -> bool {\n\n match c {\n\n '0' | '1' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 39, "score": 247753.76604631834 }, { "content": "/// This version uses `ImmutableString` and `&str`.\n\nfn find_substring(s: ImmutableString, sub: &str) -> INT {\n\n s.as_str().find(sub).map(|x| x as INT).unwrap_or(-1)\n\n}\n\n\n", "file_path": "examples/strings.rs", "rank": 40, "score": 244666.07788172973 }, { "content": "#[export_fn]\n\npub fn test_fn(input: Clonable, factor: &bool) -> bool {\n\n input.d & factor\n\n}\n\n\n", "file_path": "codegen/ui_tests/second_shared_ref.rs", "rank": 41, "score": 235926.23036565603 }, { "content": "#[cfg(not(feature = \"foo\"))]\n\n#[export_fn]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_cfg.rs", "rank": 42, "score": 231270.293073075 }, { "content": "#[bench]\n\nfn bench_eval_loop_strings_build(bench: &mut Bencher) {\n\n let script = r#\"\n\n let s = 0;\n\n for x in range(0, 10000) {\n\n s += \"x\";\n\n }\n\n \"#;\n\n\n\n let mut engine = Engine::new();\n\n engine.set_optimization_level(OptimizationLevel::None);\n\n\n\n let ast = engine.compile(script).unwrap();\n\n\n\n bench.iter(|| engine.consume_ast(&ast).unwrap());\n\n}\n\n\n", "file_path": "benches/eval_expression.rs", "rank": 43, "score": 230726.3248965528 }, { "content": "#[bench]\n\nfn bench_eval_loop_strings_no_build(bench: &mut Bencher) {\n\n let script = r#\"\n\n let s = \"hello\";\n\n for x in range(0, 10000) {\n\n s += \"\";\n\n }\n\n \"#;\n\n\n\n let mut engine = 
Engine::new();\n\n engine.set_optimization_level(OptimizationLevel::None);\n\n\n\n let ast = engine.compile(script).unwrap();\n\n\n\n bench.iter(|| engine.consume_ast(&ast).unwrap());\n\n}\n", "file_path": "benches/eval_expression.rs", "rank": 44, "score": 230726.3248965528 }, { "content": "/// Consume a `Shared` resource and return a mutable reference to the wrapped value.\n\n/// If the resource is shared (i.e. has other outstanding references), a cloned copy is used.\n\npub fn shared_make_mut<T: Clone>(value: &mut Shared<T>) -> &mut T {\n\n #[cfg(not(feature = \"sync\"))]\n\n return Rc::make_mut(value);\n\n #[cfg(feature = \"sync\")]\n\n return Arc::make_mut(value);\n\n}\n\n\n", "file_path": "src/fn_native.rs", "rank": 45, "score": 230410.79943597253 }, { "content": "#[rhai_fn(return_raw = \"yes\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_extra_value.rs", "rank": 46, "score": 229135.2413524943 }, { "content": "#[export_fn(return_raw = \"yes\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_extra_value.rs", "rank": 47, "score": 229135.2413524943 }, { "content": "#[export_fn(name)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_missing_value.rs", "rank": 48, "score": 229129.762677202 }, { "content": "#[rhai_fn(name)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_missing_value.rs", "rank": 49, "score": 229129.762677202 }, { "content": "#[export_fn(unknown = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_bad_attr.rs", "rank": 50, "score": 229129.7232933551 }, { "content": "#[rhai_fn(name = true)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_bad_value.rs", "rank": 51, "score": 229129.72329335508 }, { "content": "#[rhai_fn(unknown = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_bad_attr.rs", "rank": 52, "score": 229129.7232933551 }, { "content": "#[export_fn(name = true)]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_bad_value.rs", "rank": 53, "score": 229129.72329335508 }, { "content": "#[rhai_fn(rhai::name = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n}\n\n\n", "file_path": "codegen/ui_tests/rhai_fn_path_attr.rs", "rank": 54, "score": 229129.68447809826 }, { "content": "#[export_fn(rhai::name = \"thing\")]\n\npub fn test_fn(input: Point) -> bool {\n\n input.x > input.y\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_path_attr.rs", "rank": 55, "score": 229129.68447809826 }, { "content": "#[export_fn(return_raw)]\n\npub fn test_fn(input: &mut Point) {\n\n input.x += 1.0;\n\n}\n\n\n", "file_path": "codegen/ui_tests/export_fn_raw_noreturn.rs", "rank": 56, "score": 228902.64512007945 }, { "content": "#[export_fn]\n\npub fn test_fn(input: NonClonable) -> bool {\n\n input.d\n\n}\n\n\n", "file_path": "codegen/ui_tests/non_clonable.rs", "rank": 57, "score": 225846.86588931194 }, { "content": "#[inline(always)]\n\npub fn is_keyword_function(name: &str) -> bool {\n\n match name {\n\n #[cfg(not(feature = \"no_closure\"))]\n\n KEYWORD_IS_SHARED => 
true,\n\n KEYWORD_PRINT | KEYWORD_DEBUG | KEYWORD_TYPE_OF | KEYWORD_EVAL | KEYWORD_FN_PTR\n\n | KEYWORD_FN_PTR_CALL | KEYWORD_FN_PTR_CURRY => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n/// Can this keyword be overridden as a function?\n", "file_path": "src/token.rs", "rank": 58, "score": 224930.19389752633 }, { "content": "#[export_fn]\n\npub fn test_fn(input: &NonClonable) -> bool {\n\n input.d\n\n}\n\n\n", "file_path": "codegen/ui_tests/first_shared_ref.rs", "rank": 59, "score": 223587.21969855984 }, { "content": "pub fn make_getter(id: &str) -> String {\n\n format!(\"get${}\", id)\n\n}\n", "file_path": "codegen/src/function.rs", "rank": 60, "score": 222558.20299595117 }, { "content": "pub fn make_setter(id: &str) -> String {\n\n format!(\"set${}\", id)\n\n}\n\n\n\nimpl Parse for ExportedFnParams {\n\n fn parse(args: ParseStream) -> syn::Result<Self> {\n\n if args.is_empty() {\n\n return Ok(ExportedFnParams::default());\n\n }\n\n\n\n let info = crate::attrs::parse_attr_items(args)?;\n\n Self::from_info(info)\n\n }\n\n}\n\n\n\nimpl ExportedParams for ExportedFnParams {\n\n fn parse_stream(args: ParseStream) -> syn::Result<Self> {\n\n Self::parse(args)\n\n }\n\n\n", "file_path": "codegen/src/function.rs", "rank": 61, "score": 222558.20299595117 }, { "content": "#[cfg(not(feature = \"no_function\"))]\n\n#[inline(always)]\n\npub fn can_override_keyword(name: &str) -> bool {\n\n match name {\n\n KEYWORD_PRINT | KEYWORD_DEBUG | KEYWORD_TYPE_OF | KEYWORD_EVAL | KEYWORD_FN_PTR => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 62, "score": 222120.7495542582 }, { "content": "pub fn add_generic<T: std::ops::Add<Output = T>>(x: T, y: T) -> T {\n\n x + y\n\n}\n\n\n", "file_path": "tests/macro_unroll.rs", "rank": 63, "score": 219516.5743556936 }, { "content": "#[proc_macro]\n\npub fn register_exported_fn(args: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let (engine_expr, export_name, rust_modpath) = match crate::register::parse_register_macro(args)\n\n {\n\n Ok(triple) => triple,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n let gen_mod_path = crate::register::generated_module_path(&rust_modpath);\n\n let tokens = quote! {\n\n #engine_expr.register_result_fn(&(#export_name), #gen_mod_path::dynamic_result_fn);\n\n };\n\n proc_macro::TokenStream::from(tokens)\n\n}\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 64, "score": 218864.05286320852 }, { "content": "#[proc_macro]\n\npub fn set_exported_fn(args: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let (module_expr, export_name, rust_modpath) = match crate::register::parse_register_macro(args)\n\n {\n\n Ok(triple) => triple,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n let gen_mod_path = crate::register::generated_module_path(&rust_modpath);\n\n let tokens = quote! {\n\n #module_expr.set_fn(#export_name, FnAccess::Public,\n\n #gen_mod_path::token_input_types().as_ref(),\n\n #gen_mod_path::token_callable());\n\n };\n\n proc_macro::TokenStream::from(tokens)\n\n}\n", "file_path": "codegen/src/lib.rs", "rank": 65, "score": 218864.05286320852 }, { "content": "#[proc_macro]\n\npub fn exported_module(module_path: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let module_path = parse_macro_input!(module_path as syn::Path);\n\n let tokens = quote::quote! 
{\n\n #module_path::rhai_module_generate()\n\n };\n\n proc_macro::TokenStream::from(tokens)\n\n}\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 66, "score": 218690.4490568717 }, { "content": "/// Consume the next character.\n\nfn eat_next(stream: &mut impl InputStream, pos: &mut Position) -> Option<char> {\n\n pos.advance();\n\n stream.get_next()\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 67, "score": 215795.2082926294 }, { "content": "#[bench]\n\nfn bench_engine_register_fn(bench: &mut Bencher) {\n\n fn hello(_a: INT, _b: Array, _c: Map) -> bool {\n\n true\n\n }\n\n\n\n bench.iter(|| {\n\n let mut engine = Engine::new_raw();\n\n engine.register_fn(\"hello\", hello);\n\n });\n\n}\n", "file_path": "benches/engine.rs", "rank": 68, "score": 214708.50046202503 }, { "content": "/// Calculate a `u64` hash key from a module-qualified function name and parameter types.\n\n///\n\n/// Module names are passed in via `&str` references from an iterator.\n\n/// Parameter types are passed in via `TypeId` values from an iterator.\n\n///\n\n/// # Note\n\n///\n\n/// The first module name is skipped. Hashing starts from the _second_ module in the chain.\n\npub fn calc_fn_spec<'a>(\n\n modules: impl Iterator<Item = &'a str>,\n\n fn_name: &str,\n\n num: usize,\n\n params: impl Iterator<Item = TypeId>,\n\n) -> u64 {\n\n #[cfg(feature = \"no_std\")]\n\n let mut s: AHasher = Default::default();\n\n #[cfg(not(feature = \"no_std\"))]\n\n let mut s = DefaultHasher::new();\n\n\n\n // We always skip the first module\n\n modules.skip(1).for_each(|m| m.hash(&mut s));\n\n s.write(fn_name.as_bytes());\n\n s.write_usize(num);\n\n params.for_each(|t| t.hash(&mut s));\n\n s.finish()\n\n}\n\n\n\n/// [INTERNALS] Alias to [`smallvec::SmallVec<[T; 4]>`](https://crates.io/crates/smallvec),\n", "file_path": "src/utils.rs", "rank": 69, "score": 214345.83562515373 }, { "content": "#[export_fn]\n\n#[inline(always)]\n\nfn not(x: bool) -> bool {\n\n !x\n\n}\n\n\n\n#[cfg(not(feature = \"only_i32\"))]\n\n#[cfg(not(feature = \"only_i64\"))]\n\ngen_cmp_functions!(numbers => i8, u8, i16, u16, i32, u32, u64);\n\n\n\n#[cfg(not(feature = \"only_i32\"))]\n\n#[cfg(not(feature = \"only_i64\"))]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\ngen_cmp_functions!(num_128 => i128, u128);\n\n\n\n#[cfg(not(feature = \"no_float\"))]\n\ngen_cmp_functions!(float => f32);\n", "file_path": "src/packages/logic.rs", "rank": 70, "score": 210765.53619542005 }, { "content": "#[inline(always)]\n\npub fn make_err(msg: String) -> Box<EvalAltResult> {\n\n EvalAltResult::ErrorArithmetic(msg, Position::none()).into()\n\n}\n\n\n\nmacro_rules! 
gen_arithmetic_functions {\n\n ($root:ident => $($arg_type:ident),+) => {\n\n pub mod $root { $(pub mod $arg_type {\n\n use super::super::*;\n\n\n\n #[export_module]\n\n pub mod functions {\n\n #[rhai_fn(name = \"+\", return_raw)]\n\n #[inline]\n\n pub fn add(x: $arg_type, y: $arg_type) -> Result<Dynamic, Box<EvalAltResult>> {\n\n if cfg!(not(feature = \"unchecked\")) {\n\n x.checked_add(y).ok_or_else(|| make_err(format!(\"Addition overflow: {} + {}\", x, y))).map(Dynamic::from)\n\n } else {\n\n Ok(Dynamic::from(x + y))\n\n }\n\n }\n", "file_path": "src/packages/arithmetic.rs", "rank": 71, "score": 210643.73815110346 }, { "content": "#[export_fn]\n\n#[inline(always)]\n\nfn print_unit(_x: ()) -> ImmutableString {\n\n \"\".to_string().into()\n\n}\n", "file_path": "src/packages/string_basic.rs", "rank": 72, "score": 210640.4214871568 }, { "content": "/// Notice this is different from the built-in Rhai 'len' function for strings\n\n/// which counts the actual number of Unicode _characters_ in a string.\n\n/// This version simply counts the number of _bytes_ in the UTF-8 representation.\n\n///\n\n/// This version uses `&str`.\n\nfn count_string_bytes(s: &str) -> INT {\n\n s.len() as INT\n\n}\n\n\n", "file_path": "examples/strings.rs", "rank": 73, "score": 210242.4209112227 }, { "content": "#[inline(always)]\n\npub fn unsafe_cast_box<X: Variant, T: Variant>(item: Box<X>) -> Result<Box<T>, Box<X>> {\n\n // Only allow casting to the exact same type\n\n if TypeId::of::<X>() == TypeId::of::<T>() {\n\n // SAFETY: just checked whether we are pointing to the correct type\n\n unsafe {\n\n let raw: *mut dyn Any = Box::into_raw(item as Box<dyn Any>);\n\n Ok(Box::from_raw(raw as *mut T))\n\n }\n\n } else {\n\n // Return the consumed item for chaining.\n\n Err(item)\n\n }\n\n}\n\n\n\n/// # DANGEROUS!!!\n\n///\n\n/// A dangerous function that blindly casts a `&str` from one lifetime to a `Cow<str>` of\n\n/// another lifetime. This is mainly used to let us push a block-local variable into the\n\n/// current `Scope` without cloning the variable name. 
Doing this is safe because all local\n\n/// variables in the `Scope` are cleared out before existing the block.\n\n///\n\n/// Force-casting a local variable's lifetime to the current `Scope`'s larger lifetime saves\n\n/// on allocations and string cloning, thus avoids us having to maintain a chain of `Scope`'s.\n", "file_path": "src/unsafe.rs", "rank": 74, "score": 210082.6970589334 }, { "content": "/// Trait to register fallible custom functions returning `Result<Dynamic, Box<EvalAltResult>>` with the `Engine`.\n\npub trait RegisterResultFn<FN, ARGS> {\n\n /// Register a custom fallible function with the `Engine`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use rhai::{Engine, Dynamic, RegisterResultFn, EvalAltResult};\n\n ///\n\n /// // Normal function\n\n /// fn div(x: i64, y: i64) -> Result<Dynamic, Box<EvalAltResult>> {\n\n /// if y == 0 {\n\n /// // '.into()' automatically converts to 'Box<EvalAltResult::ErrorRuntime>'\n\n /// Err(\"division by zero!\".into())\n\n /// } else {\n\n /// Ok((x / y).into())\n\n /// }\n\n /// }\n\n ///\n\n /// let mut engine = Engine::new();\n\n ///\n", "file_path": "src/fn_register.rs", "rank": 75, "score": 208053.4519941413 }, { "content": "/// [INTERNALS] Parse a string literal wrapped by `enclosing_char`.\n\n/// Exported under the `internals` feature only.\n\n///\n\n/// ## WARNING\n\n///\n\n/// This type is volatile and may change.\n\npub fn parse_string_literal(\n\n stream: &mut impl InputStream,\n\n state: &mut TokenizeState,\n\n pos: &mut Position,\n\n enclosing_char: char,\n\n) -> Result<String, (LexError, Position)> {\n\n let mut result: StaticVec<char> = Default::default();\n\n let mut escape: StaticVec<char> = Default::default();\n\n\n\n loop {\n\n let next_char = stream.get_next().ok_or((LERR::UnterminatedString, *pos))?;\n\n\n\n pos.advance();\n\n\n\n if state.max_string_size > 0 && result.len() > state.max_string_size {\n\n return Err((LexError::StringTooLong(state.max_string_size), *pos));\n\n }\n\n\n\n match next_char {\n\n // \\...\n", "file_path": "src/token.rs", "rank": 76, "score": 207383.9904829678 }, { "content": "/// Search for a module within an imports stack.\n\n/// Position in `EvalAltResult` is `None` and must be set afterwards.\n\npub fn search_imports<'s>(\n\n mods: &'s Imports,\n\n state: &mut State,\n\n modules: &Box<ModuleRef>,\n\n) -> Result<&'s Module, Box<EvalAltResult>> {\n\n let (root, root_pos) = &modules[0];\n\n\n\n // Qualified - check if the root module is directly indexed\n\n let index = if state.always_search {\n\n None\n\n } else {\n\n modules.index()\n\n };\n\n\n\n Ok(if let Some(index) = index {\n\n let offset = mods.len() - index.get();\n\n &mods.get(offset).unwrap().1\n\n } else {\n\n mods.iter()\n\n .rev()\n\n .find(|(n, _)| n == root)\n\n .map(|(_, m)| m)\n\n .ok_or_else(|| EvalAltResult::ErrorModuleNotFound(root.to_string(), *root_pos))?\n\n })\n\n}\n\n\n", "file_path": "src/engine.rs", "rank": 77, "score": 206801.2710822098 }, { "content": "#[bench]\n\nfn bench_engine_new(bench: &mut Bencher) {\n\n bench.iter(|| Engine::new());\n\n}\n\n\n", "file_path": "benches/engine.rs", "rank": 78, "score": 206301.78874402455 }, { "content": "/// Trait to register custom functions with the `Engine`.\n\npub trait RegisterFn<FN, ARGS, RET> {\n\n /// Register a custom function with the `Engine`.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {\n\n /// use rhai::{Engine, RegisterFn};\n\n ///\n\n /// // Normal function\n\n /// fn add(x: i64, y: i64) -> 
i64 {\n\n /// x + y\n\n /// }\n\n ///\n\n /// let mut engine = Engine::new();\n\n ///\n\n /// // You must use the trait rhai::RegisterFn to get this method.\n\n /// engine.register_fn(\"add\", add);\n\n ///\n\n /// assert_eq!(engine.eval::<i64>(\"add(40, 2)\")?, 42);\n", "file_path": "src/fn_register.rs", "rank": 79, "score": 205189.41894328687 }, { "content": "#[bench]\n\nfn bench_engine_new_raw(bench: &mut Bencher) {\n\n bench.iter(|| Engine::new_raw());\n\n}\n\n\n", "file_path": "benches/engine.rs", "rank": 80, "score": 203703.67428906396 }, { "content": "/// Search for a variable within the scope or within imports,\n\n/// depending on whether the variable name is qualified.\n\npub fn search_namespace<'s, 'a>(\n\n scope: &'s mut Scope,\n\n mods: &'s mut Imports,\n\n state: &mut State,\n\n this_ptr: &'s mut Option<&mut Dynamic>,\n\n expr: &'a Expr,\n\n) -> Result<(&'s mut Dynamic, &'a str, ScopeEntryType, Position), Box<EvalAltResult>> {\n\n match expr {\n\n Expr::Variable(v) => match v.as_ref() {\n\n // Qualified variable\n\n ((name, pos), Some(modules), hash_var, _) => {\n\n let module = search_imports_mut(mods, state, modules)?;\n\n let target = module\n\n .get_qualified_var_mut(*hash_var)\n\n .map_err(|err| match *err {\n\n EvalAltResult::ErrorVariableNotFound(_, _) => {\n\n EvalAltResult::ErrorVariableNotFound(\n\n format!(\"{}{}\", modules, name),\n\n *pos,\n\n )\n", "file_path": "src/engine.rs", "rank": 81, "score": 203633.34514624762 }, { "content": "/// Search for a variable within the scope\n\npub fn search_scope_only<'s, 'a>(\n\n scope: &'s mut Scope,\n\n state: &mut State,\n\n this_ptr: &'s mut Option<&mut Dynamic>,\n\n expr: &'a Expr,\n\n) -> Result<(&'s mut Dynamic, &'a str, ScopeEntryType, Position), Box<EvalAltResult>> {\n\n let ((name, pos), _, _, index) = match expr {\n\n Expr::Variable(v) => v.as_ref(),\n\n _ => unreachable!(),\n\n };\n\n\n\n // Check if the variable is `this`\n\n if name == KEYWORD_THIS {\n\n if let Some(val) = this_ptr {\n\n return Ok(((*val).into(), KEYWORD_THIS, ScopeEntryType::Normal, *pos));\n\n } else {\n\n return EvalAltResult::ErrorUnboundThis(*pos).into();\n\n }\n\n }\n\n\n", "file_path": "src/engine.rs", "rank": 82, "score": 203633.34514624762 }, { "content": "#[bench]\n\nfn bench_engine_new_raw_core(bench: &mut Bencher) {\n\n use rhai::packages::*;\n\n let package = CorePackage::new();\n\n\n\n bench.iter(|| {\n\n let mut engine = Engine::new_raw();\n\n engine.load_package(package.get());\n\n });\n\n}\n\n\n", "file_path": "benches/engine.rs", "rank": 83, "score": 201199.54499305476 }, { "content": "#[cfg(not(feature = \"no_function\"))]\n\npub fn get_script_function_by_signature<'a>(\n\n module: &'a Module,\n\n name: &str,\n\n params: usize,\n\n pub_only: bool,\n\n) -> Option<&'a ScriptFnDef> {\n\n // Qualifiers (none) + function name + number of arguments.\n\n let hash_script = calc_fn_hash(empty(), name, params, empty());\n\n let func = module.get_fn(hash_script, pub_only)?;\n\n if func.is_script() {\n\n Some(func.get_fn_def())\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n/// [INTERNALS] A type containing all the limits imposed by the `Engine`.\n\n/// Exported under the `internals` feature only.\n\n///\n\n/// ## WARNING\n", "file_path": "src/engine.rs", "rank": 84, "score": 200553.27991163294 }, { "content": "/// Optimize a statement.\n\nfn optimize_stmt(stmt: Stmt, state: &mut State, preserve_result: bool) -> Stmt {\n\n match stmt {\n\n // if expr { Noop }\n\n Stmt::IfThenElse(x) if matches!(x.1, Stmt::Noop(_)) => {\n\n 
state.set_dirty();\n\n\n\n let pos = x.0.position();\n\n let expr = optimize_expr(x.0, state);\n\n\n\n if preserve_result {\n\n // -> { expr, Noop }\n\n let mut statements = StaticVec::new();\n\n statements.push(Stmt::Expr(Box::new(expr)));\n\n statements.push(x.1);\n\n\n\n Stmt::Block(Box::new((statements, pos)))\n\n } else {\n\n // -> expr\n\n Stmt::Expr(Box::new(expr))\n\n }\n", "file_path": "src/optimize.rs", "rank": 85, "score": 197983.75064415834 }, { "content": "fn pad<T: Variant + Clone>(\n\n _engine: &Engine,\n\n _: &Module,\n\n args: &mut [&mut Dynamic],\n\n) -> Result<(), Box<EvalAltResult>> {\n\n let len = *args[1].read_lock::<INT>().unwrap();\n\n\n\n // Check if array will be over max size limit\n\n #[cfg(not(feature = \"unchecked\"))]\n\n if _engine.limits.max_array_size > 0\n\n && len > 0\n\n && (len as usize) > _engine.limits.max_array_size\n\n {\n\n return EvalAltResult::ErrorDataTooLarge(\n\n \"Size of array\".to_string(),\n\n _engine.limits.max_array_size,\n\n len as usize,\n\n Position::none(),\n\n )\n\n .into();\n", "file_path": "src/packages/array_basic.rs", "rank": 86, "score": 196337.55487512465 }, { "content": "/// Match a particular token, consuming it if matched.\n\nfn match_token(input: &mut TokenStream, token: Token) -> Result<bool, ParseError> {\n\n let (t, _) = input.peek().unwrap();\n\n if *t == token {\n\n eat_token(input, token);\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 87, "score": 195677.78878816142 }, { "content": "/// Trait that represents arguments to a function call.\n\n/// Any data type that can be converted into a `Vec<Dynamic>` can be used\n\n/// as arguments to a function call.\n\npub trait FuncArgs {\n\n /// Convert to a `StaticVec<Dynamic>` of the function call arguments.\n\n fn into_vec(self) -> StaticVec<Dynamic>;\n\n}\n\n\n\n/// Macro to implement `FuncArgs` for tuples of standard types (each can be\n\n/// converted into `Dynamic`).\n\nmacro_rules! 
impl_args {\n\n ($($p:ident),*) => {\n\n impl<$($p: Variant + Clone),*> FuncArgs for ($($p,)*)\n\n {\n\n fn into_vec(self) -> StaticVec<Dynamic> {\n\n let ($($p,)*) = self;\n\n\n\n let mut _v = StaticVec::new();\n\n $(_v.push($p.into_dynamic());)*\n\n\n\n _v\n\n }\n\n }\n", "file_path": "src/fn_args.rs", "rank": 88, "score": 191055.1907493975 }, { "content": "#[export_fn]\n\npub fn test_fn(input: Clonable) -> *const str {\n\n \"yes\"\n\n}\n\n\n", "file_path": "codegen/ui_tests/return_pointer.rs", "rank": 89, "score": 190278.09372498697 }, { "content": "#[bench]\n\nfn bench_eval_loop_number(bench: &mut Bencher) {\n\n let script = r#\"\n\n let s = 0;\n\n for x in range(0, 10000) {\n\n s += 1;\n\n }\n\n \"#;\n\n\n\n let mut engine = Engine::new();\n\n engine.set_optimization_level(OptimizationLevel::None);\n\n\n\n let ast = engine.compile(script).unwrap();\n\n\n\n bench.iter(|| engine.consume_ast(&ast).unwrap());\n\n}\n\n\n", "file_path": "benches/eval_expression.rs", "rank": 90, "score": 189993.9326001783 }, { "content": "#[export_fn]\n\npub fn test_fn(input: Clonable) -> &'static str {\n\n \"yes\"\n\n}\n\n\n", "file_path": "codegen/ui_tests/return_shared_ref.rs", "rank": 91, "score": 188017.26846087404 }, { "content": "#[bench]\n\nfn bench_eval_expression_number_literal(bench: &mut Bencher) {\n\n let script = \"2 > 1\";\n\n\n\n let mut engine = Engine::new();\n\n engine.set_optimization_level(OptimizationLevel::None);\n\n\n\n let ast = engine.compile_expression(script).unwrap();\n\n\n\n bench.iter(|| engine.consume_ast(&ast).unwrap());\n\n}\n\n\n", "file_path": "benches/eval_expression.rs", "rank": 92, "score": 187400.458566083 }, { "content": "#[bench]\n\nfn bench_eval_expression_number_operators(bench: &mut Bencher) {\n\n let script = \"2 + 2 == 4\";\n\n\n\n let mut engine = Engine::new();\n\n engine.set_optimization_level(OptimizationLevel::None);\n\n\n\n let ast = engine.compile_expression(script).unwrap();\n\n\n\n bench.iter(|| engine.consume_ast(&ast).unwrap());\n\n}\n\n\n", "file_path": "benches/eval_expression.rs", "rank": 93, "score": 187400.458566083 }, { "content": "/// Deserialize a `Dynamic` value into a Rust type that implements `serde::Deserialize`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() -> Result<(), Box<rhai::EvalAltResult>> {\n\n/// # #[cfg(not(feature = \"no_index\"))]\n\n/// # #[cfg(not(feature = \"no_object\"))]\n\n/// # {\n\n/// use rhai::{Dynamic, Array, Map, INT};\n\n/// use rhai::de::from_dynamic;\n\n/// use serde::Deserialize;\n\n///\n\n/// #[derive(Debug, Deserialize, PartialEq)]\n\n/// struct Hello {\n\n/// a: INT,\n\n/// b: bool,\n\n/// }\n\n///\n\n/// #[derive(Debug, Deserialize, PartialEq)]\n\n/// struct Test {\n\n/// int: u32,\n\n/// seq: Vec<String>,\n\n/// obj: Hello,\n\n/// }\n\n///\n\n/// let mut map = Map::new();\n\n/// map.insert(\"int\".into(), Dynamic::from(42_u32));\n\n///\n\n/// let mut map2 = Map::new();\n\n/// map2.insert(\"a\".into(), (123 as INT).into());\n\n/// map2.insert(\"b\".into(), true.into());\n\n///\n\n/// map.insert(\"obj\".into(), map2.into());\n\n///\n\n/// let arr: Array = vec![\"foo\".into(), \"bar\".into(), \"baz\".into()];\n\n/// map.insert(\"seq\".into(), arr.into());\n\n///\n\n/// let value: Test = from_dynamic(&map.into())?;\n\n///\n\n/// let expected = Test {\n\n/// int: 42,\n\n/// seq: vec![\"foo\".into(), \"bar\".into(), \"baz\".into()],\n\n/// obj: Hello { a: 123, b: true },\n\n/// };\n\n///\n\n/// assert_eq!(value, expected);\n\n/// # }\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn 
from_dynamic<'de, T: Deserialize<'de>>(\n\n value: &'de Dynamic,\n\n) -> Result<T, Box<EvalAltResult>> {\n\n T::deserialize(&mut DynamicDeserializer::from_dynamic(value))\n\n}\n\n\n\nimpl Error for Box<EvalAltResult> {\n\n fn custom<T: fmt::Display>(err: T) -> Self {\n\n EvalAltResult::ErrorParsing(ParseErrorType::BadInput(err.to_string()), Position::none())\n\n .into()\n\n }\n\n}\n\n\n\nimpl<'de> Deserializer<'de> for &mut DynamicDeserializer<'de> {\n\n type Error = Box<EvalAltResult>;\n\n\n\n fn deserialize_any<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value, Box<EvalAltResult>> {\n\n match &self.value.0 {\n\n Union::Unit(_) => self.deserialize_unit(visitor),\n\n Union::Bool(_) => self.deserialize_bool(visitor),\n", "file_path": "src/serde/de.rs", "rank": 94, "score": 186653.9391298083 }, { "content": "/// Map a `Dynamic` value to an expression.\n\n///\n\n/// Returns Some(expression) if conversion is successful. Otherwise None.\n\npub fn map_dynamic_to_expr(value: Dynamic, pos: Position) -> Option<Expr> {\n\n match value.0 {\n\n #[cfg(not(feature = \"no_float\"))]\n\n Union::Float(value) => Some(Expr::FloatConstant(Box::new(FloatWrapper(value, pos)))),\n\n\n\n Union::Unit(_) => Some(Expr::Unit(pos)),\n\n Union::Int(value) => Some(Expr::IntegerConstant(Box::new((value, pos)))),\n\n Union::Char(value) => Some(Expr::CharConstant(Box::new((value, pos)))),\n\n Union::Str(value) => Some(Expr::StringConstant(Box::new((value.clone(), pos)))),\n\n Union::Bool(true) => Some(Expr::True(pos)),\n\n Union::Bool(false) => Some(Expr::False(pos)),\n\n #[cfg(not(feature = \"no_index\"))]\n\n Union::Array(array) => {\n\n let items: Vec<_> = array\n\n .into_iter()\n\n .map(|x| map_dynamic_to_expr(x, pos))\n\n .collect();\n\n\n\n if items.iter().all(Option::is_some) {\n\n Some(Expr::Array(Box::new((\n", "file_path": "src/parser.rs", "rank": 95, "score": 186341.26085118792 }, { "content": "#[inline(always)]\n\npub fn map_dynamic<T: Variant + Clone>(data: T) -> Result<Dynamic, Box<EvalAltResult>> {\n\n Ok(data.into_dynamic())\n\n}\n\n\n\n/// To Dynamic mapping function.\n", "file_path": "src/fn_register.rs", "rank": 96, "score": 182327.84808759968 }, { "content": "#[export_fn]\n\n#[inline(always)]\n\nfn print_empty_string() -> ImmutableString {\n\n \"\".to_string().into()\n\n}\n", "file_path": "src/packages/string_basic.rs", "rank": 97, "score": 177531.44980536756 } ]
Rust
examples/linked_timer_rtic.rs
akashihi/stm32l0xx-hal
d53ec21dc02348ecc8351f0578ec4eee08a447cf
#![no_main]
#![no_std]

extern crate panic_halt;

use core::fmt::Write;

use rtic::app;

use stm32l0xx_hal::prelude::*;
use stm32l0xx_hal::{
    pac,
    rcc::Config,
    serial::{self, Serial},
    time,
    timer::{LinkedTimer, LinkedTimerPair, Timer},
};

const LOGGER_FREQUENCY: u32 = 2;

#[app(device = stm32l0xx_hal::pac, peripherals = true)]
const APP: () = {
    struct Resources {
        serial: Serial<pac::USART1>,
        timer: Timer<pac::TIM6>,
        linked_tim2_tim3: LinkedTimerPair<pac::TIM2, pac::TIM3>,
        linked_tim21_tim22: LinkedTimerPair<pac::TIM21, pac::TIM22>,
    }

    #[init]
    fn init(ctx: init::Context) -> init::LateResources {
        let cp: cortex_m::Peripherals = ctx.core;
        let dp: pac::Peripherals = ctx.device;

        let mut rcc = dp.RCC.freeze(Config::hsi16());
        let mut delay = cp.SYST.delay(rcc.clocks);
        let gpiob = dp.GPIOB.split(&mut rcc);

        let mut serial = Serial::usart1(
            dp.USART1,
            gpiob.pb6.into_floating_input(),
            gpiob.pb7.into_floating_input(),
            serial::Config::default(),
            &mut rcc,
        )
        .unwrap();

        writeln!(serial, "Starting example").ok();

        writeln!(serial, "Init TIM2/TIM3...").ok();
        let linked_tim2_tim3 = LinkedTimerPair::tim2_tim3(dp.TIM2, dp.TIM3, &mut rcc);

        delay.delay_ms(1000u16);

        writeln!(serial, "Init TIM21/TIM22...").ok();
        let linked_tim21_tim22 = LinkedTimerPair::tim21_tim22(dp.TIM21, dp.TIM22, &mut rcc);

        let mut timer = dp.TIM6.timer(LOGGER_FREQUENCY.hz(), &mut rcc);
        timer.listen();

        init::LateResources {
            serial,
            timer,
            linked_tim2_tim3,
            linked_tim21_tim22,
        }
    }

    #[task(binds = TIM6, resources = [serial, timer, linked_tim2_tim3, linked_tim21_tim22])]
    fn logger(ctx: logger::Context) {
        static mut PREV_TIM2_TIM3: u32 = 0;
        static mut PREV_TIM21_TIM22: u32 = 0;
        static mut TIMES_UNTIL_RESET: u32 = 3 * LOGGER_FREQUENCY;

        ctx.resources.timer.clear_irq();

        if *TIMES_UNTIL_RESET > 1 {
            *TIMES_UNTIL_RESET -= 1;
        } else if *TIMES_UNTIL_RESET == 1 {
            writeln!(ctx.resources.serial, "Reset",).ok();
            ctx.resources.linked_tim2_tim3.reset();
            ctx.resources.linked_tim21_tim22.reset();
            *TIMES_UNTIL_RESET -= 1;
        }

        print_timer(
            "TIM2/TIM3 ",
            ctx.resources.linked_tim2_tim3,
            ctx.resources.serial,
            PREV_TIM2_TIM3,
        );
        print_timer(
            "TIM21/TIM22 ",
            ctx.resources.linked_tim21_tim22,
            ctx.resources.serial,
            PREV_TIM21_TIM22,
        );
    }
};

fn print_timer(
    name: &'static str,
    timer: &impl LinkedTimer,
    serial: &mut Serial<pac::USART1>,
    previous: &mut u32,
) {
    let cnt = timer.get_counter();
    let delta = cnt - *previous;
    let freq = delta * LOGGER_FREQUENCY / 1000;
    writeln!(
        serial,
        "{} count {:>10} (msb={} lsb={} Δ{} {} kHz)",
        name,
        cnt,
        (cnt & 0xffff0000) >> 16,
        cnt & 0xffff,
        delta,
        freq,
    )
    .ok();
    *previous = cnt;
}
#![no_main]
#![no_std]

extern crate panic_halt;

use core::fmt::Write;

use rtic::app;

use stm32l0xx_hal::prelude::*;
use stm32l0xx_hal::{
    pac,
    rcc::Config,
    serial::{self, Serial},
    time,
    timer::{LinkedTimer, LinkedTimerPair, Timer},
};

const LOGGER_FREQUENCY: u32 = 2;

#[app(device = stm32l0xx_hal::pac, peripherals = true)]
const APP: () = {
    struct Resources {
        serial: Serial<pac::USART1>,
        timer: Timer<pac::TIM6>,
        linked_tim2_tim3: LinkedTimerPair<pac::TIM2, pac::TIM3>,
        linked_tim21_tim22: LinkedTimerPair<pac::TIM21, pac::TIM22>,
    }

    #[init]
    fn init(ctx: init::Context) -> init::LateResources {
        let cp: cortex_m::Peripherals = ctx.core;
        let dp: pac::Peripherals = ctx.device;

        let mut rcc = dp.RCC.freeze(Config::hsi16());
        let mut delay = cp.SYST.delay(rcc.clocks);
        let gpiob = dp.GPIOB.split(&mut rcc);

        let mut serial = Serial::usart1(
            dp.USART1,
            gpiob.pb6.into_floating_input(),
            gpiob.pb7.into_floating_input(),
            serial::Config::default(),
            &mut rcc,
        )
        .unwrap();

        writeln!(serial, "Starting example").ok();

        writeln!(serial, "Init TIM2/TIM3...").ok();
        let linked_tim2_tim3 = LinkedTimerPair::tim2_tim3(dp.T
    #[task(binds = TIM6, resources = [serial, timer, linked_tim2_tim3, linked_tim21_tim22])]
    fn logger(ctx: logger::Context) {
        static mut PREV_TIM2_TIM3: u32 = 0;
        static mut PREV_TIM21_TIM22: u32 = 0;
        static mut TIMES_UNTIL_RESET: u32 = 3 * LOGGER_FREQUENCY;

        ctx.resources.timer.clear_irq();

        if *TIMES_UNTIL_RESET > 1 {
            *TIMES_UNTIL_RESET -= 1;
        } else if *TIMES_UNTIL_RESET == 1 {
            writeln!(ctx.resources.serial, "Reset",).ok();
            ctx.resources.linked_tim2_tim3.reset();
            ctx.resources.linked_tim21_tim22.reset();
            *TIMES_UNTIL_RESET -= 1;
        }

        print_timer(
            "TIM2/TIM3 ",
            ctx.resources.linked_tim2_tim3,
            ctx.resources.serial,
            PREV_TIM2_TIM3,
        );
        print_timer(
            "TIM21/TIM22 ",
            ctx.resources.linked_tim21_tim22,
            ctx.resources.serial,
            PREV_TIM21_TIM22,
        );
    }
};

fn print_timer(
    name: &'static str,
    timer: &impl LinkedTimer,
    serial: &mut Serial<pac::USART1>,
    previous: &mut u32,
) {
    let cnt = timer.get_counter();
    let delta = cnt - *previous;
    let freq = delta * LOGGER_FREQUENCY / 1000;
    writeln!(
        serial,
        "{} count {:>10} (msb={} lsb={} Δ{} {} kHz)",
        name,
        cnt,
        (cnt & 0xffff0000) >> 16,
        cnt & 0xffff,
        delta,
        freq,
    )
    .ok();
    *previous = cnt;
}
IM2, dp.TIM3, &mut rcc);

        delay.delay_ms(1000u16);

        writeln!(serial, "Init TIM21/TIM22...").ok();
        let linked_tim21_tim22 = LinkedTimerPair::tim21_tim22(dp.TIM21, dp.TIM22, &mut rcc);

        let mut timer = dp.TIM6.timer(LOGGER_FREQUENCY.hz(), &mut rcc);
        timer.listen();

        init::LateResources {
            serial,
            timer,
            linked_tim2_tim3,
            linked_tim21_tim22,
        }
    }
function_block-function_prefixed
[ { "content": "fn delay() {\n\n // We can't use `Delay`, as that requires a frequency of at least one MHz.\n\n // Given our clock selection, the following loop should give us a nice delay\n\n // when compiled in release mode.\n\n for _ in 0..1_000 {\n\n asm::nop()\n\n }\n\n}\n", "file_path": "examples/pwr.rs", "rank": 0, "score": 122466.5420655542 }, { "content": "fn delay() {\n\n // We can't use `Delay`, as that requires a frequency of at least one MHz.\n\n // Given our clock selection, the following loop should give us a nice delay\n\n // when compiled in release mode.\n\n for _ in 0..1_000 {\n\n asm::nop()\n\n }\n\n}\n", "file_path": "examples/lptim.rs", "rank": 1, "score": 122466.5420655542 }, { "content": "fn get_clock_config(freq: u32, clk: u32) -> (u16, u16) {\n\n let ticks = clk / freq;\n\n let psc = u16((ticks - 1) / (1 << 16)).unwrap();\n\n let arr = u16(ticks / u32(psc + 1)).unwrap();\n\n return (psc, arr);\n\n}\n\n\n", "file_path": "src/pwm.rs", "rank": 2, "score": 98048.85114079542 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Configure PA1 as output.\n\n let led = gpioa.pa1.into_push_pull_output();\n\n\n\n // Configure the timer.\n\n let mut timer = dp.TIM2.timer(1.hz(), &mut rcc);\n\n timer.listen();\n\n\n\n // Store the LED and timer in mutex refcells to make them available from the\n\n // timer interrupt.\n\n cortex_m::interrupt::free(|cs| {\n", "file_path": "examples/timer.rs", "rank": 4, "score": 95059.09564227182 }, { "content": "#[interrupt]\n\nfn TIM2() {\n\n // Keep a state to blink the LED.\n\n static mut STATE: bool = false;\n\n\n\n cortex_m::interrupt::free(|cs| {\n\n if let Some(ref mut timer) = TIMER.borrow(cs).borrow_mut().deref_mut() {\n\n // Clear the interrupt flag.\n\n timer.clear_irq();\n\n\n\n // Change the LED state on each interrupt.\n\n if let Some(ref mut led) = LED.borrow(cs).borrow_mut().deref_mut() {\n\n if *STATE {\n\n led.set_low().unwrap();\n\n *STATE = false;\n\n } else {\n\n led.set_high().unwrap();\n\n *STATE = true;\n\n }\n\n }\n\n }\n\n });\n\n}\n", "file_path": "examples/timer.rs", "rank": 5, "score": 95059.09564227182 }, { "content": "#[derive(Copy, Clone)]\n\nstruct TimeConf {\n\n psc_encoded: u8,\n\n arr: u16,\n\n}\n\n\n\nimpl TimeConf {\n\n const ARR_MAX: u16 = u16::max_value();\n\n\n\n /// Calculates prescaler and autoreload value for producing overflows at a rate of\n\n /// `output_freq`.\n\n fn calculate_freq(input_freq: Hertz, output_freq: Hertz) -> Self {\n\n // Fi = Frequency of input clock\n\n // Fo = Output frequency (frequency of timer overflows, using ARR)\n\n // psc = prescaler (must be power of two in range 1..=128)\n\n // We know Fi and Fo, and want to know psc and ARR.\n\n //\n\n // The timer works like this:\n\n // Fo = (Fi / psc) / ARR\n\n //\n\n // Therefore:\n", "file_path": "src/lptim.rs", "rank": 6, "score": 94941.09326610313 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. 
This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Choose TX / RX pins\n\n let tx_pin = gpioa.pa2;\n\n let rx_pin = gpioa.pa3;\n\n\n\n // Configure the serial peripheral.\n\n let serial = dp\n\n .USART2\n\n .usart(tx_pin, rx_pin, serial::Config::default(), &mut rcc)\n\n .unwrap();\n\n\n", "file_path": "examples/serial.rs", "rank": 7, "score": 94748.0896968499 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Configure PA1 as output.\n\n let mut led = gpioa.pa1.into_push_pull_output();\n\n\n\n // Get the delay provider.\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n\n\n loop {\n\n led.set_high().unwrap();\n\n delay.delay_ms(500_u16);\n\n\n\n led.set_low().unwrap();\n\n delay.delay_ms(500_u16);\n\n }\n\n}\n", "file_path": "examples/blinky_delay.rs", "rank": 8, "score": 91580.53449735805 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n // Create an encoder instance that counts between 0 and 64 using inputs on PB13 and PB14.\n\n let mut encoder = dp\n\n .TIM21\n\n .encoder((gpiob.pb13, gpiob.pb14), Mode::Qei, 64, &mut rcc);\n\n\n\n loop {\n\n #[allow(unused)]\n\n let Status {\n\n count,\n\n did_overflow,\n\n direction,\n\n } = encoder.status();\n\n\n\n // Use encoder state here\n\n }\n\n}\n", "file_path": "examples/timer_encoder.rs", "rank": 9, "score": 91343.77964636801 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n let mut tx_channel = dma.channels.channel4;\n\n let mut rx_channel = dma.channels.channel5;\n\n\n\n let (mut tx, mut rx) = dp\n\n .USART2\n\n .usart(\n\n gpioa.pa2,\n\n gpioa.pa3,\n\n serial::Config::default().baudrate(115_200.bps()),\n\n &mut rcc,\n\n )\n\n .unwrap()\n\n .split();\n", "file_path": "examples/serial_dma.rs", "rank": 10, "score": 91049.06305211272 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let mut syscfg = SYSCFG::new(dp.SYSCFG, &mut rcc);\n\n let hsi48 = rcc.enable_hsi48(&mut syscfg, dp.CRS);\n\n\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n let usb = USB::new(dp.USB, gpioa.pa11, gpioa.pa12, hsi48);\n\n let usb_bus = UsbBus::new(usb);\n\n\n\n let mut serial = SerialPort::new(&usb_bus);\n\n\n\n let mut usb_dev = UsbDeviceBuilder::new(&usb_bus, UsbVidPid(0x16c0, 0x27dd))\n\n .manufacturer(\"Fake company\")\n\n .product(\"Serial port\")\n\n .serial_number(\"TEST\")\n\n .device_class(USB_CLASS_CDC)\n\n .build();\n", "file_path": "examples/usb_serial.rs", "rank": 11, "score": 91049.06305211272 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n let (tx, rx) = dp\n\n .USART2\n\n .usart(\n\n gpioa.pa2,\n\n gpioa.pa3,\n\n serial::Config::default().baudrate(115_200.bps()),\n\n &mut rcc,\n\n )\n\n .unwrap()\n\n .split();\n\n\n\n // we only have two elements for each queue, so U2 is fine (size is max 2)\n\n let mut rx_buffers: Queue<Pin<DmaBuffer>, U2> = Queue::new();\n", "file_path": "examples/serial_dma_async.rs", "rank": 12, "score": 87718.23445127564 }, { "content": "fn blink(led: &mut PB<Output<PushPull>>) {\n\n led.set_high().unwrap();\n\n delay();\n\n led.set_low().unwrap();\n\n delay();\n\n}\n\n\n", "file_path": "examples/pwr.rs", "rank": 13, "score": 85504.3356061802 }, { "content": "fn blink(led: &mut PB<Output<PushPull>>) {\n\n led.set_high().unwrap();\n\n delay();\n\n led.set_low().unwrap();\n\n delay();\n\n}\n\n\n", "file_path": "examples/lptim.rs", "rank": 14, "score": 85504.3356061802 }, { "content": "type DmaBuffer = &'static mut [u8; 1];\n\n\n", "file_path": "examples/serial_dma_async.rs", "rank": 15, "score": 71627.47744671782 }, { "content": "type TxTarget = serial::Tx<serial::USART2>;\n", "file_path": "examples/serial_dma_async.rs", "rank": 16, "score": 68535.4391091025 }, { "content": "type RxTarget = serial::Rx<serial::USART2>;\n", "file_path": "examples/serial_dma_async.rs", "rank": 17, "score": 68535.4391091025 }, { "content": "/// Extension trait that adds convenience methods to the `u32` type\n\npub trait U32Ext {\n\n /// Wrap in `Bps`\n\n fn bps(self) -> Bps;\n\n\n\n /// Wrap in `Hertz`\n\n fn hz(self) -> Hertz;\n\n\n\n /// Wrap in `Hertz`\n\n fn khz(self) -> Hertz;\n\n\n\n /// Wrap in `Hertz`\n\n fn mhz(self) -> Hertz;\n\n\n\n /// Wrap in `MicroSeconds`\n\n fn us(self) -> MicroSeconds;\n\n\n\n /// Wrap in `MicroSeconds`\n\n fn ms(self) -> MicroSeconds;\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 18, "score": 64479.394758279006 }, { "content": "type PacSyscfg = pac::SYSCFG;\n\n\n\npub struct SYSCFG {\n\n pub(crate) syscfg: PacSyscfg,\n\n}\n\n\n\nimpl SYSCFG {\n\n pub fn new(syscfg: PacSyscfg, rcc: &mut Rcc) -> Self {\n\n // Reset SYSCFG peripheral\n\n rcc.rb.apb2rstr.modify(|_, w| w.syscfgrst().set_bit());\n\n rcc.rb.apb2rstr.modify(|_, w| w.syscfgrst().clear_bit());\n\n\n\n // Enable SYSCFG peripheral\n\n rcc.rb.apb2enr.modify(|_, w| w.syscfgen().set_bit());\n\n\n\n SYSCFG { syscfg }\n\n }\n\n}\n", "file_path": "src/syscfg.rs", "rank": 19, "score": 63552.51211559473 }, { "content": "/// Provides access to the buffer that the DMA writes ADC values into\n\n///\n\n/// Since the DMA transfer takes ownership of the buffer, we need to access it\n\n/// with unsafe means. 
This struct is a safe wrapper around this unsafe access.\n\nstruct Buffer {\n\n ptr: *const u16,\n\n len: u16,\n\n pos: u16,\n\n dma_pos: u16,\n\n\n\n /// Indicates order of read and write indices\n\n ///\n\n /// This is initially `false`, indicating that the read position (the `pos`\n\n /// field) is smaller than or equal to the write position (internally\n\n /// managed by the DMA peripheral).\n\n ///\n\n /// Once the write position wraps around the buffer boundary, this becomes\n\n /// `true` until the read position also wraps around.\n\n r_gt_w: bool,\n\n}\n\n\n\nimpl Buffer {\n\n fn read<T, C, B>(\n\n &mut self,\n", "file_path": "src/adc.rs", "rank": 20, "score": 62911.78316339548 }, { "content": "fn main() {\n\n // Put the linker script somewhere the linker can find it\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n\n\n let mut feature_count = 0;\n\n\n\n if cfg!(feature = \"stm32l0x1\") {\n\n feature_count += 1;\n\n }\n\n\n\n if cfg!(feature = \"stm32l0x2\") {\n\n feature_count += 1;\n\n }\n\n\n\n if cfg!(feature = \"stm32l0x3\") {\n\n feature_count += 1;\n\n }\n\n\n\n if feature_count != 1 {\n\n panic!(\"\\n\\nMust select exactly one package for linker script generation!\\nChoices: 'stm32l0x1' or 'stm32l0x2' or 'stm32l0x3'\\nAlternatively, pick the mcu-feature that matches your MCU, for example 'mcu-STM32L071KBTx'\\n\\n\");\n", "file_path": "build.rs", "rank": 21, "score": 61626.30997907382 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct TransferState {\n\n pos: u16,\n\n half: bool,\n\n complete: bool,\n\n}\n\n\n\n/// Iterator over buffered ADC values\n\npub struct ReadAvailable<'r, T, C, B> {\n\n buffer: &'r mut Buffer,\n\n transfer: &'r dma::Transfer<T, C, B, dma::Started>,\n\n}\n\n\n\nimpl<T, C, B> Iterator for ReadAvailable<'_, T, C, B>\n\nwhere\n\n C: dma::Channel,\n\n{\n\n type Item = Result<u16, Error>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.buffer.read(self.transfer)\n", "file_path": "src/adc.rs", "rank": 22, "score": 61032.57814890001 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut adc = dp.ADC.constrain(&mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let mut a0 = gpioa.pa0.into_analog();\n\n\n\n let mut blue = gpiob.pb6.into_push_pull_output();\n\n let mut red = gpiob.pb7.into_push_pull_output();\n\n\n\n loop {\n\n let val: u16 = adc.read(&mut a0).unwrap();\n\n\n\n if val > 2000 {\n\n blue.set_high().unwrap();\n\n red.set_low().unwrap();\n\n } else {\n\n red.set_high().unwrap();\n\n blue.set_low().unwrap();\n\n }\n\n }\n\n}\n", "file_path": "examples/adc.rs", "rank": 23, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let cp = pac::CorePeripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut scb = cp.SCB;\n\n let mut rcc = dp.RCC.freeze(rcc::Config::msi(rcc::MSIRange::Range0));\n\n let mut exti = Exti::new(dp.EXTI);\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let mut led = gpiob.pb2.into_push_pull_output().downgrade();\n\n\n\n let instant = Instant::new()\n\n .set_year(19)\n\n .set_month(9)\n\n .set_day(26)\n\n .set_hour(11)\n\n .set_minute(57)\n\n .set_second(0);\n\n\n", "file_path": "examples/pwr.rs", "rank": 24, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Get the delay provider.\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Initialize TIM2 for PWM\n\n let pwm = pwm::Timer::new(dp.TIM2, 10.khz(), &mut rcc);\n\n\n\n #[cfg(feature = \"stm32l0x1\")]\n\n let mut pwm = pwm.channel2.assign(gpioa.pa1);\n\n\n", "file_path": "examples/pwm.rs", "rank": 25, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Configure PA1 as output.\n\n let mut led = gpioa.pa1.into_push_pull_output();\n\n\n\n loop {\n\n // Set the LED high one million times in a row.\n\n for _ in 0..1_000_000 {\n\n led.set_high().unwrap();\n\n }\n\n\n\n // Set the LED low one million times in a row.\n\n for _ in 0..1_000_000 {\n\n led.set_low().unwrap();\n\n }\n\n }\n\n}\n", "file_path": "examples/blinky.rs", "rank": 26, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the 16MHz internal clock\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Source MCO from HSI16, configure prescaler to divide by 8 to get 2MHz output.\n\n rcc.configure_mco(MCOSEL_A::HSI16, MCOPRE_A::DIV8, (gpioa.pa8, gpioa.pa9));\n\n\n\n // Individual pins can also be set by passing them directly:\n\n // rcc.enable_mco(MCOSEL_A::HSI16, MCOPRE_A::DIV8, gpioa.pa8);\n\n\n\n // Or for larger devices, all 3 MCO pins can be configured:\n\n // rcc.configure_mco(MCOSEL_A::HSI16, MCOPRE_A::DIV8, (gpioa.pa8, gpioa.pa9, gpiob.pb13));\n\n\n\n // Probe PA8 or PA9 to see generated 2MHz MCO signal.\n\n loop {}\n\n}\n", "file_path": "examples/mco.rs", "rank": 27, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Configure a delay to feed the watchdog.\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n\n\n // Configure the independent watchdog.\n\n let mut watchdog = dp.IWDG.watchdog();\n\n\n\n // Start a watchdog with a 100ms period.\n\n watchdog.start(100.ms());\n\n\n\n let mut counter = 50;\n\n loop {\n\n // Perform some “work”.\n\n delay.delay_ms(90_u16);\n", "file_path": "examples/watchdog.rs", "rank": 28, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut syscfg = SYSCFG::new(dp.SYSCFG, &mut rcc);\n\n\n\n // constructor initializes 48 MHz clock that RNG requires\n\n // Initialize 48 MHz clock and RNG\n\n let hsi48 = rcc.enable_hsi48(&mut syscfg, dp.CRS);\n\n let mut rng = Rng::new(dp.RNG, &mut rcc, hsi48);\n\n\n\n loop {\n\n // enable starts the ADC conversions that generate the random number\n\n rng.enable();\n\n // wait until the flag flips; interrupt driven is possible but no implemented\n\n rng.wait();\n\n // reading the result clears the ready flag\n\n let _ = rng.take_result();\n\n // can save some power by disabling until next random number needed\n\n rng.disable();\n\n }\n\n}\n", "file_path": "examples/rng.rs", "rank": 29, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let mut flash = FLASH::new(dp.FLASH, &mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let mut led = gpiob.pb2.into_push_pull_output();\n\n\n\n // Get the delay provider.\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n\n\n // This should be the first word in the second flash bank. Since this\n\n // example should be quite small, we can be reasonably sure that it fully\n\n // fits into the first flash bank. This means we won't overwrite our own\n\n // code or stall execution.\n\n //\n\n // This example requires STM32L082, which has 2 banks.\n\n let address = FLASH_START + flash_size_in_kb() / 2 * 1024;\n", "file_path": "examples/flash.rs", "rank": 30, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let button = gpiob.pb2.into_floating_input();\n\n\n\n let serial = dp\n\n .USART2\n\n .usart(\n\n gpioa.pa2,\n\n gpioa.pa3,\n\n serial::Config::default().baudrate(115_200.bps()),\n\n &mut rcc,\n\n )\n\n .unwrap();\n\n let (mut tx, _) = serial.split();\n", "file_path": "examples/rtc.rs", "rank": 31, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n let sda = gpioa.pa10.into_open_drain_output();\n\n let scl = gpioa.pa9.into_open_drain_output();\n\n\n\n let mut i2c = dp.I2C1.i2c(sda, scl, 100.khz(), &mut rcc);\n\n\n\n let mut buffer = [0u8; 2];\n\n const MAX17048_ADDR: u8 = 0xFF;\n\n\n\n loop {\n\n i2c.write(MAX17048_ADDR, &mut buffer).unwrap();\n\n }\n\n}\n", "file_path": "examples/i2c.rs", "rank": 32, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. 
This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n let mut nss = gpioa.pa4.into_push_pull_output();\n\n let sck = gpioa.pa5;\n\n let miso = gpioa.pa6;\n\n let mosi = gpioa.pa7;\n\n\n\n // Initialise the SPI peripheral.\n\n let mut spi = dp\n\n .SPI1\n\n .spi((sck, miso, mosi), spi::MODE_0, 100_000.hz(), &mut rcc);\n\n\n\n loop {\n\n nss.set_low().unwrap();\n\n spi.write(&[0, 1]).unwrap();\n\n nss.set_high().unwrap();\n\n }\n\n}\n", "file_path": "examples/spi.rs", "rank": 33, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let cp = pac::CorePeripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut scb = cp.SCB;\n\n let mut rcc = dp.RCC.freeze(rcc::Config::msi(rcc::MSIRange::Range0));\n\n let mut exti = Exti::new(dp.EXTI);\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let mut led = gpiob.pb2.into_push_pull_output().downgrade();\n\n\n\n let mut lptim = LpTimer::init_periodic(dp.LPTIM, &mut pwr, &mut rcc, ClockSrc::Lse);\n\n\n\n let exti_line = DirectLine::Lptim1;\n\n\n\n lptim.enable_interrupts(lptim::Interrupts {\n\n autoreload_match: true,\n\n ..lptim::Interrupts::default()\n\n });\n", "file_path": "examples/lptim.rs", "rank": 34, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n let cp = cortex_m::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPI0A and GPIOB peripherals. This also enables the clock for\n\n // GPIOA and GPIOB in the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n // Configure PA0 as input.\n\n let button = gpioa.pa0.into_pull_up_input();\n\n\n\n // Configure PB6 as output.\n\n let mut led = gpiob.pb6.into_push_pull_output();\n\n\n\n // Get the delay provider.\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n", "file_path": "examples/button.rs", "rank": 35, "score": 59553.5153363265 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let adc = dp.ADC.constrain(&mut rcc);\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // The A0 connector on the B-L072Z-LRWAN1 Discovery kit\n\n let a0 = gpioa.pa0.into_analog();\n\n\n\n // Connected to the host computer via the ST-LINK\n\n let tx = gpioa.pa2;\n\n let rx = gpioa.pa3;\n\n\n\n // Initialize USART for test output\n\n let (mut tx, _) = dp\n\n .USART2\n\n .usart(\n\n tx,\n", "file_path": "examples/adc_trig.rs", "rank": 36, "score": 57697.85146119965 }, { "content": "#[interrupt]\n\nfn EXTI2_3() {\n\n // Keep the LED state.\n\n static mut STATE: bool = false;\n\n\n\n cortex_m::interrupt::free(|cs| {\n\n // Clear the interrupt flag.\n\n Exti::unpend(GpioLine::from_raw_line(2).unwrap());\n\n\n\n // Change the LED state on each interrupt.\n\n if let Some(ref mut led) = LED.borrow(cs).borrow_mut().deref_mut() {\n\n if *STATE {\n\n led.set_low().unwrap();\n\n *STATE = false;\n\n } else {\n\n led.set_high().unwrap();\n\n *STATE = true;\n\n }\n\n }\n\n });\n\n}\n", "file_path": "examples/button_irq.rs", "rank": 37, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let cp = pac::CorePeripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n let mut exti = Exti::new(dp.EXTI);\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n let mut scb = cp.SCB;\n\n\n\n // Those are the user button and blue LED on the B-L072Z-LRWAN1 Discovery\n\n // board.\n\n let button = gpiob.pb2.into_floating_input();\n\n let mut led = gpiob.pb6.into_push_pull_output();\n\n\n\n let mut syscfg = SYSCFG::new(dp.SYSCFG, &mut rcc);\n\n\n\n let line = GpioLine::from_raw_line(button.pin_number()).unwrap();\n\n\n", "file_path": "examples/exti_wakeup.rs", "rank": 38, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOB peripheral. This also enables the clock for GPIOB in\n\n // the RCC register.\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n // Configure PB6 as output.\n\n let led = gpiob.pb6.into_push_pull_output();\n\n\n\n // Configure PB2 as input.\n\n let button = gpiob.pb2.into_pull_up_input();\n\n\n\n let mut syscfg = SYSCFG::new(dp.SYSCFG, &mut rcc);\n\n let mut exti = Exti::new(dp.EXTI);\n\n\n\n // Configure the external interrupt on the falling edge for the pin 0.\n", "file_path": "examples/button_irq.rs", "rank": 39, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut aes = AES::new(dp.AES, &mut rcc);\n\n\n\n let key = [0x01234567, 0x89abcdef, 0x01234567, 0x89abcdef];\n\n let ivr = [0xfedcba98, 0x76543210, 0xfedcba98, 0x76543210];\n\n\n\n let data = [\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,\n\n 0xff,\n\n ];\n\n\n\n loop {\n\n let mut stream = aes.enable(aes::Mode::cbc_encrypt(ivr), key);\n\n\n\n let mut encrypted = [[0; 16]; 4];\n\n encrypted[0] = stream.process(&data).unwrap();\n\n encrypted[1] = stream.process(&data).unwrap();\n", "file_path": "examples/aes_cbc.rs", "rank": 40, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let adc = dp.ADC.constrain(&mut rcc);\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // The A0 connector on the B-L072Z-LRWAN1 Discovery kit\n\n let a0 = gpioa.pa0.into_analog();\n\n\n\n // Connected to the host computer via the ST-LINK\n\n let tx = gpioa.pa2;\n\n let rx = gpioa.pa3;\n\n\n\n // Initialize USART for test output\n\n let (mut tx, _) = dp\n\n .USART2\n\n .usart(\n\n tx,\n", "file_path": "examples/adc_cont.rs", "rank": 41, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut aes = AES::new(dp.AES, &mut rcc);\n\n\n\n let key = [0x01234567, 0x89abcdef, 0x01234567, 0x89abcdef];\n\n\n\n let data = [\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,\n\n 0xff,\n\n ];\n\n\n\n loop {\n\n let mut stream = aes.enable(aes::Mode::ecb_encrypt(), key);\n\n\n\n let mut encrypted = [[0; 16]; 4];\n\n encrypted[0] = stream.process(&data).unwrap();\n\n encrypted[1] = stream.process(&data).unwrap();\n\n encrypted[2] = stream.process(&data).unwrap();\n", "file_path": "examples/aes_ecb.rs", "rank": 42, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n let adc = dp.ADC.constrain(&mut rcc);\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Connected to the host computer via the ST-LINK\n\n let tx = gpioa.pa2;\n\n let rx = gpioa.pa3;\n\n\n\n // Initialize USART for test output\n\n let (mut tx, _) = dp\n\n .USART2\n\n .usart(\n\n tx,\n\n rx,\n\n serial::Config::default().baudrate(115_200.bps()),\n\n &mut rcc,\n", "file_path": "examples/adc_multi.rs", "rank": 43, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let cp = pac::CorePeripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(rcc::Config::hsi16());\n\n\n\n // Initialize all the GPIO we need\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n let mut led = gpiob.pb6.into_push_pull_output();\n\n let button = gpiob.pb2.into_pull_down_input();\n\n\n\n // Enable LED to signal that MCU is running\n\n led.set_high().unwrap();\n\n\n\n let mut scb = cp.SCB;\n\n let mut exti = Exti::new(dp.EXTI);\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n\n\n let instant = Instant::new()\n\n .set_year(19)\n", "file_path": "examples/rtc_wakeup.rs", "rank": 44, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // Configure the clock.\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n\n\n // Acquire the GPIOA peripheral. This also enables the clock for GPIOA in\n\n // the RCC register.\n\n let gpioa = dp.GPIOA.split(&mut rcc);\n\n\n\n // Configure the timer as PWM on PA1.\n\n let pwm = pwm::Timer::new(dp.TIM2, 1.khz(), &mut rcc);\n\n let mut pwm = pwm.channel2.assign(gpioa.pa1);\n\n let max_duty = pwm.get_max_duty() / 4095;\n\n pwm.enable();\n\n\n\n let mut adc = dp.ADC.constrain(&mut rcc);\n\n\n\n // Configure PA0 as analog.\n\n let mut adc_pin = gpioa.pa0.into_analog();\n\n\n\n loop {\n\n // Set the PWM duty cycle from the value read on the ADC pin.\n\n let val: u16 = adc.read(&mut adc_pin).unwrap();\n\n pwm.set_duty(max_duty * val);\n\n }\n\n}\n", "file_path": "examples/adc_pwm.rs", "rank": 45, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let cp = pac::CorePeripherals::take().unwrap();\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut scb = cp.SCB;\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n let mut delay = cp.SYST.delay(rcc.clocks);\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let sda = gpiob.pb9.into_open_drain_output();\n\n let scl = gpiob.pb8.into_open_drain_output();\n\n\n\n let mut green = gpiob.pb5.into_push_pull_output();\n\n let mut red = gpiob.pb7.into_push_pull_output();\n\n\n\n let mut i2c = dp.I2C1.i2c(sda, scl, 100.khz(), &mut rcc);\n\n\n", "file_path": "examples/i2c_dma.rs", "rank": 46, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut aes = AES::new(dp.AES, &mut rcc);\n\n\n\n let key = [0x01234567, 0x89abcdef, 0x01234567, 0x89abcdef];\n\n let ivr = [0xfedcba98, 0x76543210, 0xfedcba98];\n\n\n\n let data = [\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,\n\n 0xff,\n\n ];\n\n\n\n loop {\n\n let mut ctr_stream = aes.enable(aes::Mode::ctr(ivr), key);\n\n\n\n let mut encrypted = [[0; 16]; 4];\n\n encrypted[0] = ctr_stream.process(&data).unwrap();\n\n encrypted[1] = ctr_stream.process(&data).unwrap();\n", "file_path": "examples/aes_ctr.rs", "rank": 47, "score": 57697.85146119965 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n let mut rcc = dp.RCC.freeze(Config::hsi16());\n\n let mut aes = AES::new(dp.AES, &mut rcc);\n\n let mut dma = DMA::new(dp.DMA1, &mut rcc);\n\n\n\n let key = [0x01234567, 0x89abcdef, 0x01234567, 0x89abcdef];\n\n let ivr = [0xfedcba98, 0x76543210, 0xfedcba98];\n\n\n\n const DATA: Aligned<A4, [u8; 32]> = Aligned([\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee,\n\n 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd,\n\n 0xee, 0xff,\n\n ]);\n\n let data = Pin::new(&DATA);\n\n\n\n static mut ENCRYPTED: Aligned<A4, [u8; 32]> = Aligned([0; 32]);\n\n static mut DECRYPTED: Aligned<A4, [u8; 32]> = Aligned([0; 32]);\n\n\n", "file_path": "examples/aes_ctr_dma.rs", "rank": 48, "score": 56026.89882894259 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = pac::Peripherals::take().unwrap();\n\n\n\n // This should put the APB1 clock at 2 times the RTC clock, if I follow the\n\n // code correctly. 
Exactly the range that is still acceptable, but requires\n\n // special handling in the RTC code.\n\n let mut rcc = dp.RCC.freeze(rcc::Config::msi(rcc::MSIRange::Range0));\n\n let mut pwr = PWR::new(dp.PWR, &mut rcc);\n\n let gpiob = dp.GPIOB.split(&mut rcc);\n\n\n\n let mut led = gpiob.pb5.into_push_pull_output();\n\n\n\n let instant = Instant::new()\n\n .set_year(19)\n\n .set_month(8)\n\n .set_day(9)\n\n .set_hour(13)\n\n .set_minute(36)\n\n .set_second(0);\n\n\n", "file_path": "examples/rtc_low_apb1.rs", "rank": 49, "score": 56026.89882894259 }, { "content": "// Determine size of the flash memory in KiB.\n\n//\n\n// This information can be read from the \"Flash size register\".\n\n//\n\n// Reference:\n\n//\n\n// - STM32L0x1 reference manual, section 28.1.1\n\n// - STM32L0x2 reference manual, section 33.1.1\n\n// - STM32L0x3 reference manual, section 34.1.1\n\npub fn flash_size_in_kb() -> usize {\n\n // This is safe, as we're reading from a valid address (as per the\n\n // reference manual) which is aligned to 16 bits.\n\n unsafe { (0x1FF8_007C as *const u16).read() as usize }\n\n}\n\n\n\nextern \"C\" {\n\n /// Writes a half-page at the given address\n\n ///\n\n /// Unfortunately this function had to be implemented in C. No access to\n\n /// Flash memory is allowed after the first word has been written, and that\n\n /// includes execution of code that is located in Flash. This means the\n\n /// function that writes the half-page has to be executed from memory, and\n\n /// is not allowed to call any functions that are not located in memory.\n\n ///\n\n /// Unfortunately I found this impossible to achieve in Rust. I can write\n\n /// a Rust function that is located in RAM, using `#[link_section=\".data\"]`,\n\n /// but I failed to write any useful Rust code that doesn't include function\n\n /// calls to _something_ that is outside of my control, as so much of Rust's\n\n /// functionality is defined in terms of function calls.\n\n ///\n\n /// I ended up writing it in C, as that was the only solution I could come\n\n /// up with that will run on the stable channel (is nightly is acceptable,\n\n /// we could use a Rust function with inline assembly).\n\n fn write_half_page(address: *mut u32, words: *const u32);\n\n}\n\n\n", "file_path": "src/flash.rs", "rank": 50, "score": 48467.22900727336 }, { "content": "fn derive_key(aes: &aes::RegisterBlock) {\n\n // Select key derivation mode. This is safe, as we're writing a valid bit\n\n // pattern.\n\n aes.cr.modify(|_, w| w.mode().bits(0b01));\n\n\n\n // Enable the peripheral. 
It will be automatically disabled again once the\n\n // key has been derived.\n\n aes.cr.modify(|_, w| w.en().set_bit());\n\n\n\n // Wait for key derivation to finish\n\n while aes.sr.read().ccf().bit_is_clear() {}\n\n}\n\n\n\n/// Used to identify encryption mode\n\npub struct Encrypt;\n\n\n\n/// Used to identify decryption mode\n\npub struct Decrypt;\n\n\n\n/// A 128-bit block\n", "file_path": "src/aes.rs", "rank": 51, "score": 45493.577950437495 }, { "content": "pub trait DelayExt {\n\n fn delay(self, clocks: Clocks) -> Delay;\n\n}\n\n\n\nimpl DelayExt for SYST {\n\n fn delay(self, clocks: Clocks) -> Delay {\n\n Delay::new(self, clocks)\n\n }\n\n}\n\n\n\n/// System timer (SysTick) as a delay provider\n\npub struct Delay {\n\n ticks_per_us: u32,\n\n syst: SYST,\n\n}\n\n\n\nimpl Delay {\n\n /// Configures the system timer (SysTick) as a delay provider\n\n pub fn new(mut syst: SYST, clocks: Clocks) -> Self {\n\n syst.set_clock_source(SystClkSource::Core);\n", "file_path": "src/delay.rs", "rank": 52, "score": 42892.996527379546 }, { "content": "/// Extension trait that freezes the `RCC` peripheral with provided clocks configuration\n\npub trait RccExt {\n\n fn freeze(self, config: Config) -> Rcc;\n\n}\n\n\n\nimpl RccExt for RCC {\n\n // `cfgr` is almost always a constant, so make sure it can be constant-propagated properly by\n\n // marking this function and all `Config` constructors and setters as `#[inline]`.\n\n // This saves ~900 Bytes for the `pwr.rs` example.\n\n #[inline]\n\n fn freeze(self, cfgr: Config) -> Rcc {\n\n let (sys_clk, sw_bits) = match cfgr.mux {\n\n ClockSrc::MSI(range) => {\n\n let range = range as u8;\n\n // Set MSI range\n\n self.icscr.write(|w| w.msirange().bits(range));\n\n\n\n // Enable MSI\n\n self.cr.write(|w| w.msion().set_bit());\n\n while self.cr.read().msirdy().bit_is_clear() {}\n\n\n", "file_path": "src/rcc.rs", "rank": 53, "score": 42743.330485700455 }, { "content": "/// Two linked 16 bit timers that form a 32 bit timer.\n\npub trait LinkedTimer {\n\n /// Return the current 16 bit counter value of the MSB timer.\n\n fn get_counter_msb(&self) -> u16;\n\n\n\n /// Return the current 16 bit counter value of the LSB timer.\n\n fn get_counter_lsb(&self) -> u16;\n\n\n\n /// Return the current 32 bit counter value.\n\n fn get_counter(&self) -> u32;\n\n\n\n /// Reset the counter to 0.\n\n fn reset(&mut self);\n\n}\n\n\n\n/// A pair of timers that can be linked.\n\n///\n\n/// The two timers are configured so that an overflow of the primary timer\n\n/// triggers an update on the secondary timer. 
This way, two 16 bit timers can\n\n/// be combined to a single 32 bit timer.\n\npub struct LinkedTimerPair<PRIMARY, SECONDARY> {\n", "file_path": "src/timer.rs", "rank": 54, "score": 42598.36626026671 }, { "content": "type TxTransfer = dma::Transfer<TxTarget, TxChannel, DmaBuffer, dma::Started>;\n\n\n", "file_path": "examples/serial_dma_async.rs", "rank": 55, "score": 41322.738847214685 }, { "content": "type RxTransfer = dma::Transfer<RxTarget, RxChannel, DmaBuffer, dma::Started>;\n\n\n", "file_path": "examples/serial_dma_async.rs", "rank": 56, "score": 41322.738847214685 }, { "content": "pub trait GeneralPurposeTimer {\n\n type MasterMode;\n\n\n\n fn select_master_mode(&mut self, variant: Self::MasterMode);\n\n}\n\n\n", "file_path": "src/timer.rs", "rank": 57, "score": 41226.52790289451 }, { "content": "pub trait TimerExt<TIM> {\n\n fn timer<T>(self, timeout: T, rcc: &mut Rcc) -> Timer<TIM>\n\n where\n\n T: Into<Hertz>;\n\n}\n\n\n\n/// Hardware timers\n\npub struct Timer<TIM> {\n\n clocks: Clocks,\n\n tim: TIM,\n\n}\n\n\n\nimpl Timer<SYST> {\n\n /// Configures the SYST clock as a periodic count down timer\n\n pub fn syst<T>(mut syst: SYST, timeout: T, rcc: &mut Rcc) -> Self\n\n where\n\n T: Into<Hertz>,\n\n {\n\n syst.set_clock_source(SystClkSource::Core);\n\n let mut timer = Timer {\n", "file_path": "src/timer.rs", "rank": 58, "score": 41226.52790289451 }, { "content": "use core::fmt;\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]\n\npub struct Bps(pub u32);\n\n\n\nimpl fmt::Display for Bps {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{} bps\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]\n\npub struct Hertz(pub u32);\n\n\n\nimpl fmt::Display for Hertz {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{} Hz\", self.0)\n\n }\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 59, "score": 37886.01878212068 }, { "content": "#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]\n\npub struct MicroSeconds(pub u32);\n\n\n\nimpl fmt::Display for MicroSeconds {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{} µs\", self.0)\n\n }\n\n}\n\n\n\n/// Extension trait that adds convenience methods to the `u32` type\n", "file_path": "src/time.rs", "rank": 60, "score": 37883.44958148903 }, { "content": "impl U32Ext for u32 {\n\n fn bps(self) -> Bps {\n\n Bps(self)\n\n }\n\n\n\n fn hz(self) -> Hertz {\n\n Hertz(self)\n\n }\n\n\n\n fn khz(self) -> Hertz {\n\n Hertz(self * 1_000)\n\n }\n\n\n\n fn mhz(self) -> Hertz {\n\n Hertz(self * 1_000_000)\n\n }\n\n\n\n fn us(self) -> MicroSeconds {\n\n MicroSeconds(self)\n\n }\n", "file_path": "src/time.rs", "rank": 61, "score": 37880.71087448496 }, { "content": "\n\n fn ms(self) -> MicroSeconds {\n\n MicroSeconds(self * 1_000)\n\n }\n\n}\n\n\n\nimpl Into<MicroSeconds> for Hertz {\n\n fn into(self) -> MicroSeconds {\n\n let freq = self.0;\n\n assert!(freq != 0 && freq <= 1_000_000);\n\n MicroSeconds(1_000_000 / freq)\n\n }\n\n}\n\n\n\nimpl Into<Hertz> for MicroSeconds {\n\n fn into(self) -> Hertz {\n\n let period = self.0;\n\n assert!(period != 0 && period <= 1_000_000);\n\n Hertz(1_000_000 / period)\n\n }\n\n}\n", "file_path": "src/time.rs", "rank": 62, "score": 37876.14202149124 }, { "content": "//! 
Delays\n\nuse crate::hal::blocking::delay::{DelayMs, DelayUs};\n\nuse crate::rcc::Clocks;\n\nuse crate::time::MicroSeconds;\n\nuse cast::u32;\n\nuse cortex_m::peripheral::syst::SystClkSource;\n\nuse cortex_m::peripheral::SYST;\n\n\n", "file_path": "src/delay.rs", "rank": 63, "score": 37871.74568776124 }, { "content": " let freq = clocks.sys_clk().0;\n\n assert!(freq > 1_000_000_u32);\n\n let ticks_per_us = freq / 1_000_000_u32;\n\n Delay { syst, ticks_per_us }\n\n }\n\n pub fn delay<T>(&mut self, delay: T)\n\n where\n\n T: Into<MicroSeconds>,\n\n {\n\n self.delay_us(delay.into().0)\n\n }\n\n\n\n /// Releases the system timer (SysTick) resource\n\n pub fn free(self) -> SYST {\n\n self.syst\n\n }\n\n}\n\n\n\nimpl DelayMs<u32> for Delay {\n\n fn delay_ms(&mut self, ms: u32) {\n", "file_path": "src/delay.rs", "rank": 64, "score": 37866.476882275056 }, { "content": " self.delay_us(ms * 1_000);\n\n }\n\n}\n\n\n\nimpl DelayMs<u16> for Delay {\n\n fn delay_ms(&mut self, ms: u16) {\n\n self.delay_ms(u32(ms));\n\n }\n\n}\n\n\n\nimpl DelayMs<u8> for Delay {\n\n fn delay_ms(&mut self, ms: u8) {\n\n self.delay_ms(u32(ms));\n\n }\n\n}\n\n\n\nimpl DelayUs<u32> for Delay {\n\n fn delay_us(&mut self, us: u32) {\n\n const MAX_RVR: u32 = 0x00FF_FFFF;\n\n let mut total_rvr = self.ticks_per_us * us;\n", "file_path": "src/delay.rs", "rank": 65, "score": 37862.184506583275 }, { "content": " while total_rvr > 0 {\n\n let current_rvr = if total_rvr <= MAX_RVR {\n\n total_rvr\n\n } else {\n\n MAX_RVR\n\n };\n\n self.syst.set_reload(current_rvr);\n\n self.syst.clear_current();\n\n self.syst.enable_counter();\n\n total_rvr -= current_rvr;\n\n while !self.syst.has_wrapped() {}\n\n self.syst.disable_counter();\n\n }\n\n }\n\n}\n\n\n\nimpl DelayUs<u16> for Delay {\n\n fn delay_us(&mut self, us: u16) {\n\n self.delay_us(u32(us))\n\n }\n\n}\n\n\n\nimpl DelayUs<u8> for Delay {\n\n fn delay_us(&mut self, us: u8) {\n\n self.delay_us(u32(us))\n\n }\n\n}\n", "file_path": "src/delay.rs", "rank": 66, "score": 37859.49442063068 }, { "content": "use crate::mco;\n\nuse crate::pac::rcc::cfgr::{MCOPRE_A, MCOSEL_A};\n\nuse crate::pac::RCC;\n\nuse crate::pwr::PWR;\n\nuse crate::time::{Hertz, U32Ext};\n\n\n\n#[cfg(any(feature = \"stm32l0x2\", feature = \"stm32l0x3\"))]\n\nuse crate::{pac::CRS, syscfg::SYSCFG};\n\n\n\n/// System clock mux source\n\n#[derive(Clone, Copy)]\n\npub enum ClockSrc {\n\n MSI(MSIRange),\n\n PLL(PLLSource, PLLMul, PLLDiv),\n\n HSE(Hertz),\n\n HSI16,\n\n}\n\n\n\n/// MSI Clock Range\n\n///\n", "file_path": "src/rcc.rs", "rank": 67, "score": 37733.7963601779 }, { "content": " T: Into<Hertz>,\n\n {\n\n Config {\n\n mux: ClockSrc::HSE(freq.into()),\n\n ahb_pre: AHBPrescaler::NotDivided,\n\n apb1_pre: APBPrescaler::NotDivided,\n\n apb2_pre: APBPrescaler::NotDivided,\n\n }\n\n }\n\n}\n\n\n\n/// RCC peripheral\n\npub struct Rcc {\n\n pub clocks: Clocks,\n\n pub(crate) rb: RCC,\n\n}\n\n\n\nimpl Rcc {\n\n pub fn enable_lse(&mut self, _: &PWR) {\n\n self.rb.csr.modify(|_, w| {\n", "file_path": "src/rcc.rs", "rank": 68, "score": 37721.770372668645 }, { "content": "\n\nimpl Rcc {\n\n /// Configure MCO (Microcontroller Clock Output).\n\n pub fn configure_mco<P>(\n\n &mut self,\n\n source: MCOSEL_A,\n\n prescaler: MCOPRE_A,\n\n output_pin: P,\n\n ) -> MCOEnabled\n\n where\n\n P: mco::Pin,\n\n {\n\n output_pin.into_mco();\n\n\n\n self.rb.cfgr.modify(|_, w| {\n\n w.mcosel().variant(source);\n\n w.mcopre().variant(prescaler)\n\n });\n\n\n\n MCOEnabled(())\n\n }\n\n}\n\n\n\n/// Extension trait that freezes the `RCC` peripheral with 
provided clocks configuration\n", "file_path": "src/rcc.rs", "rank": 69, "score": 37717.96097807275 }, { "content": " }\n\n\n\n /// Returns the frequency of the APB2\n\n pub fn apb2_clk(&self) -> Hertz {\n\n self.apb2_clk\n\n }\n\n\n\n /// Returns the frequency of the APB2 timers\n\n pub fn apb2_tim_clk(&self) -> Hertz {\n\n self.apb2_tim_clk\n\n }\n\n}\n\n\n\n/// Token that exists only, if the HSI48 clock has been enabled\n\n///\n\n/// You can get an instance of this struct by calling [`Rcc::enable_hsi48`].\n\n#[derive(Clone, Copy)]\n\npub struct HSI48(());\n\n\n\n/// Token that exists only if MCO (Microcontroller Clock Out) has been enabled.\n\n///\n\n/// You can get an instance of this struct by calling [`Rcc::configure_mco`].\n\n#[derive(Clone, Copy)]\n\npub struct MCOEnabled(());\n", "file_path": "src/rcc.rs", "rank": 70, "score": 37717.60817465192 }, { "content": " // Enable LSE clock\n\n w.lseon().set_bit()\n\n });\n\n while self.rb.csr.read().lserdy().bit_is_clear() {}\n\n }\n\n}\n\n\n\n#[cfg(any(feature = \"stm32l0x2\", feature = \"stm32l0x3\"))]\n\nimpl Rcc {\n\n pub fn enable_hsi48(&mut self, syscfg: &mut SYSCFG, crs: CRS) -> HSI48 {\n\n // Reset CRS peripheral\n\n self.rb.apb1rstr.modify(|_, w| w.crsrst().set_bit());\n\n self.rb.apb1rstr.modify(|_, w| w.crsrst().clear_bit());\n\n\n\n // Enable CRS peripheral\n\n self.rb.apb1enr.modify(|_, w| w.crsen().set_bit());\n\n\n\n // Initialize CRS\n\n crs.cfgr.write(|w|\n\n // Select LSE as synchronization source\n", "file_path": "src/rcc.rs", "rank": 71, "score": 37717.241947887196 }, { "content": "}\n\n\n\nimpl Config {\n\n #[inline]\n\n pub fn clock_src(mut self, mux: ClockSrc) -> Self {\n\n self.mux = mux;\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn ahb_pre(mut self, pre: AHBPrescaler) -> Self {\n\n self.ahb_pre = pre;\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn apb1_pre(mut self, pre: APBPrescaler) -> Self {\n\n self.apb1_pre = pre;\n\n self\n\n }\n", "file_path": "src/rcc.rs", "rank": 72, "score": 37715.89607378322 }, { "content": " }\n\n\n\n /// Returns the system (core) frequency\n\n pub fn sys_clk(&self) -> Hertz {\n\n self.sys_clk\n\n }\n\n\n\n /// Returns the frequency of the AHB\n\n pub fn ahb_clk(&self) -> Hertz {\n\n self.ahb_clk\n\n }\n\n\n\n /// Returns the frequency of the APB1\n\n pub fn apb1_clk(&self) -> Hertz {\n\n self.apb1_clk\n\n }\n\n\n\n /// Returns the frequency of the APB1 timers\n\n pub fn apb1_tim_clk(&self) -> Hertz {\n\n self.apb1_tim_clk\n", "file_path": "src/rcc.rs", "rank": 73, "score": 37714.4770478502 }, { "content": "/// These ranges control the frequency of the MSI. 
Internally, these ranges map\n\n/// to the `MSIRANGE` bits in the `RCC_ICSCR` register.\n\n#[derive(Clone, Copy)]\n\npub enum MSIRange {\n\n /// Around 65.536 kHz\n\n Range0 = 0,\n\n /// Around 131.072 kHz\n\n Range1 = 1,\n\n /// Around 262.144 kHz\n\n Range2 = 2,\n\n /// Around 524.288 kHz\n\n Range3 = 3,\n\n /// Around 1.048 MHz\n\n Range4 = 4,\n\n /// Around 2.097 MHz (reset value)\n\n Range5 = 5,\n\n /// Around 4.194 MHz\n\n Range6 = 6,\n\n}\n\n\n", "file_path": "src/rcc.rs", "rank": 74, "score": 37714.32792440244 }, { "content": "\n\n #[inline]\n\n pub fn apb2_pre(mut self, pre: APBPrescaler) -> Self {\n\n self.apb2_pre = pre;\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn hsi16() -> Config {\n\n Config {\n\n mux: ClockSrc::HSI16,\n\n ahb_pre: AHBPrescaler::NotDivided,\n\n apb1_pre: APBPrescaler::NotDivided,\n\n apb2_pre: APBPrescaler::NotDivided,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn msi(range: MSIRange) -> Config {\n\n Config {\n", "file_path": "src/rcc.rs", "rank": 75, "score": 37714.22199156045 }, { "content": " // Enable HSE\n\n self.cr.write(|w| w.hseon().set_bit());\n\n while self.cr.read().hserdy().bit_is_clear() {}\n\n (true, freq.0)\n\n }\n\n PLLSource::HSI16 => {\n\n // Enable HSI\n\n self.cr.write(|w| w.hsi16on().set_bit());\n\n while self.cr.read().hsi16rdyf().bit_is_clear() {}\n\n (false, HSI_FREQ)\n\n }\n\n };\n\n\n\n // Disable PLL\n\n self.cr.modify(|_, w| w.pllon().clear_bit());\n\n while self.cr.read().pllrdy().bit_is_set() {}\n\n\n\n let mul_bytes = mul as u8;\n\n let div_bytes = div as u8;\n\n\n", "file_path": "src/rcc.rs", "rank": 76, "score": 37714.20868307484 }, { "content": "pub const HSI_FREQ: u32 = 16_000_000;\n\n\n\n/// Clocks configutation\n\npub struct Config {\n\n mux: ClockSrc,\n\n ahb_pre: AHBPrescaler,\n\n apb1_pre: APBPrescaler,\n\n apb2_pre: APBPrescaler,\n\n}\n\n\n\nimpl Default for Config {\n\n #[inline]\n\n fn default() -> Config {\n\n Config {\n\n mux: ClockSrc::MSI(MSIRange::default()),\n\n ahb_pre: AHBPrescaler::NotDivided,\n\n apb1_pre: APBPrescaler::NotDivided,\n\n apb2_pre: APBPrescaler::NotDivided,\n\n }\n\n }\n", "file_path": "src/rcc.rs", "rank": 77, "score": 37714.180973608134 }, { "content": " let (apb2_freq, apb2_tim_freq) = match cfgr.apb2_pre {\n\n APBPrescaler::NotDivided => (ahb_freq, ahb_freq),\n\n pre => {\n\n let freq = ahb_freq / (1 << (pre as u8 - 3));\n\n (freq, freq * 2)\n\n }\n\n };\n\n\n\n let clocks = Clocks {\n\n source: cfgr.mux,\n\n sys_clk: sys_clk.hz(),\n\n ahb_clk: ahb_freq.hz(),\n\n apb1_clk: apb1_freq.hz(),\n\n apb2_clk: apb2_freq.hz(),\n\n apb1_tim_clk: apb1_tim_freq.hz(),\n\n apb2_tim_clk: apb2_tim_freq.hz(),\n\n };\n\n\n\n Rcc { rb: self, clocks }\n\n }\n", "file_path": "src/rcc.rs", "rank": 78, "score": 37713.85612928861 }, { "content": " unsafe { w.syncsrc().bits(0b01) });\n\n crs.cr\n\n .modify(|_, w| w.autotrimen().set_bit().cen().set_bit());\n\n\n\n // Enable VREFINT reference for HSI48 oscillator\n\n syscfg\n\n .syscfg\n\n .cfgr3\n\n .modify(|_, w| w.enref_hsi48().set_bit().en_vrefint().set_bit());\n\n\n\n // Select HSI48 as USB clock\n\n self.rb.ccipr.modify(|_, w| w.hsi48msel().set_bit());\n\n\n\n // Enable dedicated USB clock\n\n self.rb.crrcr.modify(|_, w| w.hsi48on().set_bit());\n\n while self.rb.crrcr.read().hsi48rdy().bit_is_clear() {}\n\n\n\n HSI48(())\n\n }\n\n}\n", "file_path": "src/rcc.rs", "rank": 79, "score": 37711.5343030391 }, { "content": "impl Default for MSIRange {\n\n fn default() -> MSIRange {\n\n MSIRange::Range5\n\n }\n\n}\n\n\n\n/// PLL divider\n\n#[derive(Clone, Copy)]\n\npub enum 
PLLDiv {\n\n Div2 = 1,\n\n Div3 = 2,\n\n Div4 = 3,\n\n}\n\n\n\n/// PLL multiplier\n\n#[derive(Clone, Copy)]\n\npub enum PLLMul {\n\n Mul3 = 0,\n\n Mul4 = 1,\n\n Mul6 = 2,\n", "file_path": "src/rcc.rs", "rank": 80, "score": 37711.5343030391 }, { "content": " w.pllmul()\n\n .bits(mul_bytes)\n\n .plldiv()\n\n .bits(div_bytes)\n\n .pllsrc()\n\n .bit(src_bit)\n\n });\n\n\n\n // Enable PLL\n\n self.cr.modify(|_, w| w.pllon().set_bit());\n\n while self.cr.read().pllrdy().bit_is_clear() {}\n\n\n\n (freq, 3)\n\n }\n\n };\n\n\n\n self.cfgr.modify(|_, w| unsafe {\n\n w.sw()\n\n .bits(sw_bits)\n\n .hpre()\n", "file_path": "src/rcc.rs", "rank": 81, "score": 37711.5343030391 }, { "content": "}\n\n\n\n/// Frozen clock frequencies\n\n///\n\n/// The existence of this value indicates that the clock configuration can no longer be changed\n\n#[derive(Clone, Copy)]\n\npub struct Clocks {\n\n source: ClockSrc,\n\n sys_clk: Hertz,\n\n ahb_clk: Hertz,\n\n apb1_clk: Hertz,\n\n apb1_tim_clk: Hertz,\n\n apb2_clk: Hertz,\n\n apb2_tim_clk: Hertz,\n\n}\n\n\n\nimpl Clocks {\n\n /// Returns the clock source\n\n pub fn source(&self) -> &ClockSrc {\n\n &self.source\n", "file_path": "src/rcc.rs", "rank": 82, "score": 37711.5343030391 }, { "content": " let freq = match mul {\n\n PLLMul::Mul3 => freq * 3,\n\n PLLMul::Mul4 => freq * 4,\n\n PLLMul::Mul6 => freq * 6,\n\n PLLMul::Mul8 => freq * 8,\n\n PLLMul::Mul12 => freq * 12,\n\n PLLMul::Mul16 => freq * 16,\n\n PLLMul::Mul24 => freq * 24,\n\n PLLMul::Mul32 => freq * 32,\n\n PLLMul::Mul48 => freq * 48,\n\n };\n\n\n\n let freq = match div {\n\n PLLDiv::Div2 => freq / 2,\n\n PLLDiv::Div3 => freq / 3,\n\n PLLDiv::Div4 => freq / 4,\n\n };\n\n assert!(freq <= 32.mhz().0);\n\n\n\n self.cfgr.write(move |w| unsafe {\n", "file_path": "src/rcc.rs", "rank": 83, "score": 37711.5343030391 }, { "content": " mux: ClockSrc::MSI(range),\n\n ahb_pre: AHBPrescaler::NotDivided,\n\n apb1_pre: APBPrescaler::NotDivided,\n\n apb2_pre: APBPrescaler::NotDivided,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn pll(pll_src: PLLSource, pll_mul: PLLMul, pll_div: PLLDiv) -> Config {\n\n Config {\n\n mux: ClockSrc::PLL(pll_src, pll_mul, pll_div),\n\n ahb_pre: AHBPrescaler::NotDivided,\n\n apb1_pre: APBPrescaler::NotDivided,\n\n apb2_pre: APBPrescaler::NotDivided,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn hse<T>(freq: T) -> Config\n\n where\n", "file_path": "src/rcc.rs", "rank": 84, "score": 37711.5343030391 }, { "content": " let freq = 32_768 * (1 << (range + 1));\n\n (freq, 0)\n\n }\n\n ClockSrc::HSI16 => {\n\n // Enable HSI16\n\n self.cr.write(|w| w.hsi16on().set_bit());\n\n while self.cr.read().hsi16rdyf().bit_is_clear() {}\n\n\n\n (HSI_FREQ, 1)\n\n }\n\n ClockSrc::HSE(freq) => {\n\n // Enable HSE\n\n self.cr.write(|w| w.hseon().set_bit());\n\n while self.cr.read().hserdy().bit_is_clear() {}\n\n\n\n (freq.0, 2)\n\n }\n\n ClockSrc::PLL(src, mul, div) => {\n\n let (src_bit, freq) = match src {\n\n PLLSource::HSE(freq) => {\n", "file_path": "src/rcc.rs", "rank": 85, "score": 37711.5343030391 }, { "content": "}\n\n\n\n/// APB prescaler\n\n#[derive(Clone, Copy)]\n\npub enum APBPrescaler {\n\n NotDivided = 0,\n\n Div2 = 0b100,\n\n Div4 = 0b101,\n\n Div8 = 0b110,\n\n Div16 = 0b111,\n\n}\n\n\n\n/// PLL clock input source\n\n#[derive(Clone, Copy)]\n\npub enum PLLSource {\n\n HSI16,\n\n HSE(Hertz),\n\n}\n\n\n\n/// HSI speed\n", "file_path": "src/rcc.rs", "rank": 86, "score": 37711.5343030391 }, { "content": " .bits(cfgr.ahb_pre as u8)\n\n .ppre1()\n\n .bits(cfgr.apb1_pre as u8)\n\n .ppre2()\n\n .bits(cfgr.apb2_pre as 
u8)\n\n });\n\n\n\n let ahb_freq = match cfgr.ahb_pre {\n\n AHBPrescaler::NotDivided => sys_clk,\n\n pre => sys_clk / (1 << (pre as u8 - 7)),\n\n };\n\n\n\n let (apb1_freq, apb1_tim_freq) = match cfgr.apb1_pre {\n\n APBPrescaler::NotDivided => (ahb_freq, ahb_freq),\n\n pre => {\n\n let freq = ahb_freq / (1 << (pre as u8 - 3));\n\n (freq, freq * 2)\n\n }\n\n };\n\n\n", "file_path": "src/rcc.rs", "rank": 87, "score": 37711.5343030391 }, { "content": " Mul8 = 3,\n\n Mul12 = 4,\n\n Mul16 = 5,\n\n Mul24 = 6,\n\n Mul32 = 7,\n\n Mul48 = 8,\n\n}\n\n\n\n/// AHB prescaler\n\n#[derive(Clone, Copy)]\n\npub enum AHBPrescaler {\n\n NotDivided = 0,\n\n Div2 = 0b1000,\n\n Div4 = 0b1001,\n\n Div8 = 0b1010,\n\n Div16 = 0b1011,\n\n Div64 = 0b1100,\n\n Div128 = 0b1101,\n\n Div256 = 0b1110,\n\n Div512 = 0b1111,\n", "file_path": "src/rcc.rs", "rank": 88, "score": 37711.5343030391 }, { "content": "//! Timers\n\nuse crate::hal::timer::{CountDown, Periodic};\n\nuse crate::pac::{tim2, tim21, tim22, tim6, TIM2, TIM21, TIM22, TIM3, TIM6};\n\nuse crate::rcc::{Clocks, Rcc};\n\nuse crate::time::Hertz;\n\nuse cast::{u16, u32};\n\nuse cortex_m::peripheral::syst::SystClkSource;\n\nuse cortex_m::peripheral::SYST;\n\nuse nb;\n\nuse void::Void;\n\n\n", "file_path": "src/timer.rs", "rank": 89, "score": 37608.6163923462 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_halt;\n\n\n\nuse core::cell::RefCell;\n\nuse core::ops::DerefMut;\n\n\n\nuse cortex_m::asm;\n\nuse cortex_m::interrupt::Mutex;\n\nuse cortex_m::peripheral::NVIC;\n\nuse cortex_m_rt::entry;\n\nuse stm32l0xx_hal::{\n\n gpio::*,\n\n pac::{self, interrupt, Interrupt},\n\n prelude::*,\n\n rcc::Config,\n\n timer::Timer,\n\n};\n\n\n\nstatic LED: Mutex<RefCell<Option<gpioa::PA1<Output<PushPull>>>>> = Mutex::new(RefCell::new(None));\n\nstatic TIMER: Mutex<RefCell<Option<Timer<pac::TIM2>>>> = Mutex::new(RefCell::new(None));\n\n\n\n#[entry]\n", "file_path": "examples/timer.rs", "rank": 90, "score": 37603.75261290561 }, { "content": " tim: syst,\n\n clocks: rcc.clocks,\n\n };\n\n timer.start(timeout);\n\n timer\n\n }\n\n\n\n /// Starts listening\n\n pub fn listen(&mut self) {\n\n self.tim.enable_interrupt()\n\n }\n\n\n\n /// Stops listening\n\n pub fn unlisten(&mut self) {\n\n self.tim.disable_interrupt()\n\n }\n\n}\n\n\n\nimpl CountDown for Timer<SYST> {\n\n type Time = Hertz;\n", "file_path": "src/timer.rs", "rank": 91, "score": 37602.24285731559 }, { "content": " {\n\n Timer::$tim(self, timeout, rcc)\n\n }\n\n }\n\n\n\n impl Timer<$TIM> where $TIM: GeneralPurposeTimer {\n\n /// Configures a TIM peripheral as a periodic count down timer\n\n pub fn $tim<T>(tim: $TIM, timeout: T, rcc: &mut Rcc) -> Self\n\n where\n\n T: Into<Hertz>,\n\n {\n\n rcc.rb.$apbenr.modify(|_, w| w.$timXen().set_bit());\n\n rcc.rb.$apbrstr.modify(|_, w| w.$timXrst().set_bit());\n\n rcc.rb.$apbrstr.modify(|_, w| w.$timXrst().clear_bit());\n\n\n\n let mut timer = Timer {\n\n tim,\n\n clocks: rcc.clocks,\n\n };\n\n timer.start(timeout);\n", "file_path": "src/timer.rs", "rank": 92, "score": 37598.47230507675 }, { "content": " }\n\n }\n\n\n\n impl CountDown for Timer<$TIM> {\n\n type Time = Hertz;\n\n\n\n fn start<T>(&mut self, timeout: T)\n\n where\n\n T: Into<Hertz>,\n\n {\n\n // pause\n\n self.tim.cr1.modify(|_, w| w.cen().clear_bit());\n\n // reset counter\n\n self.tim.cnt.reset();\n\n\n\n let freq = timeout.into().0;\n\n let ticks = self.clocks.$timclk().0 / freq;\n\n let psc = u16((ticks - 1) / (1 << 16)).unwrap();\n\n self.tim.psc.write(|w| w.psc().bits(psc));\n\n // This is only 
unsafe for some timers, so we need this to\n", "file_path": "src/timer.rs", "rank": 93, "score": 37597.95116710512 }, { "content": "}\n\n\n\nimpl TimerExt<SYST> for SYST {\n\n fn timer<T>(self, timeout: T, rcc: &mut Rcc) -> Timer<SYST>\n\n where\n\n T: Into<Hertz>,\n\n {\n\n Timer::syst(self, timeout, rcc)\n\n }\n\n}\n\n\n\nimpl Periodic for Timer<SYST> {}\n\n\n\nmacro_rules! timers {\n\n ($($TIM:ident: ($tim:ident, $timXen:ident, $timXrst:ident, $apbenr:ident, $apbrstr:ident, $timclk:ident, $mms:ty),)+) => {\n\n $(\n\n impl TimerExt<$TIM> for $TIM {\n\n fn timer<T>(self, timeout: T, rcc: &mut Rcc) -> Timer<$TIM>\n\n where\n\n T: Into<Hertz>,\n", "file_path": "src/timer.rs", "rank": 94, "score": 37596.14571269025 }, { "content": " pub fn $new(tim_primary: $PRIMARY, tim_secondary: $SECONDARY, rcc: &mut Rcc) -> Self {\n\n // Enable timers\n\n rcc.rb.$apbenr.modify(|_, w| w.$master_en().set_bit());\n\n rcc.rb.$apbenr.modify(|_, w| w.$slave_en().set_bit());\n\n\n\n // Reset timers\n\n rcc.rb.$apbrstr.modify(|_, w| w.$master_rst().set_bit());\n\n rcc.rb.$apbrstr.modify(|_, w| w.$master_rst().clear_bit());\n\n rcc.rb.$apbrstr.modify(|_, w| w.$slave_rst().set_bit());\n\n rcc.rb.$apbrstr.modify(|_, w| w.$slave_rst().clear_bit());\n\n\n\n // Enable counter\n\n tim_primary.cr1.modify(|_, w| w.cen().set_bit());\n\n tim_secondary.cr1.modify(|_, w| w.cen().set_bit());\n\n\n\n // In the MMS (Master Mode Selection) register, set the master mode so\n\n // that a rising edge is output on the trigger output TRGO every time\n\n // an update event is generated.\n\n tim_primary.cr2.modify(|_, w| w.mms().variant(<$mms>::UPDATE));\n\n\n", "file_path": "src/timer.rs", "rank": 95, "score": 37593.78793369216 }, { "content": " timer\n\n }\n\n\n\n /// Starts listening\n\n pub fn listen(&mut self) {\n\n self.tim.dier.write(|w| w.uie().set_bit());\n\n }\n\n\n\n /// Stops listening\n\n pub fn unlisten(&mut self) {\n\n self.tim.dier.write(|w| w.uie().clear_bit());\n\n }\n\n\n\n /// Clears interrupt flag\n\n pub fn clear_irq(&mut self) {\n\n self.tim.sr.write(|w| w.uif().clear_bit());\n\n }\n\n\n\n /// Releases the TIM peripheral\n\n pub fn release(self) -> $TIM {\n", "file_path": "src/timer.rs", "rank": 96, "score": 37592.85721715747 }, { "content": " *LED.borrow(cs).borrow_mut() = Some(led);\n\n *TIMER.borrow(cs).borrow_mut() = Some(timer);\n\n });\n\n\n\n // Enable the timer interrupt in the NVIC.\n\n unsafe {\n\n NVIC::unmask(Interrupt::TIM2);\n\n }\n\n\n\n loop {\n\n asm::wfi();\n\n }\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 97, "score": 37591.97490218345 }, { "content": " }\n\n\n\n /// Return the current 32 bit counter value.\n\n ///\n\n /// Note: Due to the potential for a race condition between\n\n /// reading MSB and LSB, it's possible that the registers must\n\n /// be re-read once. Therefore reading the counter value is not\n\n /// constant time.\n\n fn get_counter(&self) -> u32 {\n\n loop {\n\n let msb = self.tim_secondary.cnt.read().cnt().bits() as u32;\n\n let lsb = self.tim_primary.cnt.read().cnt().bits() as u32;\n\n\n\n // Because the timer is still running at high frequency\n\n // between reading MSB and LSB, it's possible that LSB\n\n // has already overflowed. 
Therefore we read MSB again\n\n // to check that it hasn't changed.\n\n let msb_again = self.tim_secondary.cnt.read().cnt().bits() as u32;\n\n if msb == msb_again {\n\n return (msb << 16) | lsb;\n", "file_path": "src/timer.rs", "rank": 98, "score": 37591.5752884296 }, { "content": " self.tim.sr.modify(|_, w| w.uif().clear_bit());\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n impl Periodic for Timer<$TIM> {}\n\n\n\n impl GeneralPurposeTimer for $TIM {\n\n type MasterMode = $mms;\n\n\n\n fn select_master_mode(&mut self, variant: Self::MasterMode) {\n\n self.cr2.modify(|_, w| w.mms().variant(variant));\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\ntimers! {\n", "file_path": "src/timer.rs", "rank": 99, "score": 37590.51454573721 } ]
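The `src/timer.rs` excerpt above ends with the race check used when two chained 16-bit counters are combined into one 32-bit value: the LSB timer keeps running between the two register reads, so the MSB is read a second time to detect an overflow in between. Below is a minimal, hardware-agnostic sketch of that read pattern; the `read_msb`/`read_lsb` closures are hypothetical stand-ins for the secondary/primary `CNT` register reads and are not part of the retrieved source.

```rust
// Combine two 16-bit counter reads into a 32-bit value, retrying when the
// low half overflows between the reads (same idea as the excerpt above).
fn combined_counter(read_msb: impl Fn() -> u16, read_lsb: impl Fn() -> u16) -> u32 {
    loop {
        let msb = read_msb() as u32;
        let lsb = read_lsb() as u32;
        // Re-read the MSB: if it changed, the LSB overflowed in between, so retry.
        let msb_again = read_msb() as u32;
        if msb == msb_again {
            return (msb << 16) | lsb;
        }
    }
}

fn main() {
    // Constant "register" values stand in for a paused timer pair.
    let counter = combined_counter(|| 0x0001, || 0x00ff);
    assert_eq!(counter, 0x0001_00ff);
    println!("combined counter = {:#010x}", counter);
}
```

The retry loop terminates after at most one extra iteration in practice, because a second wrap of the low half within a single read sequence would require the reads to take longer than a full 16-bit counter period.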
Rust
src/lib.rs
msakuta/rotate-enum
4a4b64a3b28bd30f961688f1462f9db57dbd6502
use core::panic;

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput};

#[proc_macro_derive(RotateEnum)]
pub fn rotate_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };

    let nexts = variants
        .iter()
        .skip(1)
        .chain(variants.get(0))
        .map(|v| (&v.ident))
        .collect::<Vec<_>>();

    let tokens = quote! {
        impl #name{
            pub fn next(self) -> Self {
                match self {
                    #(Self::#variants => Self::#nexts, )*
                }
            }
            pub fn prev(self) -> Self {
                match self {
                    #(Self::#nexts => Self::#variants, )*
                }
            }
        }
    };

    tokens.into()
}

#[proc_macro_derive(ShiftEnum)]
pub fn shift_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };

    let nexts = variants
        .iter()
        .skip(1)
        .map(|v| quote! { Some(Self::#v) })
        .chain(Some(quote! { None }))
        .collect::<Vec<_>>();
    let none_quote = Some(quote! { None });
    let prevs = variants
        .iter()
        .take(variants.len() - 1)
        .map(|v| quote! { Some(Self::#v) })
        .collect::<Vec<_>>();
    let prevs = none_quote.iter().chain(&prevs).collect::<Vec<_>>();

    let tokens = quote! {
        impl #name{
            pub fn next(self) -> Option<Self> {
                match self {
                    #(Self::#variants => #nexts, )*
                }
            }
            pub fn prev(self) -> Option<Self> {
                match self {
                    #(Self::#variants => #prevs, )*
                }
            }
        }
    };

    tokens.into()
}

#[proc_macro_derive(IterEnum)]
pub fn iter_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };

    let first_variant = variants
        .first()
        .expect("derive(IterEnum) expects at least one variant in enum");

    let nexts = variants
        .iter()
        .skip(1)
        .map(|v| quote! { Some(#name::#v) })
        .chain(Some(quote! { None }))
        .collect::<Vec<_>>();

    let iterator_name = syn::Ident::new(&(name.to_string() + "Iterator"), name.span());

    let tokens = quote! {
        struct #iterator_name(Option<#name>);

        impl #iterator_name {
            fn new() -> Self {
                Self(Some(#name::#first_variant))
            }
        }

        impl Iterator for #iterator_name {
            type Item = #name;
            fn next(&mut self) -> Option<Self::Item> {
                let ret = self.0.clone();
                self.0 = match self.0 {
                    #(Some(#name::#variants) => #nexts, )*
                    None => None,
                };
                ret
            }
        }

        impl #name {
            fn iter(&self) -> #iterator_name {
                #iterator_name(Some(self.clone()))
            }
        }
    };

    tokens.into()
}
use core::panic;

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput};

#[proc_macro_derive(RotateEnum)]
pub fn rotate_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;
    let nexts = variants
        .iter()
        .skip(1)
        .chain(variants.get(0))
        .map(|v| (&v.ident))
        .collect::<Vec<_>>();

    let tokens = quote! {
        impl #name{
            pub fn next(self) -> Self {
                match self {
                    #(Self::#variants => Self::#nexts, )*
                }
            }
            pub fn prev(self) -> Self {
                match self {
                    #(Self::#nexts => Self::#variants, )*
                }
            }
        }
    };

    tokens.into()
}

#[proc_macro_derive(ShiftEnum)]
pub fn shift_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };

    let nexts = variants
        .iter()
        .skip(1)
        .map(|v| quote! { Some(Self::#v) })
        .chain(Some(quote! { None }))
        .collect::<Vec<_>>();
    let none_quote = Some(quote! { None });
    let prevs = variants
        .iter()
        .take(variants.len() - 1)
        .map(|v| quote! { Some(Self::#v) })
        .collect::<Vec<_>>();
    let prevs = none_quote.iter().chain(&prevs).collect::<Vec<_>>();

    let tokens = quote! {
        impl #name{
            pub fn next(self) -> Option<Self> {
                match self {
                    #(Self::#variants => #nexts, )*
                }
            }
            pub fn prev(self) -> Option<Self> {
                match self {
                    #(Self::#variants => #prevs, )*
                }
            }
        }
    };

    tokens.into()
}

#[proc_macro_derive(IterEnum)]
pub fn iter_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;

    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };

    let first_variant = variants
        .first()
        .expect("derive(IterEnum) expects at least one variant in enum");

    let nexts = variants
        .iter()
        .skip(1)
        .map(|v| quote! { Some(#name::#v) })
        .chain(Some(quote! { None }))
        .collect::<Vec<_>>();

    let iterator_name = syn::Ident::new(&(name.to_string() + "Iterator"), name.span());

    let tokens = quote! {
        struct #iterator_name(Option<#name>);

        impl #iterator_name {
            fn new() -> Self {
                Self(Some(#name::#first_variant))
            }
        }

        impl Iterator for #iterator_name {
            type Item = #name;
            fn next(&mut self) -> Option<Self::Item> {
                let ret = self.0.clone();
                self.0 = match self.0 {
                    #(Some(#name::#variants) => #nexts, )*
                    None => None,
                };
                ret
            }
        }

        impl #name {
            fn iter(&self) -> #iterator_name {
                #iterator_name(Some(self.clone()))
            }
        }
    };

    tokens.into()
}
    let variants = if let Data::Enum(data) = &input.data {
        data.variants.iter().collect::<Vec<_>>()
    } else {
        panic!("derive(RotateEnum) must be applied to an enum");
    };
assignment_statement
[ { "content": "#[test]\n\nfn test_shift() {\n\n let up = Direction::Up;\n\n let left = Direction::Left;\n\n let down = Direction::Down;\n\n let right = Direction::Right;\n\n\n\n let mut iter = up.iter();\n\n assert!(iter.next() == Some(up));\n\n assert!(iter.next() == Some(left));\n\n assert!(iter.next() == Some(down));\n\n assert!(iter.next() == Some(right));\n\n assert!(iter.next() == None);\n\n\n\n assert_eq!(up.iter().collect::<Vec<_>>(), vec![up, left, down, right]);\n\n\n\n assert!(Direction::Up.iter().next() == Some(Direction::Up));\n\n\n\n assert_eq!(\n\n DirectionIterator::new().collect::<Vec<_>>(),\n\n vec![\n\n Direction::Up,\n\n Direction::Left,\n\n Direction::Down,\n\n Direction::Right,\n\n ]\n\n );\n\n}\n", "file_path": "tests/iter.rs", "rank": 3, "score": 25134.49615230503 }, { "content": "#[test]\n\nfn test_rotate() {\n\n let up = Direction::Up;\n\n let left = Direction::Left;\n\n let down = Direction::Down;\n\n let right = Direction::Right;\n\n\n\n assert!(up.next() == left);\n\n assert!(left.next() == down);\n\n assert!(down.next() == right);\n\n assert!(right.next() == up);\n\n\n\n assert!(up.prev() == right);\n\n assert!(left.prev() == up);\n\n assert!(down.prev() == left);\n\n assert!(right.prev() == down);\n\n}\n", "file_path": "tests/rotate.rs", "rank": 4, "score": 25134.49615230503 }, { "content": "#[test]\n\nfn test_shift() {\n\n let up = Direction::Up;\n\n let left = Direction::Left;\n\n let down = Direction::Down;\n\n let right = Direction::Right;\n\n\n\n assert!(up.next() == Some(left));\n\n assert!(left.next() == Some(down));\n\n assert!(down.next() == Some(right));\n\n assert!(right.next() == None);\n\n\n\n assert!(up.prev() == None);\n\n assert!(left.prev() == Some(up));\n\n assert!(down.prev() == Some(left));\n\n assert!(right.prev() == Some(down));\n\n}\n", "file_path": "tests/shift.rs", "rank": 5, "score": 25134.49615230503 }, { "content": "## Iterating\n\n\n\nThis crate also provides `IterEnum`, which will implement `Iterator` object\n\nthat yields enum variants in sequence. The first yield result will be the same\n\nvariant as the one started the iterator, i.e. 
`Direction::Up.iter().next() == Some(Direction::Up)`.\n\n\n\n```rust\n\nlet up = Direction::Up;\n\nlet left = Direction::Left;\n\nlet down = Direction::Down;\n\nlet right = Direction::Right;\n\n\n\nlet mut iter = up.iter();\n\nassert!(iter.next() == Some(up));\n\nassert!(iter.next() == Some(left));\n\nassert!(iter.next() == Some(down));\n\nassert!(iter.next() == Some(right));\n\nassert!(iter.next() == None);\n\n\n\nassert_eq!(up.iter().collect::<Vec<_>>(), vec![up, left, down, right]);\n\n```\n\n\n\nNote that it is not the same as `ShiftEnum` in the sense that the iterator is one-directional, which means you can go only forward and not `prev()`.\n\nIt can also be used with iterator methods like `collect()`.\n\n\n\n`IterEnum` also requires deriving `Clone`.\n\n\n\n## Usage\n\n\n\nUse `#[derive(...)]` macro to annotate your enum.\n\n\n\n```rust\n\nuse rotate_enum::RotateEnum;\n\n\n\n#[derive(RotateEnum)]\n\nenum Direction {\n\n Up,\n\n Left,\n\n Down,\n\n Right,\n\n}\n\n```\n", "file_path": "README.md", "rank": 8, "score": 2.9283611569779975 }, { "content": "![Rust](https://github.com/msakuta/rotate-enum/workflows/Rust/badge.svg)\n\n\n\n# rotate-enum crate\n\n\n\nSimple derive macros that implement `prev()` and `next()` methods to an enum in Rust\n\n\n\n## Motivation\n\n\n\nSometimes you define an enum like this\n\n\n\n```rust\n\nenum Direction {\n\n Up,\n\n Left,\n\n Down,\n\n Right,\n\n}\n\n```\n\n\n\nand you want to rotate them in some logic,\n\n\n\n```rust\n\nlet up = Direction::Up;\n\nlet left = Direction::Left;\n\nlet down = Direction::Down;\n\nlet right = Direction::Right;\n\n\n\nassert!(up.next() == left);\n\nassert!(left.next() == down);\n\nassert!(down.next() == right);\n\nassert!(right.next() == up);\n\n\n\nassert!(up.prev() == right);\n\nassert!(left.prev() == up);\n\nassert!(down.prev() == left);\n\nassert!(right.prev() == down);\n\n```\n\n\n\nYou can of course implement these methods manually, but it's repetitive and error prone.\n\nDon't you think it should be automated?\n\nThis crate provides a `RotateEnum` derive macro to just do this.\n\n\n\n\n\n## Shifting\n\n\n\nThis crate also provides `ShiftEnum`, which will exhaust at the end of the enum list,\n\nrather than rotating.\n\n\n\n```rust\n\nlet up = Direction::Up;\n\nlet left = Direction::Left;\n\nlet down = Direction::Down;\n\nlet right = Direction::Right;\n\n\n\nassert!(up.next() == Some(left));\n\nassert!(left.next() == Some(down));\n\nassert!(down.next() == Some(right));\n\nassert!(right.next() == None);\n\n\n\nassert!(up.prev() == None);\n\nassert!(left.prev() == Some(up));\n\nassert!(down.prev() == Some(left));\n\nassert!(right.prev() == Some(down));\n\n```\n\n\n\nNote that you can only derive either one of `RotateEnum` or `ShiftEnum`, but not both, because their semantics conflict.\n\n\n", "file_path": "README.md", "rank": 11, "score": 1.894107368171654 }, { "content": "use rotate_enum::RotateEnum;\n\n\n\n#[derive(RotateEnum, PartialEq, Clone, Copy)]\n", "file_path": "tests/rotate.rs", "rank": 13, "score": 1.7675118308952849 }, { "content": "use rotate_enum::ShiftEnum;\n\n\n\n#[derive(ShiftEnum, PartialEq, Clone, Copy)]\n", "file_path": "tests/shift.rs", "rank": 14, "score": 1.7675118308952849 }, { "content": "use rotate_enum::IterEnum;\n\n\n\n#[derive(IterEnum, PartialEq, Clone, Copy, Debug)]\n", "file_path": "tests/iter.rs", "rank": 15, "score": 1.7275703400807145 } ]
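For the four-variant `Direction` enum used in the README and tests above, the `RotateEnum` derive expands to a plain `impl` with one match arm per variant, as the `quote!` block in this row's `file_code` shows. Below is a hand-written sketch of that expansion; the standalone enum definition and `main` are illustrative only (in practice the macro generates the `impl` block for you), and the derived traits are chosen just so the assertions compile.

```rust
// Hand-expanded equivalent of `#[derive(RotateEnum)]` for the README's enum.
#[derive(PartialEq, Clone, Copy, Debug)]
enum Direction {
    Up,
    Left,
    Down,
    Right,
}

impl Direction {
    // `nexts` = variants rotated left by one: [Left, Down, Right, Up].
    pub fn next(self) -> Self {
        match self {
            Self::Up => Self::Left,
            Self::Left => Self::Down,
            Self::Down => Self::Right,
            Self::Right => Self::Up,
        }
    }
    // `prev` inverts the same mapping.
    pub fn prev(self) -> Self {
        match self {
            Self::Left => Self::Up,
            Self::Down => Self::Left,
            Self::Right => Self::Down,
            Self::Up => Self::Right,
        }
    }
}

fn main() {
    assert_eq!(Direction::Up.next(), Direction::Left);
    assert_eq!(Direction::Up.prev(), Direction::Right);
    println!("round-trip: {:?}", Direction::Up.next().prev());
}
```

This matches the behavior exercised by `tests/rotate.rs` in the context items: `next()` and `prev()` are total (no `Option`), which is exactly what distinguishes `RotateEnum` from `ShiftEnum`.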
Rust
src/ping.rs
FrozenDroid/esp-idf-svc
d394bc67d288b5a3b8dcdeb896adcdf1ba7f1533
use core::{mem, ptr, time::Duration}; use ::log::*; #[cfg(feature = "std")] use std::sync::*; use embedded_svc::ipv4; use embedded_svc::mutex::Mutex; use embedded_svc::ping::*; use esp_idf_sys::*; use crate::private::common::*; #[derive(Debug)] pub struct EspPing(u32); unsafe impl Send for EspPing {} unsafe impl Sync for EspPing {} impl Default for EspPing { fn default() -> Self { Self(0) } } impl EspPing { pub fn new(interface_index: u32) -> Self { Self(interface_index) } fn run_ping<F: Fn(&Summary, &Reply)>( &self, ip: ipv4::Ipv4Addr, conf: &Configuration, tracker: &mut Tracker<F>, ) -> Result<(), EspError> { #[allow(clippy::needless_update)] let config = esp_ping_config_t { count: conf.count, interval_ms: conf.interval.as_millis() as u32, timeout_ms: conf.timeout.as_millis() as u32, data_size: conf.data_size, tos: conf.tos, target_addr: ip_addr_t { u_addr: ip_addr__bindgen_ty_1 { ip4: Newtype::<ip4_addr_t>::from(ip).0, }, type_: 0, }, task_stack_size: 4096, task_prio: 2, interface: self.0, ..Default::default() }; let callbacks = esp_ping_callbacks_t { on_ping_success: Some(EspPing::on_ping_success::<F>), on_ping_timeout: Some(EspPing::on_ping_timeout::<F>), on_ping_end: Some(EspPing::on_ping_end::<F>), cb_args: tracker as *mut Tracker<F> as *mut c_types::c_void, }; let mut handle: esp_ping_handle_t = ptr::null_mut(); let handle_ref = &mut handle; esp!(unsafe { esp_ping_new_session(&config, &callbacks, handle_ref as *mut *mut c_types::c_void) })?; if handle.is_null() { return Err(EspError::from(ESP_ERR_INVALID_ARG as _).unwrap()); } info!("Ping session established, got handle {:?}", handle); #[allow(clippy::mutex_atomic)] tracker.running.with_lock(|running| *running = true); esp!(unsafe { esp_ping_start(handle) })?; info!("Ping session started"); info!("Waiting for the ping session to complete"); #[cfg(feature = "std")] { #[allow(clippy::mutex_atomic)] let _running = tracker .cvar .wait_while(tracker.running.lock().unwrap(), |running| *running) .unwrap(); } #[cfg(not(feature = "std"))] { while tracker.running.with_lock(|running| *running) { unsafe { vTaskDelay(500) }; } } esp!(unsafe { esp_ping_stop(handle) })?; info!("Ping session stopped"); esp!(unsafe { esp_ping_delete_session(handle) })?; info!("Ping session {:?} removed", &handle); Ok(()) } unsafe extern "C" fn on_ping_success<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping success callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut ttl: c_types::c_uchar = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TTL, &mut ttl as *mut c_types::c_uchar as *mut c_types::c_void, mem::size_of_val(&ttl) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); let mut elapsed_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TIMEGAP, &mut elapsed_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&elapsed_time) as u32, ); let mut recv_len: c_types::c_uint = 0; esp_ping_get_profile( handle, 
esp_ping_profile_t_ESP_PING_PROF_SIZE, &mut recv_len as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&recv_len) as u32, ); let addr = ipv4::Ipv4Addr::from(Newtype(target_addr.u_addr.ip4)); info!( "From {} icmp_seq={} ttl={} time={}ms bytes={}", addr, seqno, ttl, elapsed_time, recv_len ); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback( &tracker.summary, &Reply::Success(Info { addr, seqno: seqno as u32, ttl: ttl as u8, recv_len: recv_len as u32, elapsed_time: Duration::from_millis(elapsed_time as u64), }), ); } } unsafe extern "C" fn on_ping_timeout<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping timeout callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); info!("From {} icmp_seq={} timeout", "???", seqno); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback(&tracker.summary, &Reply::Timeout); } } #[allow(clippy::mutex_atomic)] unsafe extern "C" fn on_ping_end<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping end callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); Self::update_summary(handle, &mut tracker.summary); info!( "{} packets transmitted, {} received, time {}ms", tracker.summary.transmitted, tracker.summary.received, tracker.summary.time.as_millis() ); #[cfg(feature = "std")] { *tracker.running.lock().unwrap() = false; tracker.cvar.notify_one(); } #[cfg(not(feature = "std"))] tracker.running.with_lock(|running| *running = false); } unsafe fn update_summary(handle: esp_ping_handle_t, summary: &mut Summary) { let mut transmitted: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REQUEST, &mut transmitted as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&transmitted) as u32, ); let mut received: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REPLY, &mut received as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&received) as u32, ); let mut total_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_DURATION, &mut total_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&total_time) as u32, ); summary.transmitted = transmitted; summary.received = received; summary.time = Duration::from_millis(total_time as u64); } } impl Ping for EspPing { type Error = EspError; fn ping(&mut self, ip: ipv4::Ipv4Addr, conf: &Configuration) -> Result<Summary, Self::Error> { info!( "About to run a summary ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(&nop_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } fn ping_details<F: Fn(&Summary, &Reply)>( &mut self, ip: ipv4::Ipv4Addr, conf: &Configuration, reply_callback: &F, ) -> 
Result<Summary, Self::Error> { info!( "About to run a detailed ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(reply_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } } struct Tracker<'a, F: Fn(&Summary, &Reply)> { summary: Summary, #[cfg(feature = "std")] cvar: Condvar, #[cfg(feature = "std")] running: std::sync::Mutex<bool>, #[cfg(not(feature = "std"))] running: EspMutex<bool>, reply_callback: Option<&'a F>, } impl<'a, F: Fn(&Summary, &Reply)> Tracker<'a, F> { #[allow(clippy::mutex_atomic)] pub fn new(reply_callback: Option<&'a F>) -> Self { Self { summary: Default::default(), #[cfg(feature = "std")] cvar: Condvar::new(), #[cfg(feature = "std")] running: std::sync::Mutex::new(false), #[cfg(not(feature = "std"))] running: EspMutex::new(false), reply_callback, } } } fn nop_callback(_summary: &Summary, _reply: &Reply) {}
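For orientation, a minimal usage sketch of the EspPing wrapper shown in full above. This is an illustration only: it assumes EspPing is already in scope, that embedded-svc's ping::Configuration implements Default, and the target address and log messages are invented for the example.

use embedded_svc::ipv4;
use embedded_svc::ping::{Configuration, Ping, Reply};
use esp_idf_sys::EspError;

fn ping_example() -> Result<(), EspError> {
    // Interface index 0, matching EspPing::default().
    let mut ping = EspPing::default();

    // Hypothetical target address, chosen only for the example.
    let target = ipv4::Ipv4Addr::new(192, 168, 1, 1);

    // Summary-only ping; Configuration::default() is assumed to exist in embedded-svc.
    let summary = ping.ping(target, &Configuration::default())?;
    log::info!(
        "{} transmitted, {} received, total {}ms",
        summary.transmitted,
        summary.received,
        summary.time.as_millis()
    );

    // Per-reply details via the callback variant of the same trait.
    ping.ping_details(target, &Configuration::default(), &|_summary, reply| {
        match reply {
            Reply::Success(info) => log::info!(
                "reply from {} seq={} in {}ms",
                info.addr,
                info.seqno,
                info.elapsed_time.as_millis()
            ),
            _ => log::info!("no reply (timeout)"),
        }
    })?;

    Ok(())
}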
use core::{mem, ptr, time::Duration}; use ::log::*; #[cfg(feature = "std")] use std::sync::*; use embedded_svc::ipv4; use embedded_svc::mutex::Mutex; use embedded_svc::ping::*; use esp_idf_sys::*; use crate::private::common::*; #[derive(Debug)] pub struct EspPing(u32); unsafe impl Send for EspPing {} unsafe impl Sync for EspPing {} impl Default for EspPing { fn default() -> Self { Self(0) } } impl EspPing { pub fn new(interface_index: u32) -> Self { Self(interface_index) } fn run_ping<F: Fn(&Summary, &Reply)>( &self, ip: ipv4::Ipv4Addr, conf: &Configuration, tracker: &mut Tracker<F>, ) -> Result<(), EspError> { #[allow(clippy::needless_update)] let config = esp_ping_config_t { count: conf.count, interval_ms: conf.interval.as_millis() as u32, timeout_ms: conf.timeout.as_millis() as u32, data_size: conf.data_size, tos: conf.tos, target_addr: ip_addr_t { u_addr: ip_addr__bindgen_ty_1 { ip4: Newtype::<ip4_addr_t>::from(ip).0, }, type_: 0, }, task_stack_size: 4096, task_prio: 2, interface: self.0, ..Default::default() }; let callbacks = esp_ping_callbacks_t { on_ping_success: Some(EspPing::on_ping_success::<F>), on_ping_timeout: Some(EspPing::on_ping_timeout::<F>), on_ping_end: Some(EspPing::on_ping_end::<F>), cb_args: tracker as *mut Tracker<F> as *mut c_types::c_void, }; let mut handle: esp_ping_handle_t = ptr::null_mut(); let handle_ref = &mut handle; esp!(unsafe { esp_ping_new_session(&config, &callbacks, handle_ref as *mut *mut c_types::c_void) })?; if handle.is_null() { return Err(EspError::from(ESP_ERR_INVALID_ARG as _).unwrap()); } info!("Ping session established, got handle {:?}", handle); #[allow(clippy::mutex_atomic)] tracker.running.with_lock(|running| *running = true); esp!(unsafe { esp_ping_start(handle) })?; info!("Ping session started"); info!("Waiting for the ping session to complete"); #[cfg(feature = "std")] { #[allow(clippy::mutex_atomic)] let _running = tracker .cvar .wait_while(tracker.running.lock().unwrap(), |running| *running) .unwrap(); } #[cfg(not(feature = "std"))] { while tracker.running.with_lock(|running| *running) { unsafe { vTaskDelay(500) }; } } esp!(unsafe { esp_ping_stop(handle) })?; info!("Ping session stopped"); esp!(unsafe { esp_ping_delete_session(handle) })?; info!("Ping session {:?} removed", &handle); Ok(()) } unsafe extern "C" fn on_ping_success<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping success callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut ttl: c_types::c_uchar = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TTL, &mut ttl as *mut c_types::c_uchar as *mut c_types::c_void, mem::size_of_val(&ttl) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); let mut elapsed_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_TIMEGAP, &mut elapsed_time as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&e
id, mem::size_of_val(&total_time) as u32, ); summary.transmitted = transmitted; summary.received = received; summary.time = Duration::from_millis(total_time as u64); } } impl Ping for EspPing { type Error = EspError; fn ping(&mut self, ip: ipv4::Ipv4Addr, conf: &Configuration) -> Result<Summary, Self::Error> { info!( "About to run a summary ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(&nop_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } fn ping_details<F: Fn(&Summary, &Reply)>( &mut self, ip: ipv4::Ipv4Addr, conf: &Configuration, reply_callback: &F, ) -> Result<Summary, Self::Error> { info!( "About to run a detailed ping {} with configuration {:?}", ip, conf ); let mut tracker = Tracker::new(Some(reply_callback)); self.run_ping(ip, conf, &mut tracker)?; Ok(tracker.summary) } } struct Tracker<'a, F: Fn(&Summary, &Reply)> { summary: Summary, #[cfg(feature = "std")] cvar: Condvar, #[cfg(feature = "std")] running: std::sync::Mutex<bool>, #[cfg(not(feature = "std"))] running: EspMutex<bool>, reply_callback: Option<&'a F>, } impl<'a, F: Fn(&Summary, &Reply)> Tracker<'a, F> { #[allow(clippy::mutex_atomic)] pub fn new(reply_callback: Option<&'a F>) -> Self { Self { summary: Default::default(), #[cfg(feature = "std")] cvar: Condvar::new(), #[cfg(feature = "std")] running: std::sync::Mutex::new(false), #[cfg(not(feature = "std"))] running: EspMutex::new(false), reply_callback, } } } fn nop_callback(_summary: &Summary, _reply: &Reply) {}
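The fragment above ends with the Tracker type, whose std variant pairs a Mutex<bool> with a Condvar so that run_ping can block until on_ping_end clears the flag. A simplified, std-only sketch of that wait/notify pattern, with names invented for illustration:

use std::sync::{Condvar, Mutex};

struct Session {
    running: Mutex<bool>,
    done: Condvar,
}

impl Session {
    // Caller side: block while `running` stays true; wait_while re-checks the
    // predicate after every wakeup, so spurious wakeups are handled for us.
    fn wait_until_finished(&self) {
        let _guard = self
            .done
            .wait_while(self.running.lock().unwrap(), |running| *running)
            .unwrap();
    }

    // "Callback" side: clear the flag and wake the waiter, mirroring on_ping_end.
    fn finish(&self) {
        *self.running.lock().unwrap() = false;
        self.done.notify_one();
    }
}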
lapsed_time) as u32, ); let mut recv_len: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SIZE, &mut recv_len as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&recv_len) as u32, ); let addr = ipv4::Ipv4Addr::from(Newtype(target_addr.u_addr.ip4)); info!( "From {} icmp_seq={} ttl={} time={}ms bytes={}", addr, seqno, ttl, elapsed_time, recv_len ); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback( &tracker.summary, &Reply::Success(Info { addr, seqno: seqno as u32, ttl: ttl as u8, recv_len: recv_len as u32, elapsed_time: Duration::from_millis(elapsed_time as u64), }), ); } } unsafe extern "C" fn on_ping_timeout<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping timeout callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); let mut seqno: c_types::c_ushort = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_SEQNO, &mut seqno as *mut c_types::c_ushort as *mut c_types::c_void, mem::size_of_val(&seqno) as u32, ); let mut target_addr_raw = [0_u8; mem::size_of::<ip_addr_t>()]; let target_addr: &mut ip_addr_t = mem::transmute(&mut target_addr_raw); esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_IPADDR, target_addr as *mut ip_addr_t as *mut c_types::c_void, mem::size_of::<ip_addr_t>() as _, ); info!("From {} icmp_seq={} timeout", "???", seqno); if let Some(reply_callback) = tracker.reply_callback { Self::update_summary(handle, &mut tracker.summary); reply_callback(&tracker.summary, &Reply::Timeout); } } #[allow(clippy::mutex_atomic)] unsafe extern "C" fn on_ping_end<F: Fn(&Summary, &Reply)>( handle: esp_ping_handle_t, args: *mut c_types::c_void, ) { info!("Ping end callback invoked"); let tracker_ptr: *mut Tracker<F> = args as _; let tracker = tracker_ptr.as_mut().unwrap(); Self::update_summary(handle, &mut tracker.summary); info!( "{} packets transmitted, {} received, time {}ms", tracker.summary.transmitted, tracker.summary.received, tracker.summary.time.as_millis() ); #[cfg(feature = "std")] { *tracker.running.lock().unwrap() = false; tracker.cvar.notify_one(); } #[cfg(not(feature = "std"))] tracker.running.with_lock(|running| *running = false); } unsafe fn update_summary(handle: esp_ping_handle_t, summary: &mut Summary) { let mut transmitted: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REQUEST, &mut transmitted as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&transmitted) as u32, ); let mut received: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_REPLY, &mut received as *mut c_types::c_uint as *mut c_types::c_void, mem::size_of_val(&received) as u32, ); let mut total_time: c_types::c_uint = 0; esp_ping_get_profile( handle, esp_ping_profile_t_ESP_PING_PROF_DURATION, &mut total_time as *mut c_types::c_uint as *mut c_types::c_vo
random
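The code fragments above repeat pieces of the same file and all hinge on one FFI pattern: run_ping hands a &mut Tracker to the C API as *mut c_void through cb_args, and each extern "C" callback casts it back before touching the summary. A generic, stripped-down sketch of that round trip, with invented names and no ESP-IDF calls:

use core::ffi::c_void;

struct CallbackState {
    replies_seen: u32,
}

// Trampoline with a C-compatible signature; it recovers the Rust state that was
// smuggled through the C API as an untyped pointer.
unsafe extern "C" fn on_event(args: *mut c_void) {
    let state = (args as *mut CallbackState).as_mut().unwrap();
    state.replies_seen += 1;
}

fn drive(state: &mut CallbackState) {
    // A real C API would store this pointer and invoke the callback later; the
    // caller must keep `state` alive (and in place) for that whole time, just as
    // run_ping keeps `tracker` borrowed until the ping session is deleted.
    unsafe { on_event(state as *mut CallbackState as *mut c_void) };
}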
[ { "content": "#[cfg(feature = \"alloc\")]\n\npub fn from_cstr_ptr(ptr: *const i8) -> alloc::string::String {\n\n unsafe { CStr::from_ptr(ptr) }.to_string_lossy().to_string()\n\n}\n\n\n", "file_path": "src/private/cstr.rs", "rank": 2, "score": 71492.7575833215 }, { "content": "#[cfg(feature = \"alloc\")]\n\npub fn set_str(buf: &mut [u8], s: &str) {\n\n let cs = CString::new(s).unwrap();\n\n let ss: &[u8] = cs.as_bytes_with_nul();\n\n buf[..ss.len()].copy_from_slice(ss);\n\n}\n\n\n", "file_path": "src/private/cstr.rs", "rank": 3, "score": 68876.27013713108 }, { "content": "fn main() -> anyhow::Result<()> {\n\n embuild::kconfig::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n\n\n println!(\n\n \"cargo:rustc-cfg={}\",\n\n std::env::var(\"DEP_ESP_IDF_MCU\").unwrap()\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 66467.80781854514 }, { "content": "#[cfg(feature = \"alloc\")]\n\npub fn from_cstr(buf: &[u8]) -> alloc::borrow::Cow<'_, str> {\n\n // We have to find the first '\\0' ourselves, because the passed buffer might\n\n // be wider than the ASCIIZ string it contains\n\n let len = buf.iter().position(|e| *e == 0).unwrap() + 1;\n\n\n\n unsafe { CStr::from_bytes_with_nul_unchecked(&buf[0..len]) }.to_string_lossy()\n\n}\n", "file_path": "src/private/cstr.rs", "rank": 5, "score": 46641.62529614967 }, { "content": "struct Shared {\n\n client_ip_conf: Option<ipv4::ClientConfiguration>,\n\n router_ip_conf: Option<ipv4::RouterConfiguration>,\n\n\n\n status: Status,\n\n operating: bool,\n\n}\n\n\n\nimpl Default for Shared {\n\n fn default() -> Self {\n\n Self {\n\n client_ip_conf: None,\n\n router_ip_conf: None,\n\n status: Status(ClientStatus::Stopped, ApStatus::Stopped),\n\n operating: false,\n\n }\n\n }\n\n}\n\n\n\npub struct EspWifi {\n", "file_path": "src/wifi.rs", "rank": 6, "score": 39381.0762610801 }, { "content": "#[derive(Debug)]\n\nstruct PrivateData;\n\n\n\n#[derive(Debug)]\n\npub struct EspNetifStack(PrivateData);\n\n\n\nimpl EspNetifStack {\n\n pub fn new() -> Result<Self, EspError> {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n if taken.0 {\n\n Err(EspError::from(ESP_ERR_INVALID_STATE as i32).unwrap())\n\n } else {\n\n if !taken.1 {\n\n esp!(esp_netif_init())?;\n\n }\n\n\n\n *taken = (true, true);\n\n Ok(Self(PrivateData))\n\n }\n\n })\n", "file_path": "src/netif.rs", "rank": 7, "score": 37748.21039835535 }, { "content": "#[derive(Debug)]\n\nstruct PrivateData;\n\n\n\n#[derive(Debug)]\n\npub struct EspNapt(PrivateData);\n\n\n\npub enum Protocol {\n\n UDP,\n\n TCP,\n\n}\n\n\n\nimpl Protocol {\n\n fn get_num_proto(&self) -> u8 {\n\n match self {\n\n Self::UDP => 17,\n\n Self::TCP => 6,\n\n }\n\n }\n\n}\n\n\n\nstatic mut TAKEN: EspMutex<bool> = EspMutex::new(false);\n", "file_path": "src/napt.rs", "rank": 8, "score": 37748.21039835535 }, { "content": "#[derive(Debug)]\n\nstruct PrivateData;\n\n\n\n#[derive(Debug)]\n\npub struct EspSysLoopStack(PrivateData);\n\n\n\nimpl EspSysLoopStack {\n\n pub fn new() -> Result<Self, EspError> {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n if *taken {\n\n Err(EspError::from(ESP_ERR_INVALID_STATE as i32).unwrap())\n\n } else {\n\n esp!(esp_event_loop_create_default())?;\n\n\n\n *taken = true;\n\n Ok(EspSysLoopStack(PrivateData))\n\n }\n\n })\n\n }\n\n }\n", "file_path": "src/sysloop.rs", "rank": 9, "score": 37748.21039835535 }, { "content": "#[derive(Debug)]\n\nstruct PrivateData;\n\n\n\n#[derive(Debug)]\n\npub struct EspDefaultNvs(PrivateData);\n\n\n\nimpl EspDefaultNvs {\n\n pub fn new() -> Result<Self, EspError> {\n\n unsafe {\n\n 
DEFAULT_TAKEN.lock(|taken| {\n\n if *taken {\n\n Err(EspError::from(ESP_ERR_INVALID_STATE as i32).unwrap())\n\n } else {\n\n let default_nvs = Self::init()?;\n\n\n\n *taken = true;\n\n Ok(default_nvs)\n\n }\n\n })\n\n }\n\n }\n", "file_path": "src/nvs.rs", "rank": 10, "score": 37748.21039835535 }, { "content": "struct IdfRequest<'r>(\n\n *mut esp_idf_sys::httpd_req_t,\n\n PhantomData<&'r esp_idf_sys::httpd_req_t>,\n\n);\n\n\n\nimpl<'r> IdfRequest<'r> {\n\n fn send(&mut self, response: Response) -> Result<()> {\n\n let mut status_string = response.status.to_string();\n\n if let Some(message) = response.status_message {\n\n status_string.push(' ');\n\n status_string.push_str(message.as_str());\n\n }\n\n\n\n let c_status = CString::new(status_string.as_str()).unwrap();\n\n\n\n esp!(unsafe { esp_idf_sys::httpd_resp_set_status(self.0, c_status.as_ptr()) })?;\n\n\n\n let mut c_headers: std::vec::Vec<(CString, CString)> = vec![];\n\n\n\n for (key, value) in response.headers {\n", "file_path": "src/httpd.rs", "rank": 11, "score": 34841.71308214232 }, { "content": "use crate::log::Logger;\n\n\n\nstatic LOGGER: Logger = Logger;\n\n\n\n/// When compiling a Rust binary crate (e.g. in Cargo-first builds) and NOT doing\n\n/// any tricks like using #[no_main] or #[start], the Rust compiler will autogenerate\n\n/// a C function with the signature as below which will be proxying\n\n/// the real Rust main function of your binary crate\n\n///\n\n/// So to bridge this function with the real C \"app_main()\" entrypoint\n\n/// that ESP-IDF expects it is enough to implement app_main() and call in it\n\n/// the \"main\" C function autogenerated by the Rust compiler\n\n///\n\n/// See https://github.com/rust-lang/rust/issues/29633 for more information\n\n#[cfg(feature = \"binstart\")]\n\nextern \"C\" {\n\n fn main(p1: isize, p2: *const *const u8) -> isize;\n\n}\n\n\n\n/// When compiling a static Rust library crate (e.g. by using a PIO->Cargo or a CMake->Cargo) build,\n", "file_path": "src/start.rs", "rank": 12, "score": 28330.948161544853 }, { "content": " .unwrap();\n\n\n\n #[cfg(feature = \"binstart\")]\n\n {\n\n match unsafe { main(0, &[core::ptr::null()] as *const *const u8) } {\n\n 0 => log::error!(\"Unexpected program exit!\\n(no error reported)\"),\n\n n => log::error!(\"Unexpected program exit!\\n{}\", n),\n\n }\n\n\n\n log::warn!(\"Will restart now...\");\n\n panic!();\n\n }\n\n\n\n #[cfg(feature = \"libstart\")]\n\n unsafe {\n\n main()\n\n }\n\n}\n", "file_path": "src/start.rs", "rank": 13, "score": 28330.256831562125 }, { "content": "/// there is no main function that the Rust compiler expects, nor autogeneration of a callable\n\n/// wrapper around it.\n\n///\n\n/// In that case (and if the \"libstart\" feature is enabled), it is _us_ (not the Rust compiler)\n\n/// expecting the user to define a rust \"main\" function and it is our code below which is explicitly\n\n/// calling it from app_main(). 
If the user does not define a main() runction in Rust, there will\n\n/// be a linkage error instead of the nice Rust syntax error for binary crates.\n\n///\n\n/// Another restriction of the \"libstart\" feature is that the Rust main function will always have one\n\n/// fixed signature: \"fn main() -> !\" - as opposed to the flexibility of main() in binary crates\n\n/// where it can have quite a few different returning types\n\n#[cfg(feature = \"libstart\")]\n\nextern \"Rust\" {\n\n fn main() -> !;\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn app_main() {\n\n log::set_logger(&LOGGER)\n\n .map(|()| LOGGER.initialize())\n", "file_path": "src/start.rs", "rank": 14, "score": 28326.018277415606 }, { "content": " let output = format!(\"{}\", record.args());\n\n\n\n let coutput = CString::new(output).unwrap();\n\n let ctarget = CString::new(record.metadata().target()).unwrap();\n\n\n\n if let Some(color) = Self::get_color(record.level()) {\n\n unsafe {\n\n esp_log_write(\n\n Newtype::<esp_log_level_t>::from(record.level()).0,\n\n b\"rust-logging\\0\" as *const u8 as *const _, // TODO: ctarget.as_c_str().as_ptr() as *const u8 as *const _,\n\n b\"\\x1b[0;%dm%s (%d) %s: %s\\x1b[0m\\n\\0\" as *const u8 as *const _,\n\n color as u32,\n\n Self::get_marker(record.metadata().level()).as_ptr(),\n\n esp_log_timestamp(),\n\n ctarget.as_c_str().as_ptr(),\n\n coutput.as_c_str().as_ptr(),\n\n );\n\n }\n\n } else {\n\n unsafe {\n", "file_path": "src/log.rs", "rank": 15, "score": 28174.735624620305 }, { "content": "use ::log::{Level, LevelFilter, Metadata, Record};\n\n\n\nuse esp_idf_sys::*;\n\n\n\nuse crate::private::common::*;\n\nuse crate::private::cstr::*;\n\n\n\npub struct Logger;\n\n\n\nunsafe impl Send for Logger {}\n\nunsafe impl Sync for Logger {}\n\n\n\n#[allow(non_upper_case_globals)]\n\nimpl From<Newtype<esp_log_level_t>> for LevelFilter {\n\n fn from(level: Newtype<esp_log_level_t>) -> Self {\n\n match level.0 {\n\n esp_log_level_t_ESP_LOG_NONE => LevelFilter::Off,\n\n esp_log_level_t_ESP_LOG_ERROR => LevelFilter::Error,\n\n esp_log_level_t_ESP_LOG_WARN => LevelFilter::Warn,\n\n esp_log_level_t_ESP_LOG_INFO => LevelFilter::Info,\n", "file_path": "src/log.rs", "rank": 16, "score": 28173.634262058415 }, { "content": " Level::Debug => esp_log_level_t_ESP_LOG_DEBUG,\n\n Level::Trace => esp_log_level_t_ESP_LOG_VERBOSE,\n\n })\n\n }\n\n}\n\n\n\nimpl Logger {\n\n pub fn initialize(&self) {\n\n ::log::set_max_level(self.get_max_level());\n\n }\n\n\n\n pub fn get_max_level(&self) -> LevelFilter {\n\n LevelFilter::from(Newtype(CONFIG_LOG_DEFAULT_LEVEL))\n\n }\n\n\n\n pub fn set_target_level(&self, target: impl AsRef<str>, level_filter: LevelFilter) {\n\n let ctarget = CString::new(target.as_ref()).unwrap();\n\n\n\n unsafe {\n\n esp_log_level_set(\n", "file_path": "src/log.rs", "rank": 17, "score": 28173.068574721 }, { "content": " ctarget.as_c_str().as_ptr(),\n\n Newtype::<esp_log_level_t>::from(level_filter).0,\n\n )\n\n };\n\n }\n\n\n\n fn get_marker(level: Level) -> &'static CStr {\n\n CStr::from_bytes_with_nul(match level {\n\n Level::Error => b\"E\\0\",\n\n Level::Warn => b\"W\\0\",\n\n Level::Info => b\"I\\0\",\n\n Level::Debug => b\"D\\0\",\n\n Level::Trace => b\"V\\0\",\n\n })\n\n .unwrap()\n\n }\n\n\n\n fn get_color(level: Level) -> Option<u8> {\n\n if CONFIG_LOG_COLORS == 0 {\n\n None\n", "file_path": "src/log.rs", "rank": 18, "score": 28165.48497447203 }, { "content": " esp_log_write(\n\n Newtype::<esp_log_level_t>::from(record.level()).0,\n\n b\"rust-logging\\0\" as *const u8 as *const _, // 
TODO: ctarget.as_c_str().as_ptr() as *const u8 as *const _,\n\n b\"%s (%d) %s: %s\\n\\0\" as *const u8 as *const _,\n\n Self::get_marker(record.metadata().level()).as_ptr(),\n\n esp_log_timestamp(),\n\n ctarget.as_c_str().as_ptr(),\n\n coutput.as_c_str().as_ptr(),\n\n );\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n", "file_path": "src/log.rs", "rank": 19, "score": 28164.338220695707 }, { "content": "#[allow(non_upper_case_globals)]\n\nimpl From<Newtype<esp_log_level_t>> for Level {\n\n fn from(level: Newtype<esp_log_level_t>) -> Self {\n\n match level.0 {\n\n esp_log_level_t_ESP_LOG_ERROR => Level::Error,\n\n esp_log_level_t_ESP_LOG_WARN => Level::Warn,\n\n esp_log_level_t_ESP_LOG_INFO => Level::Info,\n\n esp_log_level_t_ESP_LOG_DEBUG => Level::Debug,\n\n esp_log_level_t_ESP_LOG_VERBOSE => Level::Trace,\n\n _ => Level::Trace,\n\n }\n\n }\n\n}\n\n\n\nimpl From<Level> for Newtype<esp_log_level_t> {\n\n fn from(level: Level) -> Self {\n\n Newtype(match level {\n\n Level::Error => esp_log_level_t_ESP_LOG_ERROR,\n\n Level::Warn => esp_log_level_t_ESP_LOG_WARN,\n\n Level::Info => esp_log_level_t_ESP_LOG_INFO,\n", "file_path": "src/log.rs", "rank": 20, "score": 28163.58814049281 }, { "content": " esp_log_level_t_ESP_LOG_DEBUG => LevelFilter::Debug,\n\n esp_log_level_t_ESP_LOG_VERBOSE => LevelFilter::Trace,\n\n _ => LevelFilter::Trace,\n\n }\n\n }\n\n}\n\n\n\nimpl From<LevelFilter> for Newtype<esp_log_level_t> {\n\n fn from(level: LevelFilter) -> Self {\n\n Newtype(match level {\n\n LevelFilter::Off => esp_log_level_t_ESP_LOG_NONE,\n\n LevelFilter::Error => esp_log_level_t_ESP_LOG_ERROR,\n\n LevelFilter::Warn => esp_log_level_t_ESP_LOG_WARN,\n\n LevelFilter::Info => esp_log_level_t_ESP_LOG_INFO,\n\n LevelFilter::Debug => esp_log_level_t_ESP_LOG_DEBUG,\n\n LevelFilter::Trace => esp_log_level_t_ESP_LOG_VERBOSE,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 21, "score": 28162.23381417892 }, { "content": " } else {\n\n match level {\n\n Level::Error => Some(31), // LOG_COLOR_RED\n\n Level::Warn => Some(33), // LOG_COLOR_BROWN\n\n Level::Info => Some(32), // LOG_COLOR_GREEN,\n\n _ => None,\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl ::log::Log for Logger {\n\n fn enabled(&self, metadata: &Metadata) -> bool {\n\n metadata.level() <= LevelFilter::from(Newtype(CONFIG_LOG_DEFAULT_LEVEL))\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n if self.enabled(record.metadata()) {\n\n // TODO: Get rid of all allocations, if possible\n\n\n", "file_path": "src/log.rs", "rank": 22, "score": 28162.148465238435 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use std::{any::Any, borrow::Borrow, sync::Arc};\n\n\n\n use embedded_svc::{\n\n edge_config::wifi,\n\n httpd::{app, registry::Registry, sessions, StateMap},\n\n wifi::{Configuration, Wifi},\n\n };\n\n\n\n use crate::{\n\n httpd::ServerRegistry, netif::EspNetif, nvs::EspDefaultNvs, sysloop::EspSysLoop,\n\n wifi::EspWifi,\n\n };\n\n\n\n #[test]\n\n pub fn test() -> anyhow::Result<()> {\n\n let mut wifi = EspWifi::new(\n\n Arc::new(EspNetif::new()?),\n\n Arc::new(EspSysLoop::new()?),\n", "file_path": "src/private/edge_config.rs", "rank": 39, "score": 24857.332593217812 }, { "content": " Arc::new(EspDefaultNvs::new()?),\n\n )?;\n\n\n\n wifi.set_configuration(&Configuration::AccessPoint(Default::default()))?;\n\n\n\n let boxed: Box<dyn Any> = Box::new(wifi);\n\n\n\n let app: StateMap = vec![(\"wifi\".to_string(), boxed)].into_iter().collect();\n\n\n\n let _server = ServerRegistry::new()\n\n .register(|registry| wifi::register(registry, \"/api\", 
None))?\n\n .at(\"\")\n\n .middleware(sessions::middleware(Default::default()))?\n\n .at(\"\")\n\n .middleware(app::middleware(app))?\n\n .start(&Default::default())?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/private/edge_config.rs", "rank": 40, "score": 24849.1435986525 }, { "content": "# Type-Safe Rust Wrappers for various ESP-IDF services (WiFi, Network, Httpd, Logging, etc.)\n\n\n\nThe wrappers are implementations of the abstractions defined in the [embedded-svc](https://github.com/ivmarkov/embedded-svc) project.\n\n<br><br>\n\n\n\nFor more information, check out:\n\n* The [Rust ESP32 STD compiler fork](https://github.com/ivmarkov/rust)\n\n* The [\"Hello, World\" demo](https://github.com/ivmarkov/rust-esp32-std-hello)\n\n* The [embedded-svc](https://github.com/ivmarkov/embedded-svc) project\n", "file_path": "README.md", "rank": 41, "score": 14786.760002702136 }, { "content": "impl registry::Registry for ServerRegistry {\n\n fn handler(self, handler: Handler) -> Result<Self> {\n\n Ok(Self(self.0.handler(handler)?))\n\n }\n\n\n\n fn middleware(self, middleware: Middleware) -> Result<Self> {\n\n Ok(Self(self.0.middleware(middleware)?))\n\n }\n\n}\n\n\n\npub struct Server {\n\n sd: esp_idf_sys::httpd_handle_t,\n\n registrations: Vec<(CString, esp_idf_sys::httpd_uri_t)>,\n\n}\n\n\n\nimpl Server {\n\n fn new(conf: &Configuration) -> Result<Self> {\n\n let config = Self::default_configuration(conf.http_port, conf.https_port);\n\n\n\n let mut handle: esp_idf_sys::httpd_handle_t = ptr::null_mut();\n", "file_path": "src/httpd.rs", "rank": 42, "score": 32.13346734168935 }, { "content": "\n\n match unsafe { nvs_get_u64(self.1, c_key.as_ptr(), dummy as *mut _) } as u32 {\n\n ESP_ERR_NVS_NOT_FOUND => Ok(false),\n\n ESP_ERR_NVS_INVALID_LENGTH => Ok(true),\n\n result => {\n\n esp!(result)?;\n\n Ok(true)\n\n }\n\n }\n\n }\n\n\n\n fn remove(&mut self, key: impl AsRef<str>) -> Result<bool, Self::Error> {\n\n let c_key = CString::new(key.as_ref()).unwrap();\n\n\n\n let result = unsafe { nvs_erase_key(self.1, c_key.as_ptr()) };\n\n\n\n if result == ESP_ERR_NVS_NOT_FOUND as i32 {\n\n Ok(false)\n\n } else {\n\n esp!(result)?;\n", "file_path": "src/nvs_storage.rs", "rank": 43, "score": 31.05309993939541 }, { "content": " info!(\"Setting STA configuration: {:?}\", conf);\n\n\n\n let mut wifi_config = wifi_config_t {\n\n sta: Newtype::<wifi_sta_config_t>::from(conf).0,\n\n };\n\n\n\n esp!(unsafe { esp_wifi_set_config(wifi_interface_t_WIFI_IF_STA, &mut wifi_config) })?;\n\n\n\n self.set_client_ip_conf(&conf.ip_conf)?;\n\n\n\n info!(\"STA configuration done\");\n\n\n\n Ok(())\n\n }\n\n\n\n fn get_ap_conf(&self) -> Result<AccessPointConfiguration, EspError> {\n\n let mut wifi_config: wifi_config_t = Default::default();\n\n esp!(unsafe { esp_wifi_get_config(wifi_interface_t_WIFI_IF_AP, &mut wifi_config) })?;\n\n\n\n let mut result: AccessPointConfiguration = unsafe { Newtype(wifi_config.ap).into() };\n", "file_path": "src/wifi.rs", "rank": 44, "score": 29.813412418021755 }, { "content": "impl EspNetif {\n\n pub fn new(\n\n netif_stack: Arc<EspNetifStack>,\n\n conf: &InterfaceConfiguration,\n\n ) -> Result<Self, EspError> {\n\n let c_if_key = CString::new(conf.key.as_str()).unwrap();\n\n let c_if_description = CString::new(conf.description.as_str()).unwrap();\n\n\n\n let (mut esp_inherent_config, ip_info, dhcps, dns, secondary_dns) = match conf\n\n .ip_configuration\n\n {\n\n InterfaceIpConfiguration::Client(ref ip_conf) => (\n\n esp_netif_inherent_config_t {\n\n flags: match ip_conf {\n\n 
ipv4::ClientConfiguration::DHCP => {\n\n esp_netif_flags_ESP_NETIF_DHCP_CLIENT\n\n | esp_netif_flags_ESP_NETIF_FLAG_GARP\n\n | esp_netif_flags_ESP_NETIF_FLAG_EVENT_IP_MODIFIED\n\n }\n\n ipv4::ClientConfiguration::Fixed(_) => {\n", "file_path": "src/netif.rs", "rank": 45, "score": 29.49128866145973 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n fn set_client_ip_conf(\n\n &mut self,\n\n conf: &Option<ipv4::ClientConfiguration>,\n\n ) -> Result<(), EspError> {\n\n Self::netif_unbind(self.sta_netif.as_mut())?;\n\n\n\n if let Some(conf) = conf {\n\n let mut iconf = InterfaceConfiguration::wifi_default_client();\n\n iconf.ip_configuration = InterfaceIpConfiguration::Client(conf.clone());\n\n\n\n info!(\"Setting STA interface configuration: {:?}\", iconf);\n\n\n\n let netif = EspNetif::new(self.netif_stack.clone(), &iconf)?;\n\n\n\n esp!(unsafe { esp_netif_attach_wifi_station(netif.1) })?;\n\n esp!(unsafe { esp_wifi_set_default_wifi_sta_handlers() })?;\n", "file_path": "src/wifi.rs", "rank": 46, "score": 28.553872972697093 }, { "content": " let handle_ref = &mut handle;\n\n\n\n esp!(unsafe { esp_idf_sys::httpd_start(handle_ref, &config as *const _) })?;\n\n\n\n info!(\"Started Httpd IDF server with config {:?}\", conf);\n\n\n\n Ok(Server {\n\n sd: handle,\n\n registrations: vec![],\n\n })\n\n }\n\n\n\n fn register(&mut self, handler: Handler) -> Result<()> {\n\n let c_str = CString::new(handler.uri().as_ref()).unwrap();\n\n let method = handler.method();\n\n\n\n let conf = esp_idf_sys::httpd_uri_t {\n\n uri: c_str.as_ptr(),\n\n method: Self::get_httpd_method(method),\n\n user_ctx: Box::into_raw(Box::new(handler.handler())) as *mut _,\n", "file_path": "src/httpd.rs", "rank": 47, "score": 28.48020349663718 }, { "content": " f(self.ap_netif.as_mut())\n\n }\n\n\n\n fn get_client_conf(&self) -> Result<ClientConfiguration, EspError> {\n\n let mut wifi_config = [0_u8; mem::size_of::<wifi_config_t>()];\n\n let wifi_config_ref: &mut wifi_config_t = unsafe { mem::transmute(&mut wifi_config) };\n\n\n\n esp!(unsafe { esp_wifi_get_config(wifi_interface_t_WIFI_IF_STA, wifi_config_ref) })?;\n\n\n\n let mut result: ClientConfiguration = unsafe { (&Newtype(wifi_config_ref.sta)).into() };\n\n result.ip_conf = self\n\n .shared\n\n .with_lock(|shared| shared.client_ip_conf.clone());\n\n\n\n info!(\"Providing STA configuration: {:?}\", &result);\n\n\n\n Ok(result)\n\n }\n\n\n\n fn set_client_conf(&mut self, conf: &ClientConfiguration) -> Result<(), EspError> {\n", "file_path": "src/wifi.rs", "rank": 48, "score": 27.950397810974767 }, { "content": " result.ip_conf = self\n\n .shared\n\n .with_lock(|shared| shared.router_ip_conf.clone());\n\n\n\n info!(\"Providing AP configuration: {:?}\", &result);\n\n\n\n Ok(result)\n\n }\n\n\n\n fn set_ap_conf(&mut self, conf: &AccessPointConfiguration) -> Result<(), EspError> {\n\n info!(\"Setting AP configuration: {:?}\", conf);\n\n\n\n let mut wifi_config = wifi_config_t {\n\n ap: Newtype::<wifi_ap_config_t>::from(conf).0,\n\n };\n\n\n\n esp!(unsafe { esp_wifi_set_config(wifi_interface_t_WIFI_IF_AP, &mut wifi_config) })?;\n\n self.set_router_ip_conf(&conf.ip_conf)?;\n\n\n\n info!(\"AP configuration done\");\n", "file_path": "src/wifi.rs", "rank": 49, "score": 27.805084204884263 }, { "content": "\n\n self.unregister(uri, registration)?;\n\n }\n\n\n\n esp!(unsafe { esp_idf_sys::httpd_stop(self.sd) })?;\n\n\n\n self.sd = ptr::null_mut();\n\n }\n\n\n\n info!(\"Httpd IDF server stopped\");\n\n\n\n Ok(())\n\n }\n\n\n\n unsafe extern \"C\" fn handle(rd: *mut esp_idf_sys::httpd_req_t) -> 
c_int {\n\n let handler = ((*rd).user_ctx as *mut Box<dyn Fn(Request) -> Result<Response>>)\n\n .as_ref()\n\n .unwrap();\n\n\n\n let idf_request = IdfRequest(rd, PhantomData);\n", "file_path": "src/httpd.rs", "rank": 50, "score": 26.8717150985928 }, { "content": " esp!(unsafe { nvs_commit(self.1) })?;\n\n\n\n Ok(true)\n\n }\n\n }\n\n\n\n fn get_raw(&self, key: impl AsRef<str>) -> Result<Option<vec::Vec<u8>>, Self::Error> {\n\n let c_key = CString::new(key.as_ref()).unwrap();\n\n\n\n let mut value: u_int64_t = 0;\n\n\n\n match unsafe { nvs_get_u64(self.1, c_key.as_ptr(), value as *mut _) } as u32 {\n\n ESP_ERR_NVS_NOT_FOUND => Ok(None),\n\n ESP_ERR_NVS_INVALID_LENGTH => {\n\n let mut len: size_t = 0;\n\n\n\n esp!(unsafe {\n\n nvs_get_blob(self.1, c_key.as_ptr(), ptr::null_mut(), &mut len as *mut _)\n\n })?;\n\n\n", "file_path": "src/nvs_storage.rs", "rank": 51, "score": 26.736663810160675 }, { "content": "\n\nimpl EspNapt {\n\n pub fn new() -> Result<Self, EspError> {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n if *taken {\n\n Err(EspError::from(ESP_ERR_INVALID_STATE as i32).unwrap())\n\n } else {\n\n *taken = true;\n\n Ok(Self(PrivateData))\n\n }\n\n })\n\n }\n\n }\n\n\n\n pub fn add_portmap(\n\n protocol: Protocol,\n\n external_ip: ipv4::Ipv4Addr,\n\n external_port: u16,\n\n internal_ip: ipv4::Ipv4Addr,\n", "file_path": "src/napt.rs", "rank": 52, "score": 26.725331529701634 }, { "content": "pub enum InterfaceIpConfiguration {\n\n Client(ipv4::ClientConfiguration),\n\n Router(ipv4::RouterConfiguration),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[cfg_attr(feature = \"use_serde\", derive(Serialize, Deserialize))]\n\npub struct InterfaceConfiguration {\n\n pub key: String,\n\n pub description: String,\n\n pub route_priority: u32,\n\n pub ip_configuration: InterfaceIpConfiguration,\n\n pub interface_stack: InterfaceStack,\n\n}\n\n\n\nimpl Default for InterfaceConfiguration {\n\n fn default() -> Self {\n\n Self::wifi_default_client()\n\n }\n\n}\n", "file_path": "src/netif.rs", "rank": 53, "score": 26.102275733348293 }, { "content": "\n\n fn factory_reset(self) -> Self::Error {\n\n todo!()\n\n }\n\n\n\n fn initiate_update(self) -> Result<Self::OtaUpdate, Self::Error> {\n\n let partition = unsafe { esp_ota_get_next_update_partition(ptr::null()) };\n\n\n\n let mut out_handle: esp_ota_handle_t = Default::default();\n\n\n\n esp!(unsafe { esp_ota_begin(partition, OTA_SIZE_UNKNOWN, &mut out_handle as *mut _) })?;\n\n\n\n Ok(EspOta(Update(out_handle)))\n\n }\n\n\n\n fn mark_running_slot_valid(&mut self) -> Result<(), Self::Error> {\n\n esp!(unsafe { esp_ota_mark_app_valid_cancel_rollback() })\n\n }\n\n\n\n fn mark_running_slot_invalid_and_reboot(&mut self) -> Self::Error {\n", "file_path": "src/ota.rs", "rank": 54, "score": 25.677371904152547 }, { "content": " pub fn new(\n\n nvs: Arc<EspNvs>,\n\n namespace: impl AsRef<str>,\n\n read_write: bool,\n\n ) -> Result<Self, EspError> {\n\n let c_namespace = CString::new(namespace.as_ref()).unwrap();\n\n\n\n let mut handle: nvs_handle_t = 0;\n\n esp!(unsafe {\n\n nvs_open_from_partition(\n\n nvs.0.as_ptr(),\n\n c_namespace.as_ptr(),\n\n if read_write {\n\n nvs_open_mode_t_NVS_READWRITE\n\n } else {\n\n nvs_open_mode_t_NVS_READONLY\n\n },\n\n &mut handle as *mut _,\n\n )\n\n })?;\n", "file_path": "src/nvs_storage.rs", "rank": 55, "score": 25.251065824366144 }, { "content": " Self(Default::default())\n\n }\n\n\n\n pub fn start(self, configuration: &Configuration) -> Result<Server> {\n\n let mut server = Server::new(configuration)?;\n\n\n\n for handler in 
self.0.apply_middleware() {\n\n server.register(handler)?;\n\n }\n\n\n\n Ok(server)\n\n }\n\n}\n\n\n\nimpl Default for ServerRegistry {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n", "file_path": "src/httpd.rs", "rank": 56, "score": 25.007952914003397 }, { "content": " read_write: bool,\n\n ) -> Result<Self, EspError> {\n\n let c_namespace = CString::new(namespace.as_ref()).unwrap();\n\n\n\n let mut handle: nvs_handle_t = 0;\n\n esp!(unsafe {\n\n nvs_open(\n\n c_namespace.as_ptr(),\n\n if read_write {\n\n nvs_open_mode_t_NVS_READWRITE\n\n } else {\n\n nvs_open_mode_t_NVS_READONLY\n\n },\n\n &mut handle as *mut _,\n\n )\n\n })?;\n\n\n\n Ok(Self(default_nvs, handle))\n\n }\n\n\n", "file_path": "src/nvs_storage.rs", "rank": 57, "score": 24.55365664230758 }, { "content": " Self::netif_unbind(self.ap_netif.as_mut())?;\n\n\n\n if let Some(conf) = conf {\n\n let mut iconf = InterfaceConfiguration::wifi_default_router();\n\n iconf.ip_configuration = InterfaceIpConfiguration::Router(conf.clone());\n\n\n\n info!(\"Setting AP interface configuration: {:?}\", iconf);\n\n\n\n let netif = EspNetif::new(self.netif_stack.clone(), &iconf)?;\n\n\n\n esp!(unsafe { esp_netif_attach_wifi_ap(netif.1) })?;\n\n esp!(unsafe { esp_wifi_set_default_wifi_ap_handlers() })?;\n\n\n\n self.ap_netif = Some(netif);\n\n\n\n info!(\"AP IP configuration done\");\n\n } else {\n\n self.ap_netif = None;\n\n\n\n info!(\"Skipping AP IP configuration (not configured)\");\n", "file_path": "src/wifi.rs", "rank": 58, "score": 24.372184763447546 }, { "content": " } else {\n\n Err(EspError::from(ESP_ERR_INVALID_SIZE as _).unwrap())\n\n }\n\n }\n\n}\n\n\n\npub struct EspSlot(esp_partition_t);\n\n\n\nimpl ota::Slot for EspSlot {\n\n type Error = EspError;\n\n\n\n fn get_label(&self) -> Result<String, Self::Error> {\n\n Ok(from_cstr_ptr(&self.0.label as *const _ as *const _))\n\n }\n\n\n\n fn get_state(&self) -> Result<ota::SlotState, Self::Error> {\n\n let mut state: esp_ota_img_states_t = Default::default();\n\n\n\n let err = unsafe { esp_ota_get_state_partition(&self.0 as *const _, &mut state as *mut _) };\n\n\n", "file_path": "src/ota.rs", "rank": 59, "score": 24.326278397260065 }, { "content": "impl EspOta<Read> {\n\n pub fn new() -> Result<Self, EspError> {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n if *taken {\n\n Err(EspError::from(ESP_ERR_INVALID_STATE as i32).unwrap())\n\n } else {\n\n *taken = true;\n\n Ok(Self(Read))\n\n }\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl<MODE> Drop for EspOta<MODE> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n *taken = false;\n", "file_path": "src/ota.rs", "rank": 60, "score": 24.28545775918105 }, { "content": "\n\n Ok(Self(nvs, handle))\n\n }\n\n}\n\n\n\nimpl Drop for EspNvsStorage {\n\n fn drop(&mut self) {\n\n unsafe {\n\n nvs_close(self.1);\n\n }\n\n }\n\n}\n\n\n\nimpl Storage for EspNvsStorage {\n\n type Error = EspError;\n\n\n\n fn contains(&self, key: impl AsRef<str>) -> Result<bool, Self::Error> {\n\n let c_key = CString::new(key.as_ref()).unwrap();\n\n\n\n let dummy: u_int64_t = 0;\n", "file_path": "src/nvs_storage.rs", "rank": 61, "score": 24.17769654124742 }, { "content": "\n\n info!(\"Deinitialization complete\");\n\n\n\n Ok(())\n\n }\n\n\n\n #[allow(non_upper_case_globals)]\n\n fn do_scan(&mut self) -> Result<usize, EspError> {\n\n info!(\"About to scan for access points\");\n\n\n\n self.stop()?;\n\n\n\n unsafe {\n\n esp!(esp_wifi_set_mode(wifi_mode_t_WIFI_MODE_STA))?;\n\n esp!(esp_wifi_start())?;\n\n\n\n esp!(esp_wifi_scan_start(ptr::null_mut(), 
true))?;\n\n }\n\n\n\n let mut found_ap: u16 = 0;\n", "file_path": "src/wifi.rs", "rank": 62, "score": 23.587929398457128 }, { "content": "use core::{any::Any, ptr};\n\n\n\nextern crate alloc;\n\nuse alloc::sync::Arc;\n\nuse alloc::vec;\n\n\n\nuse embedded_svc::storage::Storage;\n\n\n\nuse esp_idf_sys::*;\n\n\n\nuse crate::nvs::*;\n\n\n\nuse crate::private::cstr::*;\n\n\n\npub struct EspNvsStorage(Arc<dyn Any>, nvs_handle_t);\n\n\n\nimpl EspNvsStorage {\n\n pub fn new_default(\n\n default_nvs: Arc<EspDefaultNvs>,\n\n namespace: impl AsRef<str>,\n", "file_path": "src/nvs_storage.rs", "rank": 63, "score": 23.390837916926998 }, { "content": " internal_port: u16,\n\n ) -> bool {\n\n unsafe {\n\n ip_portmap_add(\n\n protocol.get_num_proto(),\n\n Newtype::<esp_ip4_addr_t>::from(external_ip).0.addr,\n\n external_port,\n\n Newtype::<esp_ip4_addr_t>::from(internal_ip).0.addr,\n\n internal_port,\n\n ) != 0\n\n }\n\n }\n\n\n\n pub fn remove_portmap(protocol: Protocol, external_port: u16) -> bool {\n\n unsafe { ip_portmap_remove(protocol.get_num_proto(), external_port) != 0 }\n\n }\n\n}\n\n\n\nimpl Drop for EspNapt {\n\n fn drop(&mut self) {\n\n unsafe {\n\n TAKEN.lock(|taken| {\n\n *taken = false;\n\n });\n\n }\n\n }\n\n}\n", "file_path": "src/napt.rs", "rank": 64, "score": 23.27507373283147 }, { "content": " }\n\n\n\n pub fn ppp_default_client() -> Self {\n\n Self {\n\n key: \"PPP_CL_DEF\".into(),\n\n description: \"ppp\".into(),\n\n route_priority: 30,\n\n ip_configuration: InterfaceIpConfiguration::Client(Default::default()),\n\n interface_stack: InterfaceStack::Ppp,\n\n }\n\n }\n\n\n\n pub fn ppp_default_router() -> Self {\n\n Self {\n\n key: \"PPP_RT_DEF\".into(),\n\n description: \"ppprt\".into(),\n\n route_priority: 20,\n\n ip_configuration: InterfaceIpConfiguration::Router(Default::default()),\n\n interface_stack: InterfaceStack::Ppp,\n\n }\n\n }\n\n}\n\n\n\nstatic mut TAKEN: EspMutex<(bool, bool)> = EspMutex::new((false, false));\n\n\n\n#[derive(Debug)]\n", "file_path": "src/netif.rs", "rank": 65, "score": 23.123902868420092 }, { "content": "\n\n pub fn get_index(&self) -> u32 {\n\n unsafe { esp_netif_get_netif_impl_index(self.1) as _ }\n\n }\n\n\n\n pub fn get_name(&self) -> String {\n\n let mut netif_name = [0u8; 7];\n\n\n\n esp!(unsafe { esp_netif_get_netif_impl_name(self.1, netif_name.as_mut_ptr() as *mut _) })\n\n .unwrap();\n\n\n\n from_cstr(&netif_name).into()\n\n }\n\n\n\n pub fn get_dns(&self) -> ipv4::Ipv4Addr {\n\n let mut dns_info = Default::default();\n\n\n\n unsafe {\n\n esp!(esp_netif_get_dns_info(\n\n self.1,\n", "file_path": "src/netif.rs", "rank": 66, "score": 22.93123918339541 }, { "content": "\n\n Ok(Self(c_partition))\n\n }\n\n}\n\n\n\nimpl Drop for EspNvs {\n\n fn drop(&mut self) {\n\n unsafe {\n\n NONDEFAULT_LOCKED.lock(|registrations| {\n\n esp!(nvs_flash_deinit_partition(self.0.as_ptr())).unwrap();\n\n registrations.remove(self.0.as_ref());\n\n });\n\n }\n\n\n\n info!(\"Dropped\");\n\n }\n\n}\n", "file_path": "src/nvs.rs", "rank": 67, "score": 22.78329407697974 }, { "content": "\n\nimpl InterfaceConfiguration {\n\n pub fn eth_default_client() -> Self {\n\n Self {\n\n key: \"ETH_CL_DEF\".into(),\n\n description: \"eth\".into(),\n\n route_priority: 60,\n\n ip_configuration: InterfaceIpConfiguration::Client(Default::default()),\n\n interface_stack: InterfaceStack::Eth,\n\n }\n\n }\n\n\n\n pub fn eth_default_router() -> Self {\n\n Self {\n\n key: \"ETH_RT_DEF\".into(),\n\n description: \"ethrt\".into(),\n\n route_priority: 50,\n\n ip_configuration: 
InterfaceIpConfiguration::Router(Default::default()),\n\n interface_stack: InterfaceStack::Eth,\n\n }\n", "file_path": "src/netif.rs", "rank": 68, "score": 22.773145734838753 }, { "content": "impl TryFrom<Newtype<ip4_addr_t>> for Mask {\n\n type Error = EspError;\n\n\n\n fn try_from(esp_ip: Newtype<ip4_addr_t>) -> Result<Self, Self::Error> {\n\n let ip: ipv4::Ipv4Addr = esp_ip.into();\n\n\n\n ip.try_into()\n\n .map_err(|_| EspError::from(ESP_ERR_INVALID_ARG as i32).unwrap())\n\n }\n\n}\n", "file_path": "src/private/net.rs", "rank": 69, "score": 22.563248480153483 }, { "content": "\n\nimpl TryFrom<Newtype<esp_ip4_addr_t>> for Mask {\n\n type Error = EspError;\n\n\n\n fn try_from(esp_ip: Newtype<esp_ip4_addr_t>) -> Result<Self, Self::Error> {\n\n let ip: ipv4::Ipv4Addr = esp_ip.into();\n\n\n\n ip.try_into()\n\n .map_err(|_| EspError::from(ESP_ERR_INVALID_ARG as i32).unwrap())\n\n }\n\n}\n\n\n\nimpl From<Mask> for Newtype<ip4_addr_t> {\n\n fn from(mask: Mask) -> Self {\n\n let ip: ipv4::Ipv4Addr = mask.into();\n\n\n\n ip.into()\n\n }\n\n}\n\n\n", "file_path": "src/private/net.rs", "rank": 70, "score": 22.41092451969098 }, { "content": "\n\n fn get_running_slot<'a>(&'a self) -> Result<Self::Slot, Self::Error>\n\n where\n\n Self::Slot: 'a,\n\n {\n\n Ok(EspSlot(unsafe {\n\n *esp_ota_get_boot_partition().as_ref().unwrap()\n\n }))\n\n }\n\n\n\n fn get_update_slot<'a>(&'a self) -> Result<Self::Slot, Self::Error>\n\n where\n\n Self::Slot: 'a,\n\n {\n\n Ok(EspSlot(unsafe {\n\n *esp_ota_get_next_update_partition(ptr::null())\n\n .as_ref()\n\n .unwrap()\n\n }))\n\n }\n", "file_path": "src/ota.rs", "rank": 71, "score": 22.370579925859133 }, { "content": " let mut dns_info: esp_netif_dns_info_t = Default::default();\n\n\n\n unsafe {\n\n dns_info.ip.u_addr.ip4 = Newtype::<esp_ip4_addr_t>::from(secondary_dns).0;\n\n\n\n esp!(esp_netif_set_dns_info(\n\n self.1,\n\n esp_netif_dns_type_t_ESP_NETIF_DNS_BACKUP,\n\n &mut dns_info\n\n ))\n\n .unwrap();\n\n }\n\n }\n\n\n\n #[cfg(esp_idf_config_lwip_ipv4_napt)]\n\n pub fn enable_napt(&mut self, enable: bool) {\n\n unsafe {\n\n esp_idf_sys::ip_napt_enable_no(\n\n (esp_netif_get_netif_impl_index(self.1) - 1) as u8,\n\n if enable { 1 } else { 0 },\n", "file_path": "src/netif.rs", "rank": 72, "score": 22.332363603393258 }, { "content": " let c_key = CString::new(key.as_ref()).unwrap();\n\n let mut uvalue: u_int64_t = 0;\n\n\n\n let small: bool;\n\n let found: bool;\n\n\n\n match unsafe { nvs_get_u64(self.1, c_key.as_ptr(), uvalue as *mut _) as u32 } {\n\n ESP_ERR_NVS_NOT_FOUND => {\n\n found = false;\n\n small = false;\n\n }\n\n ESP_ERR_NVS_INVALID_LENGTH => {\n\n found = true;\n\n small = false;\n\n }\n\n result => {\n\n esp!(result)?;\n\n found = true;\n\n small = true;\n\n }\n", "file_path": "src/nvs_storage.rs", "rank": 73, "score": 21.693230414599768 }, { "content": " //esp!(nvs_flash_deinit()).unwrap(); TODO: To be checked why it fails\n\n *taken = false;\n\n });\n\n }\n\n\n\n info!(\"Dropped\");\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EspNvs(pub(crate) CString);\n\n\n\nimpl EspNvs {\n\n pub fn new(partition: impl AsRef<str>) -> Result<Self, EspError> {\n\n unsafe { NONDEFAULT_LOCKED.lock(|registrations| Self::init(partition, registrations)) }\n\n }\n\n\n\n fn init(\n\n partition: impl AsRef<str>,\n\n registrations: &mut alloc::collections::BTreeSet<CString>,\n", "file_path": "src/nvs.rs", "rank": 74, "score": 21.64880143641514 }, { "content": "pub enum InterfaceStack {\n\n Sta,\n\n Ap,\n\n Eth,\n\n Ppp,\n\n}\n\n\n\nimpl InterfaceStack {\n\n pub fn 
get_default_configuration(&self) -> InterfaceConfiguration {\n\n match self {\n\n Self::Sta => InterfaceConfiguration::wifi_default_client(),\n\n Self::Ap => InterfaceConfiguration::wifi_default_router(),\n\n Self::Eth => InterfaceConfiguration::eth_default_client(),\n\n Self::Ppp => InterfaceConfiguration::ppp_default_client(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[cfg_attr(feature = \"use_serde\", derive(Serialize, Deserialize))]\n", "file_path": "src/netif.rs", "rank": 75, "score": 21.596496588740152 }, { "content": " pmf_cfg: wifi_pmf_config_t {\n\n capable: false,\n\n required: false,\n\n },\n\n ..Default::default()\n\n };\n\n\n\n set_str(&mut result.ssid, conf.ssid.as_str());\n\n set_str(&mut result.password, conf.password.as_str());\n\n\n\n Newtype(result)\n\n }\n\n}\n\n\n\nimpl From<&Newtype<wifi_sta_config_t>> for ClientConfiguration {\n\n fn from(conf: &Newtype<wifi_sta_config_t>) -> Self {\n\n ClientConfiguration {\n\n ssid: from_cstr(&conf.0.ssid).into(),\n\n bssid: if conf.0.bssid_set {\n\n Some(conf.0.bssid)\n", "file_path": "src/wifi.rs", "rank": 76, "score": 21.440994021603295 }, { "content": "\n\n Ok(EspOta(Read))\n\n }\n\n\n\n fn abort(self) -> Result<Self::Ota, Self::Error> {\n\n esp!(unsafe { esp_ota_abort(self.0 .0) })?;\n\n\n\n Ok(EspOta(Read))\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl std::io::Write for EspOta<Update> {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> {\n\n self.write_buf(buf)\n\n .map(|_| buf.len())\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))\n\n }\n\n\n\n fn flush(&mut self) -> Result<(), std::io::Error> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/ota.rs", "rank": 77, "score": 21.284328690112492 }, { "content": "\n\n unsafe fn init() -> Result<Self, EspError> {\n\n if let Some(err) = EspError::from(nvs_flash_init()) {\n\n match err.code() as u32 {\n\n ESP_ERR_NVS_NO_FREE_PAGES | ESP_ERR_NVS_NEW_VERSION_FOUND => {\n\n esp!(nvs_flash_erase())?;\n\n esp!(nvs_flash_init())?;\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n Ok(Self(PrivateData))\n\n }\n\n}\n\n\n\nimpl Drop for EspDefaultNvs {\n\n fn drop(&mut self) {\n\n unsafe {\n\n DEFAULT_TAKEN.lock(|taken| {\n", "file_path": "src/nvs.rs", "rank": 78, "score": 21.025668144787957 }, { "content": " }\n\n\n\n pub fn wifi_default_client() -> Self {\n\n Self {\n\n key: \"WIFI_STA_DEF\".into(),\n\n description: \"sta\".into(),\n\n route_priority: 100,\n\n ip_configuration: InterfaceIpConfiguration::Client(Default::default()),\n\n interface_stack: InterfaceStack::Sta,\n\n }\n\n }\n\n\n\n pub fn wifi_default_router() -> Self {\n\n Self {\n\n key: \"WIFI_AP_DEF\".into(),\n\n description: \"ap\".into(),\n\n route_priority: 10,\n\n ip_configuration: InterfaceIpConfiguration::Router(Default::default()),\n\n interface_stack: InterfaceStack::Ap,\n\n }\n", "file_path": "src/netif.rs", "rank": 79, "score": 20.72561869385686 }, { "content": " handler: Some(Server::handle),\n\n };\n\n\n\n esp!(unsafe { esp_idf_sys::httpd_register_uri_handler(self.sd, &conf) })?;\n\n\n\n info!(\n\n \"Registered Httpd IDF server handler {:?} for URI \\\"{}\\\"\",\n\n method,\n\n c_str.to_str().unwrap()\n\n );\n\n\n\n self.registrations.push((c_str, conf));\n\n\n\n Ok(())\n\n }\n\n\n\n fn unregister(&mut self, uri: CString, conf: esp_idf_sys::httpd_uri_t) -> Result<()> {\n\n unsafe {\n\n esp!(esp_idf_sys::httpd_unregister_uri_handler(\n\n self.sd,\n", "file_path": "src/httpd.rs", "rank": 80, "score": 20.567003052003727 }, { "content": " } else {\n\n None\n\n },\n\n 
auth_method: Newtype(conf.0.threshold.authmode).into(),\n\n password: from_cstr(&conf.0.password).into(),\n\n channel: if conf.0.channel != 0 {\n\n Some(conf.0.channel)\n\n } else {\n\n None\n\n },\n\n ip_conf: None, // This must be set at a later stage\n\n }\n\n }\n\n}\n\n\n\nimpl From<&AccessPointConfiguration> for Newtype<wifi_ap_config_t> {\n\n fn from(conf: &AccessPointConfiguration) -> Self {\n\n let mut result = wifi_ap_config_t {\n\n ssid: [0; 32],\n\n password: [0; 64],\n", "file_path": "src/wifi.rs", "rank": 81, "score": 20.389132318672626 }, { "content": " wifi_mode_t_WIFI_MODE_STA => Configuration::Client(self.get_client_conf()?),\n\n wifi_mode_t_WIFI_MODE_APSTA => {\n\n Configuration::Mixed(self.get_client_conf()?, self.get_ap_conf()?)\n\n }\n\n _ => panic!(),\n\n };\n\n\n\n info!(\"Configuration gotten: {:?}\", &conf);\n\n\n\n Ok(conf)\n\n }\n\n }\n\n\n\n fn set_configuration(&mut self, conf: &Configuration) -> Result<(), Self::Error> {\n\n info!(\"Setting configuration: {:?}\", conf);\n\n\n\n self.stop()?;\n\n\n\n let status = unsafe {\n\n match conf {\n", "file_path": "src/wifi.rs", "rank": 82, "score": 20.25006391513918 }, { "content": "\n\n info!(\"Started\");\n\n\n\n Self::netif_info(\"STA\", self.sta_netif.as_ref())?;\n\n Self::netif_info(\"AP\", self.ap_netif.as_ref())?;\n\n } else {\n\n info!(\"Status is NOT of operating type, not starting\");\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn stop(&mut self) -> Result<(), EspError> {\n\n info!(\"Stopping\");\n\n\n\n self.shared.with_lock(|shared| shared.operating = false);\n\n\n\n esp!(unsafe { esp_wifi_disconnect() }).or_else(|err| {\n\n if err.code() == esp_idf_sys::ESP_ERR_WIFI_NOT_STARTED as esp_err_t {\n\n Ok(())\n", "file_path": "src/wifi.rs", "rank": 83, "score": 20.22572905405149 }, { "content": " c_headers.push((\n\n CString::new(key.as_str()).unwrap(),\n\n CString::new(value.as_str()).unwrap(),\n\n ))\n\n }\n\n\n\n for (c_field, c_value) in &c_headers {\n\n esp!(unsafe {\n\n esp_idf_sys::httpd_resp_set_hdr(self.0, c_field.as_ptr(), c_value.as_ptr())\n\n })?;\n\n }\n\n\n\n match response.body {\n\n Body::Empty => self.send_body_bytes(&[]),\n\n Body::Bytes(vec) => self.send_body_bytes(vec.as_slice()),\n\n Body::Read(_, mut r) => self.send_body_read(&mut r),\n\n }\n\n }\n\n\n\n fn send_body_bytes(&mut self, data: &[u8]) -> anyhow::Result<()> {\n", "file_path": "src/httpd.rs", "rank": 84, "score": 19.993442821512364 }, { "content": " Newtype(dns_info.ip.u_addr.ip4).into()\n\n }\n\n }\n\n\n\n pub fn set_dns(&mut self, dns: ipv4::Ipv4Addr) {\n\n let mut dns_info: esp_netif_dns_info_t = Default::default();\n\n\n\n unsafe {\n\n dns_info.ip.u_addr.ip4 = Newtype::<esp_ip4_addr_t>::from(dns).0;\n\n\n\n esp!(esp_netif_set_dns_info(\n\n self.1,\n\n esp_netif_dns_type_t_ESP_NETIF_DNS_MAIN,\n\n &mut dns_info\n\n ))\n\n .unwrap();\n\n }\n\n }\n\n\n\n pub fn set_secondary_dns(&mut self, secondary_dns: ipv4::Ipv4Addr) {\n", "file_path": "src/netif.rs", "rank": 85, "score": 19.941237006636282 }, { "content": " ipv4::ClientConfiguration::Fixed(ref fixed_conf) => fixed_conf.dns,\n\n },\n\n match ip_conf {\n\n ipv4::ClientConfiguration::DHCP => None,\n\n ipv4::ClientConfiguration::Fixed(ref fixed_conf) => fixed_conf.secondary_dns,\n\n },\n\n ),\n\n InterfaceIpConfiguration::Router(ref ip_conf) => (\n\n esp_netif_inherent_config_t {\n\n flags: (if ip_conf.dhcp_enabled {\n\n esp_netif_flags_ESP_NETIF_DHCP_SERVER\n\n } else {\n\n 0\n\n }) | esp_netif_flags_ESP_NETIF_FLAG_AUTOUP,\n\n mac: [0; 6],\n\n ip_info: ptr::null(),\n\n get_ip_event: 
0,\n\n lost_ip_event: 0,\n\n if_key: c_if_key.as_c_str().as_ptr() as _,\n\n if_desc: c_if_description.as_c_str().as_ptr() as _,\n", "file_path": "src/netif.rs", "rank": 86, "score": 19.780059062253642 }, { "content": "\n\n info!(\"Set status: {:?}\", shared.status);\n\n\n\n info!(\"Wifi event {} handled\", event_id);\n\n\n\n Ok(())\n\n }\n\n\n\n #[allow(non_upper_case_globals)]\n\n unsafe fn on_ip_event(\n\n shared: &mut Shared,\n\n event_id: c_types::c_int,\n\n event_data: *mut c_types::c_void,\n\n ) -> Result<(), EspError> {\n\n info!(\"Got IP event: {}\", event_id);\n\n\n\n shared.status = Status(\n\n match event_id as u32 {\n\n ip_event_t_IP_EVENT_STA_GOT_IP => {\n\n let event: *const ip_event_got_ip_t = mem::transmute(event_data);\n", "file_path": "src/wifi.rs", "rank": 87, "score": 19.725871513178088 }, { "content": " Self::on_wifi_event(shared, event_id, event_data)\n\n } else if event_base == IP_EVENT {\n\n Self::on_ip_event(shared, event_id, event_data)\n\n } else {\n\n warn!(\"Got unknown event base\");\n\n\n\n Ok(())\n\n }\n\n .unwrap()\n\n });\n\n }\n\n\n\n #[allow(non_upper_case_globals)]\n\n unsafe fn on_wifi_event(\n\n shared: &mut Shared,\n\n event_id: c_types::c_int,\n\n event_data: *mut c_types::c_void,\n\n ) -> Result<(), EspError> {\n\n info!(\"Got wifi event: {} \", event_id);\n\n\n", "file_path": "src/wifi.rs", "rank": 88, "score": 19.629754986459403 }, { "content": " esp_netif_flags_ESP_NETIF_FLAG_AUTOUP\n\n }\n\n },\n\n mac: [0; 6],\n\n ip_info: ptr::null(),\n\n get_ip_event: match ip_conf {\n\n ipv4::ClientConfiguration::DHCP => {\n\n if conf.interface_stack == InterfaceStack::Sta {\n\n ip_event_t_IP_EVENT_STA_GOT_IP\n\n } else {\n\n 0\n\n }\n\n }\n\n ipv4::ClientConfiguration::Fixed(_) => 0,\n\n },\n\n lost_ip_event: match ip_conf {\n\n ipv4::ClientConfiguration::DHCP => {\n\n if conf.interface_stack == InterfaceStack::Sta {\n\n ip_event_t_IP_EVENT_STA_LOST_IP\n\n } else {\n", "file_path": "src/netif.rs", "rank": 89, "score": 19.616957435965467 }, { "content": "use core::convert::{TryFrom, TryInto};\n\n\n\nuse embedded_svc::ipv4::{self, Mask};\n\nuse esp_idf_sys::*;\n\n\n\nuse crate::private::common::*;\n\n\n\nimpl From<ipv4::Ipv4Addr> for Newtype<esp_ip4_addr_t> {\n\n fn from(ip: ipv4::Ipv4Addr) -> Self {\n\n let octets = ip.octets();\n\n\n\n let addr = ((octets[0] as u32 & 0xff) << 24)\n\n | ((octets[1] as u32 & 0xff) << 16)\n\n | ((octets[2] as u32 & 0xff) << 8)\n\n | (octets[3] as u32 & 0xff);\n\n\n\n Newtype(esp_ip4_addr_t {\n\n addr: u32::from_be(addr),\n\n })\n\n }\n", "file_path": "src/private/net.rs", "rank": 90, "score": 19.59199089442854 }, { "content": "\n\n self.sta_netif = Some(netif);\n\n\n\n info!(\"STA IP configuration done\");\n\n } else {\n\n self.sta_netif = None;\n\n\n\n info!(\"Skipping STA IP configuration (not configured)\");\n\n }\n\n\n\n self.shared\n\n .with_lock(|shared| shared.client_ip_conf = conf.clone());\n\n\n\n Ok(())\n\n }\n\n\n\n fn set_router_ip_conf(\n\n &mut self,\n\n conf: &Option<ipv4::RouterConfiguration>,\n\n ) -> Result<(), EspError> {\n", "file_path": "src/wifi.rs", "rank": 91, "score": 19.36339257342441 }, { "content": " info!(\"Found access point {:?}\", ap_info);\n\n\n\n result.push(ap_info);\n\n }\n\n\n\n Ok(result)\n\n }\n\n\n\n #[allow(non_upper_case_globals)]\n\n fn get_configuration(&self) -> Result<Configuration, Self::Error> {\n\n info!(\"Getting configuration\");\n\n\n\n unsafe {\n\n let mut mode: wifi_mode_t = 0;\n\n\n\n esp!(esp_wifi_get_mode(&mut mode))?;\n\n\n\n let conf = match mode {\n\n 
wifi_mode_t_WIFI_MODE_NULL => Configuration::None,\n\n wifi_mode_t_WIFI_MODE_AP => Configuration::AccessPoint(self.get_ap_conf()?),\n", "file_path": "src/wifi.rs", "rank": 92, "score": 19.361865400489712 }, { "content": "}\n\n\n\nimpl From<Newtype<esp_ip4_addr_t>> for ipv4::Ipv4Addr {\n\n fn from(ip: Newtype<esp_ip4_addr_t>) -> Self {\n\n let addr = u32::to_be(ip.0.addr);\n\n\n\n let (a, b, c, d) = (\n\n ((addr >> 24) & 0xff) as u8,\n\n ((addr >> 16) & 0xff) as u8,\n\n ((addr >> 8) & 0xff) as u8,\n\n (addr & 0xff) as u8,\n\n );\n\n\n\n ipv4::Ipv4Addr::new(a, b, c, d)\n\n }\n\n}\n\n\n\nimpl From<ipv4::Ipv4Addr> for Newtype<ip4_addr_t> {\n\n fn from(ip: ipv4::Ipv4Addr) -> Self {\n\n let result: Newtype<esp_ip4_addr_t> = ip.into();\n", "file_path": "src/private/net.rs", "rank": 93, "score": 19.208009693687227 }, { "content": " ssid_len: conf.ssid.len() as u8,\n\n channel: conf.channel,\n\n authmode: Newtype::<wifi_auth_mode_t>::from(conf.auth_method).0,\n\n ssid_hidden: if conf.ssid_hidden { 1 } else { 0 },\n\n max_connection: cmp::max(conf.max_connections, 16) as u8,\n\n beacon_interval: 100,\n\n ..Default::default()\n\n };\n\n\n\n set_str(&mut result.ssid, conf.ssid.as_str());\n\n set_str(&mut result.password, conf.password.as_str());\n\n\n\n Newtype(result)\n\n }\n\n}\n\n\n\nimpl From<Newtype<wifi_ap_config_t>> for AccessPointConfiguration {\n\n fn from(conf: Newtype<wifi_ap_config_t>) -> Self {\n\n AccessPointConfiguration {\n\n ssid: if conf.0.ssid_len == 0 {\n", "file_path": "src/wifi.rs", "rank": 94, "score": 19.13606509580889 }, { "content": " esp_netif_dns_type_t_ESP_NETIF_DNS_MAIN,\n\n &mut dns_info\n\n ))\n\n .unwrap();\n\n\n\n Newtype(dns_info.ip.u_addr.ip4).into()\n\n }\n\n }\n\n\n\n pub fn get_secondary_dns(&self) -> ipv4::Ipv4Addr {\n\n let mut dns_info = Default::default();\n\n\n\n unsafe {\n\n esp!(esp_netif_get_dns_info(\n\n self.1,\n\n esp_netif_dns_type_t_ESP_NETIF_DNS_BACKUP,\n\n &mut dns_info\n\n ))\n\n .unwrap();\n\n\n", "file_path": "src/netif.rs", "rank": 95, "score": 19.089497957517768 }, { "content": " 0\n\n }\n\n }\n\n ipv4::ClientConfiguration::Fixed(_) => 0,\n\n },\n\n if_key: c_if_key.as_c_str().as_ptr() as _,\n\n if_desc: c_if_description.as_c_str().as_ptr() as _,\n\n route_prio: conf.route_priority as _,\n\n },\n\n match ip_conf {\n\n ipv4::ClientConfiguration::DHCP => None,\n\n ipv4::ClientConfiguration::Fixed(ref fixed_conf) => Some(esp_netif_ip_info_t {\n\n ip: Newtype::<esp_ip4_addr_t>::from(fixed_conf.ip).0,\n\n netmask: Newtype::<esp_ip4_addr_t>::from(fixed_conf.subnet.mask).0,\n\n gw: Newtype::<esp_ip4_addr_t>::from(fixed_conf.subnet.gateway).0,\n\n }),\n\n },\n\n false,\n\n match ip_conf {\n\n ipv4::ClientConfiguration::DHCP => None,\n", "file_path": "src/netif.rs", "rank": 96, "score": 18.985945695044354 }, { "content": " uri.as_ptr(),\n\n conf.method\n\n ))?;\n\n\n\n let _drop = Box::from_raw(conf.user_ctx as *mut _);\n\n };\n\n\n\n info!(\n\n \"Unregistered Httpd IDF server handler {:?} for URI \\\"{}\\\"\",\n\n conf.method,\n\n uri.to_str().unwrap()\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n fn stop(&mut self) -> Result<()> {\n\n if !self.sd.is_null() {\n\n while !self.registrations.is_empty() {\n\n let (uri, registration) = self.registrations.pop().unwrap();\n", "file_path": "src/httpd.rs", "rank": 97, "score": 18.755230707994947 }, { "content": " route_prio: conf.route_priority as _,\n\n },\n\n Some(esp_netif_ip_info_t {\n\n ip: Newtype::<esp_ip4_addr_t>::from(ip_conf.subnet.gateway).0,\n\n netmask: 
Newtype::<esp_ip4_addr_t>::from(ip_conf.subnet.mask).0,\n\n gw: Newtype::<esp_ip4_addr_t>::from(ip_conf.subnet.gateway).0,\n\n }),\n\n ip_conf.dhcp_enabled,\n\n ip_conf.dns,\n\n None, /* For APs, ESP-IDF supports setting a primary DNS only ip_conf.secondary_dns */\n\n ),\n\n };\n\n\n\n if let Some(ip_info) = ip_info.as_ref() {\n\n esp_inherent_config.ip_info = ip_info;\n\n }\n\n\n\n let cfg = esp_netif_config_t {\n\n base: &esp_inherent_config,\n\n driver: ptr::null(),\n", "file_path": "src/netif.rs", "rank": 98, "score": 18.60146549873935 }, { "content": "use core::mem;\n\nuse core::ptr;\n\n\n\nextern crate alloc;\n\nuse alloc::vec;\n\n\n\nuse ::log::*;\n\n\n\nuse mutex_trait::*;\n\n\n\nuse embedded_svc::ota::{self, OtaUpdate};\n\n\n\nuse esp_idf_sys::*;\n\n\n\nuse crate::private::{common::*, cstr::*};\n\n\n\nstatic mut TAKEN: EspMutex<bool> = EspMutex::new(false);\n\n\n\nimpl From<Newtype<&esp_app_desc_t>> for ota::FirmwareInfo {\n\n fn from(app_desc: Newtype<&esp_app_desc_t>) -> Self {\n", "file_path": "src/ota.rs", "rank": 99, "score": 18.3930152286917 } ]
Rust
src/model/wrapper.rs
MaxOhn/bathbot-cache
c9f5f406d32bc99d31d618d69ed0db84aba4833b
use serde::ser::{Serialize, SerializeStruct, Serializer}; use twilight_model::{ channel::{ thread::{PrivateThread, PublicThread}, GuildChannel, TextChannel, }, gateway::payload::incoming::MemberUpdate, guild::{Guild, Member, PartialGuild, PartialMember, Role}, id::{ChannelId, GuildId}, user::{CurrentUser, User}, }; pub struct GuildWrapper<'g>(pub &'g Guild); impl<'g> From<&'g Guild> for GuildWrapper<'g> { fn from(guild: &'g Guild) -> Self { Self(guild) } } impl<'g> Serialize for GuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct PartialGuildWrapper<'g>(pub &'g PartialGuild); impl<'g> From<&'g PartialGuild> for PartialGuildWrapper<'g> { fn from(guild: &'g PartialGuild) -> Self { Self(guild) } } impl<'g> Serialize for PartialGuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct CurrentUserWrapper<'u>(pub &'u CurrentUser); impl<'u> From<&'u CurrentUser> for CurrentUserWrapper<'u> { fn from(user: &'u CurrentUser) -> Self { Self(user) } } impl<'u> Serialize for CurrentUserWrapper<'u> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.avatar.is_some() as usize; let mut user = s.serialize_struct("CachedCurrentUser", len)?; if let Some(ref avatar) = self.0.avatar { user.serialize_field("a", avatar)?; } user.serialize_field("b", &self.0.discriminator)?; user.serialize_field("c", &self.0.id)?; user.serialize_field("d", &self.0.name)?; user.end() } } pub struct RoleWrapper<'r>(pub &'r Role); impl<'r> From<&'r Role> for RoleWrapper<'r> { fn from(role: &'r Role) -> Self { Self(role) } } impl<'r> Serialize for RoleWrapper<'r> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut role = s.serialize_struct("CachedRole", 4)?; role.serialize_field("a", &self.0.id)?; role.serialize_field("b", &self.0.name)?; role.serialize_field("c", &self.0.permissions)?; role.serialize_field("d", &self.0.position)?; role.end() } } pub struct MemberWrapper<'m>(pub &'m Member); impl<'m> From<&'m Member> for MemberWrapper<'m> { fn from(member: &'m Member) -> Self { Self(member) } } impl<'m> Serialize for MemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct PartialMemberWrapper<'m> { guild: GuildId, member: &'m PartialMember, user: &'m User, } impl<'m> From<(&'m PartialMember, GuildId, &'m User)> for PartialMemberWrapper<'m> { fn from((member, guild, user): (&'m PartialMember, 
GuildId, &'m User)) -> Self { Self { member, guild, user, } } } impl<'m> Serialize for PartialMemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.member.nick.is_some() as usize + !self.member.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.guild)?; if let Some(ref nick) = self.member.nick { member.serialize_field("b", nick)?; } if !self.member.roles.is_empty() { member.serialize_field("c", &self.member.roles)?; } member.serialize_field("d", &self.user.id)?; member.end() } } pub struct MemberUpdateWrapper<'m>(&'m MemberUpdate); impl<'m> From<&'m MemberUpdate> for MemberUpdateWrapper<'m> { fn from(member: &'m MemberUpdate) -> Self { Self(member) } } impl<'m> Serialize for MemberUpdateWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct TextChannelWrapper<'c>(pub &'c TextChannel); impl<'c> From<&'c TextChannel> for TextChannelWrapper<'c> { fn from(channel: &'c TextChannel) -> Self { Self(channel) } } impl<'c> Serialize for TextChannelWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + !self.0.permission_overwrites.is_empty() as usize; let mut channel = s.serialize_struct("CachedTextChannel", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if !self.0.permission_overwrites.is_empty() { channel.serialize_field("d", &self.0.permission_overwrites)?; } channel.end() } } pub struct PublicThreadWrapper<'c>(pub &'c PublicThread); impl<'c> From<&'c PublicThread> for PublicThreadWrapper<'c> { fn from(channel: &'c PublicThread) -> Self { Self(channel) } } impl<'c> Serialize for PublicThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub struct PrivateThreadWrapper<'c>(pub &'c PrivateThread); impl<'c> From<&'c PrivateThread> for PrivateThreadWrapper<'c> { fn from(channel: &'c PrivateThread) -> Self { Self(channel) } } impl<'c> Serialize for PrivateThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub enum 
BasicGuildChannel<'c> { PrivateThread(&'c PrivateThread), PublicThread(&'c PublicThread), Text(&'c TextChannel), } impl<'c> BasicGuildChannel<'c> { pub const fn guild_id(&self) -> Option<GuildId> { match self { Self::PrivateThread(c) => c.guild_id, Self::PublicThread(c) => c.guild_id, Self::Text(c) => c.guild_id, } } pub const fn id(&self) -> ChannelId { match self { Self::PrivateThread(c) => c.id, Self::PublicThread(c) => c.id, Self::Text(c) => c.id, } } pub fn from(channel: &'c GuildChannel) -> Option<Self> { match channel { GuildChannel::PrivateThread(c) => Some(Self::PrivateThread(c)), GuildChannel::PublicThread(c) => Some(Self::PublicThread(c)), GuildChannel::Text(c) => Some(Self::Text(c)), _ => None, } } } impl<'c> Serialize for BasicGuildChannel<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { match self { BasicGuildChannel::PrivateThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 0, "a", &PrivateThreadWrapper(c)) } BasicGuildChannel::PublicThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 1, "b", &PublicThreadWrapper(c)) } BasicGuildChannel::Text(c) => { s.serialize_newtype_variant("CachedGuildChannel", 2, "c", &TextChannelWrapper(c)) } } } }
use serde::ser::{Serialize, SerializeStruct, Serializer}; use twilight_model::{ channel::{ thread::{PrivateThread, PublicThread}, GuildChannel, TextChannel, }, gateway::payload::incoming::MemberUpdate, guild::{Guild, Member, PartialGuild, PartialMember, Role}, id::{ChannelId, GuildId}, user::{CurrentUser, User}, }; pub struct GuildWrapper<'g>(pub &'g Guild); impl<'g> From<&'g Guild> for GuildWrapper<'g> { fn from(guild: &'g Guild) -> Self { Self(guild) } } impl<'g> Serialize for GuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct PartialGuildWrapper<'g>(pub &'g PartialGuild); impl<'g> From<&'g PartialGuild> for PartialGuildWrapper<'g> { fn from(guild: &'g PartialGuild) -> Self { Self(guild) } } impl<'g> Serialize for PartialGuildWrapper<'g> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.icon.is_some() as usize; let mut guild = s.serialize_struct("CachedGuild", len)?; if let Some(ref icon) = self.0.icon { guild.serialize_field("a", icon)?; } guild.serialize_field("b", &self.0.id)?; guild.serialize_field("c", &self.0.name)?; guild.serialize_field("d", &self.0.owner_id)?; guild.end() } } pub struct CurrentUserWrapper<'u>(pub &'u CurrentUser); impl<'u> From<&'u CurrentUser> for CurrentUserWrapper<'u> { fn from(user: &'u CurrentUser) -> Self { Self(user) } } impl<'u> Serialize for CurrentUserWrapper<'u> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 3 + self.0.avatar.is_some() as usize; let mut user = s.serialize_struct("CachedCurrentUser", len)?; if let Some(ref avatar) = self.0.avatar { user.serialize_field("a", avatar)?; } user.serialize_field("b", &self.0.discriminator)?; user.serialize_field("c", &self.0.id)?; user.serialize_field("d", &self.0.name)?; user.end() } } pub struct RoleWrapper<'r>(pub &'r Role); impl<'r> From<&'r Role> for RoleWrapper<'r> { fn from(role: &'r Role) -> Self { Self(role) } } impl<'r> Serialize for RoleWrapper<'r> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let mut role = s.serialize_struct("CachedRole", 4)?; role.serialize_field("a", &self.0.id)?; role.serialize_field("b", &self.0.name)?; role.serialize_field("c", &self.0.permissions)?; role.serialize_field("d", &self.0.position)?; role.end() } } pub struct MemberWrapper<'m>(pub &'m Member); impl<'m> From<&'m Member> for MemberWrapper<'m> { fn from(member: &'m Member) -> Self { Self(member) } } impl<'m> Serialize for MemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.
member.end() } } pub struct PartialMemberWrapper<'m> { guild: GuildId, member: &'m PartialMember, user: &'m User, } impl<'m> From<(&'m PartialMember, GuildId, &'m User)> for PartialMemberWrapper<'m> { fn from((member, guild, user): (&'m PartialMember, GuildId, &'m User)) -> Self { Self { member, guild, user, } } } impl<'m> Serialize for PartialMemberWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.member.nick.is_some() as usize + !self.member.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.guild)?; if let Some(ref nick) = self.member.nick { member.serialize_field("b", nick)?; } if !self.member.roles.is_empty() { member.serialize_field("c", &self.member.roles)?; } member.serialize_field("d", &self.user.id)?; member.end() } } pub struct MemberUpdateWrapper<'m>(&'m MemberUpdate); impl<'m> From<&'m MemberUpdate> for MemberUpdateWrapper<'m> { fn from(member: &'m MemberUpdate) -> Self { Self(member) } } impl<'m> Serialize for MemberUpdateWrapper<'m> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.nick.is_some() as usize + !self.0.roles.is_empty() as usize; let mut member = s.serialize_struct("CachedMember", len)?; member.serialize_field("a", &self.0.guild_id)?; if let Some(ref nick) = self.0.nick { member.serialize_field("b", nick)?; } if !self.0.roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?; member.end() } } pub struct TextChannelWrapper<'c>(pub &'c TextChannel); impl<'c> From<&'c TextChannel> for TextChannelWrapper<'c> { fn from(channel: &'c TextChannel) -> Self { Self(channel) } } impl<'c> Serialize for TextChannelWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + !self.0.permission_overwrites.is_empty() as usize; let mut channel = s.serialize_struct("CachedTextChannel", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if !self.0.permission_overwrites.is_empty() { channel.serialize_field("d", &self.0.permission_overwrites)?; } channel.end() } } pub struct PublicThreadWrapper<'c>(pub &'c PublicThread); impl<'c> From<&'c PublicThread> for PublicThreadWrapper<'c> { fn from(channel: &'c PublicThread) -> Self { Self(channel) } } impl<'c> Serialize for PublicThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub struct PrivateThreadWrapper<'c>(pub &'c PrivateThread); impl<'c> From<&'c PrivateThread> for PrivateThreadWrapper<'c> { fn from(channel: &'c PrivateThread) -> Self { Self(channel) } } impl<'c> Serialize for PrivateThreadWrapper<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { let len = 2 + self.0.guild_id.is_some() as usize + self.0.parent_id.is_some() as usize; let mut channel = s.serialize_struct("CachedThread", len)?; if let Some(ref guild) = self.0.guild_id { 
channel.serialize_field("a", guild)?; } channel.serialize_field("b", &self.0.id)?; channel.serialize_field("c", &self.0.name)?; if let Some(ref parent_id) = self.0.parent_id { channel.serialize_field("d", parent_id)?; } channel.end() } } pub enum BasicGuildChannel<'c> { PrivateThread(&'c PrivateThread), PublicThread(&'c PublicThread), Text(&'c TextChannel), } impl<'c> BasicGuildChannel<'c> { pub const fn guild_id(&self) -> Option<GuildId> { match self { Self::PrivateThread(c) => c.guild_id, Self::PublicThread(c) => c.guild_id, Self::Text(c) => c.guild_id, } } pub const fn id(&self) -> ChannelId { match self { Self::PrivateThread(c) => c.id, Self::PublicThread(c) => c.id, Self::Text(c) => c.id, } } pub fn from(channel: &'c GuildChannel) -> Option<Self> { match channel { GuildChannel::PrivateThread(c) => Some(Self::PrivateThread(c)), GuildChannel::PublicThread(c) => Some(Self::PublicThread(c)), GuildChannel::Text(c) => Some(Self::Text(c)), _ => None, } } } impl<'c> Serialize for BasicGuildChannel<'c> { fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> { match self { BasicGuildChannel::PrivateThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 0, "a", &PrivateThreadWrapper(c)) } BasicGuildChannel::PublicThread(c) => { s.serialize_newtype_variant("CachedGuildChannel", 1, "b", &PublicThreadWrapper(c)) } BasicGuildChannel::Text(c) => { s.serialize_newtype_variant("CachedGuildChannel", 2, "c", &TextChannelWrapper(c)) } } } }
roles.is_empty() { member.serialize_field("c", &self.0.roles)?; } member.serialize_field("d", &self.0.user.id)?;
function_block-random_span
[ { "content": "fn populate_members(key: &RedisKey, members: &mut RedisMembers) {\n\n match key {\n\n RedisKey::Channel { guild, .. } => {\n\n populate_member(CHANNEL_KEYS, *key, members);\n\n\n\n if let Some(guild) = guild {\n\n populate_member(format!(\"{}:{}\", GUILD_KEYS, guild), *key, members);\n\n }\n\n }\n\n RedisKey::Guild { .. } => populate_member(GUILD_KEYS, *key, members),\n\n RedisKey::Member { guild, .. } => {\n\n populate_member(MEMBER_KEYS, *key, members);\n\n populate_member(format!(\"{}:{}\", GUILD_KEYS, guild), *key, members);\n\n }\n\n RedisKey::Role { guild, .. } => {\n\n populate_member(ROLE_KEYS, *key, members);\n\n\n\n if let Some(guild) = guild {\n\n populate_member(format!(\"{}:{}\", GUILD_KEYS, guild), *key, members);\n\n }\n\n }\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "src/store.rs", "rank": 0, "score": 68512.36524664641 }, { "content": "fn populate_member(key: impl Into<Cow<'static, str>>, value: RedisKey, members: &mut RedisMembers) {\n\n members\n\n .entry(key.into())\n\n .or_insert_with(Vec::new)\n\n .push(value)\n\n}\n", "file_path": "src/store.rs", "rank": 1, "score": 56566.16471270549 }, { "content": "fn filter_member_key(key: RedisKey) -> Option<UserId> {\n\n if let RedisKey::Member { user, .. } = key {\n\n Some(user)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\npub struct CacheStats {\n\n pub channels: usize,\n\n pub guilds: usize,\n\n pub members: usize,\n\n pub roles: usize,\n\n}\n\n\n\npub enum MemberLookup {\n\n Found(CachedMember),\n\n NotChecked,\n\n NotFound,\n\n}\n", "file_path": "src/model/mod.rs", "rank": 2, "score": 48060.00105332241 }, { "content": "type RedisMembers = HashMap<Cow<'static, str>, Vec<RedisKey>>;\n\n\n", "file_path": "src/store.rs", "rank": 3, "score": 13721.629085173487 }, { "content": " if let RedisKey::Member { user, .. } = self {\n\n Some(*user)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for RedisKey {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::BotUser => f.write_str(BOT_USER_KEY),\n\n Self::Channel { channel, .. } => write!(f, \"{}:{}\", CHANNEL_KEY, channel),\n\n Self::Guild { guild } => write!(f, \"{}:{}\", GUILD_KEY, guild),\n\n Self::Member { guild, user } => write!(f, \"{}:{}:{}\", MEMBER_KEY, guild, user),\n\n Self::Role { role, .. 
} => write!(f, \"{}:{}\", ROLE_KEY, role),\n\n Self::Sessions => f.write_str(SESSIONS_KEY),\n\n Self::Shards => f.write_str(SHARDS_KEY),\n\n }\n\n }\n", "file_path": "src/model/redis_key.rs", "rank": 16, "score": 15.29152683314178 }, { "content": "use std::fmt;\n\n\n\nuse deadpool_redis::redis::{\n\n ErrorKind, FromRedisValue, RedisError, RedisResult, RedisWrite, ToRedisArgs, Value,\n\n};\n\nuse twilight_model::{\n\n guild::Member,\n\n id::{ChannelId, GuildId, RoleId, UserId},\n\n};\n\n\n\nuse crate::constants::{\n\n BOT_USER_KEY, CHANNEL_KEY, GUILD_KEY, MEMBER_KEY, ROLE_KEY, SESSIONS_KEY, SHARDS_KEY,\n\n};\n\n\n\nuse super::{BasicGuildChannel, CachedChannel, MemberWrapper};\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum RedisKey {\n\n BotUser,\n\n Channel {\n", "file_path": "src/model/redis_key.rs", "rank": 18, "score": 13.49903785157125 }, { "content": " guild: channel.guild_id(),\n\n channel: channel.id(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&Member> for RedisKey {\n\n fn from(member: &Member) -> Self {\n\n Self::Member {\n\n guild: member.guild_id,\n\n user: member.user.id,\n\n }\n\n }\n\n}\n\n\n\nimpl<'m> From<&MemberWrapper<'m>> for RedisKey {\n\n fn from(member: &MemberWrapper<'m>) -> Self {\n\n Self::Member {\n\n guild: member.0.guild_id,\n\n user: member.0.user.id,\n", "file_path": "src/model/redis_key.rs", "rank": 19, "score": 13.228425202096664 }, { "content": "\n\n fn text_channel_permissions(\n\n permissions: &mut Permissions,\n\n user: UserId,\n\n guild: &GuildOrId,\n\n channel: Cow<'_, CachedTextChannel>,\n\n member: CachedMember,\n\n ) {\n\n let mut everyone_allowed = Permissions::empty();\n\n let mut everyone_denied = Permissions::empty();\n\n let mut user_allowed = Permissions::empty();\n\n let mut user_denied = Permissions::empty();\n\n let mut role_allowed = Permissions::empty();\n\n let mut role_denied = Permissions::empty();\n\n\n\n for overwrite in &channel.permission_overwrites {\n\n match overwrite.kind {\n\n PermissionOverwriteType::Member(member) => {\n\n if member == user {\n\n user_allowed |= overwrite.allow;\n", "file_path": "src/util.rs", "rank": 20, "score": 13.151720242359819 }, { "content": "pub(crate) const SESSIONS_KEY: &str = \"gateway_sessions\";\n\npub(crate) const SHARDS_KEY: &str = \"gateway_shards\";\n\n\n\npub(crate) const BOT_USER_KEY: &str = \"bot_user\";\n\npub(crate) const GUILD_KEY: &str = \"guild\";\n\npub(crate) const CHANNEL_KEY: &str = \"channel\";\n\npub(crate) const ROLE_KEY: &str = \"role\";\n\npub(crate) const MEMBER_KEY: &str = \"member\";\n\n\n\npub(crate) const GUILD_KEYS: &str = \"guild_keys\";\n\npub(crate) const CHANNEL_KEYS: &str = \"channel_keys\";\n\npub(crate) const ROLE_KEYS: &str = \"role_keys\";\n\npub(crate) const MEMBER_KEYS: &str = \"member_keys\";\n\n\n\npub(crate) const OWNER_USER_ID: u64 = 219905108316520448;\n", "file_path": "src/constants.rs", "rank": 21, "score": 13.03695742100281 }, { "content": " guild: Option<GuildId>,\n\n channel: ChannelId,\n\n },\n\n Guild {\n\n guild: GuildId,\n\n },\n\n Member {\n\n guild: GuildId,\n\n user: UserId,\n\n },\n\n Role {\n\n guild: Option<GuildId>,\n\n role: RoleId,\n\n },\n\n Sessions,\n\n Shards,\n\n}\n\n\n\nimpl RedisKey {\n\n pub(crate) fn user_id(&self) -> Option<UserId> {\n", "file_path": "src/model/redis_key.rs", "rank": 22, "score": 12.831403679322463 }, { "content": "use std::{borrow::Cow, iter};\n\n\n\nuse deadpool_redis::redis::AsyncCommands;\n\nuse hashbrown::HashMap;\n\nuse serde::Serialize;\n\nuse serde_cbor::Error as CborError;\n\nuse twilight_model::{\n\n 
application::interaction::Interaction,\n\n channel::Channel,\n\n gateway::event::Event,\n\n guild::{Member, Role},\n\n id::GuildId,\n\n};\n\n\n\nuse crate::{\n\n constants::{CHANNEL_KEYS, GUILD_KEYS, MEMBER_KEYS, ROLE_KEYS},\n\n model::{\n\n BasicGuildChannel, GuildWrapper, MemberUpdateWrapper, MemberWrapper, PartialGuildWrapper,\n\n PartialMemberWrapper, RedisKey, RoleWrapper, SessionInfo,\n\n },\n", "file_path": "src/store.rs", "rank": 23, "score": 12.606925776395666 }, { "content": " fn from(channel: ChannelId) -> Self {\n\n Self::Channel {\n\n guild: None,\n\n channel,\n\n }\n\n }\n\n}\n\n\n\nimpl From<GuildId> for RedisKey {\n\n fn from(guild: GuildId) -> Self {\n\n Self::Guild { guild }\n\n }\n\n}\n\n\n\nimpl From<(GuildId, UserId)> for RedisKey {\n\n fn from((guild, user): (GuildId, UserId)) -> Self {\n\n Self::Member { guild, user }\n\n }\n\n}\n\n\n", "file_path": "src/model/redis_key.rs", "rank": 24, "score": 12.498542612617975 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]\n\npub struct CachedMember {\n\n #[serde(rename = \"a\")]\n\n pub guild_id: GuildId,\n\n #[serde(default, rename = \"b\", skip_serializing_if = \"Option::is_none\")]\n\n pub nick: Option<String>,\n\n #[serde(default, rename = \"c\", skip_serializing_if = \"Vec::is_empty\")]\n\n pub roles: Vec<RoleId>,\n\n #[serde(rename = \"d\")]\n\n pub user_id: UserId,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]\n\npub struct CachedRole {\n\n #[serde(rename = \"a\")]\n\n pub id: RoleId,\n\n #[serde(rename = \"b\")]\n\n pub name: String,\n", "file_path": "src/model/mod.rs", "rank": 25, "score": 12.454177763547083 }, { "content": "mod redis_key;\n\nmod wrapper;\n\n\n\nuse std::{iter::FilterMap, vec::IntoIter};\n\n\n\npub use redis_key::RedisKey;\n\npub(crate) use wrapper::*;\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse twilight_model::{\n\n channel::permission_overwrite::PermissionOverwrite,\n\n guild::Permissions,\n\n id::{ChannelId, GuildId, RoleId, UserId},\n\n};\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub enum CachedChannel {\n\n #[serde(rename = \"a\")]\n\n PrivateThread(CachedThread),\n\n #[serde(rename = \"b\")]\n", "file_path": "src/model/mod.rs", "rank": 26, "score": 12.283005045805663 }, { "content": "use deadpool_redis::redis::{AsyncCommands, FromRedisValue};\n\nuse hashbrown::HashMap;\n\nuse serde::de::DeserializeOwned;\n\nuse twilight_model::id::{ChannelId, GuildId, RoleId, UserId};\n\n\n\nuse crate::{\n\n constants::GUILD_KEYS,\n\n model::{\n\n CachedChannel, CachedCurrentUser, CachedGuild, CachedMember, CachedRole, IntoMemberIter,\n\n RedisKey, SessionInfo,\n\n },\n\n CacheResult,\n\n};\n\n\n\nuse super::Cache;\n\n\n", "file_path": "src/fetch.rs", "rank": 27, "score": 11.906530323600773 }, { "content": " permissions |= role.permissions;\n\n }\n\n }\n\n\n\n Ok((permissions, MemberLookup::Found(member)))\n\n }\n\n\n\n pub async fn get_channel_permissions(\n\n &self,\n\n user: UserId,\n\n channel: &ChannelOrId,\n\n guild: Option<&GuildOrId>,\n\n ) -> CacheResult<Permissions> {\n\n let guild = if let Some(guild) = guild {\n\n guild\n\n } else {\n\n // Private channel\n\n let permissions = Permissions::SEND_MESSAGES\n\n | Permissions::EMBED_LINKS\n\n | Permissions::ATTACH_FILES\n", "file_path": "src/util.rs", "rank": 28, "score": 11.67099922027646 }, { "content": " | Permissions::USE_EXTERNAL_EMOJIS\n\n | Permissions::ADD_REACTIONS\n\n | Permissions::READ_MESSAGE_HISTORY;\n\n\n\n return 
Ok(permissions);\n\n };\n\n\n\n let (mut permissions, member) = self.get_guild_permissions(user, guild).await?;\n\n\n\n if permissions.contains(Permissions::ADMINISTRATOR) {\n\n return Ok(Permissions::all());\n\n }\n\n\n\n let channel = self.extract_channel(channel).await?;\n\n\n\n if let Some(channel) = channel {\n\n let member = match member {\n\n MemberLookup::Found(member) => Some(member),\n\n MemberLookup::NotChecked => self.member(guild.id(), user).await?,\n\n MemberLookup::NotFound => None,\n", "file_path": "src/util.rs", "rank": 29, "score": 11.644137079915009 }, { "content": " channels: conn.scard(CHANNEL_KEYS).await?,\n\n guilds: conn.scard(GUILD_KEYS).await?,\n\n members: conn.scard(MEMBER_KEYS).await?,\n\n roles: conn.scard(ROLE_KEYS).await?,\n\n };\n\n\n\n Ok(stats)\n\n }\n\n\n\n pub async fn get_guild_permissions(\n\n &self,\n\n user: UserId,\n\n guild: &GuildOrId,\n\n ) -> CacheResult<(Permissions, MemberLookup)> {\n\n if user.get() == OWNER_USER_ID {\n\n return Ok((Permissions::all(), MemberLookup::NotChecked));\n\n }\n\n\n\n match self.is_guild_owner(guild, user).await {\n\n Ok(true) => return Ok((Permissions::all(), MemberLookup::NotChecked)),\n", "file_path": "src/util.rs", "rank": 30, "score": 11.61987114468875 }, { "content": "use std::borrow::Cow;\n\n\n\nuse deadpool_redis::redis::AsyncCommands;\n\nuse twilight_model::{\n\n channel::permission_overwrite::PermissionOverwriteType, guild::Permissions, id::UserId,\n\n};\n\n\n\nuse crate::{\n\n constants::{CHANNEL_KEYS, GUILD_KEYS, MEMBER_KEYS, OWNER_USER_ID, ROLE_KEYS},\n\n model::{\n\n CacheStats, CachedChannel, CachedMember, CachedTextChannel, ChannelOrId, GuildOrId,\n\n MemberLookup, RedisKey,\n\n },\n\n CacheError, CacheResult,\n\n};\n\n\n\nuse super::Cache;\n\n\n\nimpl Cache {\n\n #[inline]\n", "file_path": "src/util.rs", "rank": 31, "score": 11.613746710467229 }, { "content": " // * Handled in bot context instead\n\n // self.set(RedisKey::BotUser, CurrentUserWrapper::from(&e.user))\n\n // .await?;\n\n }\n\n Event::RoleCreate(e) => self.cache_role(&e.role, e.guild_id).await?,\n\n Event::RoleDelete(e) => self.del(RedisKey::from((e.guild_id, e.role_id))).await?,\n\n Event::RoleUpdate(e) => self.cache_role(&e.role, e.guild_id).await?,\n\n Event::ThreadCreate(e) => self.cache_channel(e).await?,\n\n Event::ThreadDelete(e) => {\n\n if let Channel::Guild(channel) = &e.0 {\n\n if let Some(c) = BasicGuildChannel::from(channel) {\n\n self.del(RedisKey::from(&c)).await?;\n\n }\n\n }\n\n }\n\n Event::ThreadListSync(e) => {\n\n // Cache members\n\n if !e.members.is_empty() {\n\n let keys = e\n\n .members\n", "file_path": "src/store.rs", "rank": 33, "score": 11.105569395595857 }, { "content": " CacheResult,\n\n};\n\n\n\nuse super::Cache;\n\n\n\nimpl Cache {\n\n #[inline]\n\n pub async fn cache_channel(&self, channel: &Channel) -> CacheResult<()> {\n\n if let Channel::Guild(channel) = channel {\n\n if let Some(c) = BasicGuildChannel::from(channel) {\n\n self.set(RedisKey::from(&c), c).await?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n pub async fn cache_member(&self, member: &Member) -> CacheResult<()> {\n\n let wrapper = MemberWrapper::from(member);\n", "file_path": "src/store.rs", "rank": 34, "score": 10.954728337732412 }, { "content": " self.get((guild, user).into()).await\n\n }\n\n\n\n #[inline]\n\n pub async fn members(&self, guild: GuildId) -> CacheResult<IntoMemberIter> {\n\n let key = format!(\"{}:{}\", GUILD_KEYS, guild);\n\n let keys = self.get_members(key).await?;\n\n\n\n Ok(IntoMemberIter::new(keys))\n\n 
}\n\n\n\n #[inline]\n\n pub async fn role(&self, role: RoleId) -> FetchResult<CachedRole> {\n\n self.get(role.into()).await\n\n }\n\n\n\n #[inline]\n\n pub async fn shards(&self) -> FetchResult<u64> {\n\n self.get(RedisKey::Shards).await\n\n }\n", "file_path": "src/fetch.rs", "rank": 35, "score": 10.550261354577582 }, { "content": " };\n\n\n\n if let Some(member) = member {\n\n Self::text_channel_permissions(&mut permissions, user, guild, channel, member)\n\n }\n\n }\n\n\n\n Ok(permissions)\n\n }\n\n\n\n #[allow(clippy::needless_lifetimes)]\n\n async fn extract_channel<'c>(\n\n &self,\n\n channel: &'c ChannelOrId,\n\n ) -> CacheResult<Option<Cow<'c, CachedTextChannel>>> {\n\n let id = match channel {\n\n ChannelOrId::Channel(CachedChannel::Text(channel)) => {\n\n return Ok(Some(Cow::Borrowed(channel)))\n\n }\n\n ChannelOrId::Channel(\n", "file_path": "src/util.rs", "rank": 37, "score": 10.45020522768944 }, { "content": " Self::Text(c) => c.id,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn name(&self) -> &str {\n\n match self {\n\n Self::PrivateThread(c) => c.name.as_str(),\n\n Self::PublicThread(c) => c.name.as_str(),\n\n Self::Text(c) => c.name.as_str(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct CachedTextChannel {\n\n #[serde(default, rename = \"a\", skip_serializing_if = \"Option::is_none\")]\n\n pub guild_id: Option<GuildId>,\n\n #[serde(rename = \"b\")]\n\n pub id: ChannelId,\n", "file_path": "src/model/mod.rs", "rank": 38, "score": 10.33402079852008 }, { "content": " #[serde(default, rename = \"a\", skip_serializing_if = \"Option::is_none\")]\n\n pub icon: Option<String>,\n\n #[serde(rename = \"b\")]\n\n pub id: GuildId,\n\n #[serde(rename = \"c\")]\n\n pub name: String,\n\n #[serde(rename = \"d\")]\n\n pub owner_id: UserId,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]\n\npub struct CachedCurrentUser {\n\n #[serde(default, rename = \"a\", skip_serializing_if = \"Option::is_none\")]\n\n pub avatar: Option<String>,\n\n #[serde(rename = \"b\")]\n\n pub discriminator: u16,\n\n #[serde(rename = \"c\")]\n\n pub id: UserId,\n\n #[serde(rename = \"d\")]\n\n pub name: String,\n", "file_path": "src/model/mod.rs", "rank": 40, "score": 10.221455790795869 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<(GuildId, RoleId)> for RedisKey {\n\n fn from((guild, role): (GuildId, RoleId)) -> Self {\n\n Self::Role {\n\n guild: Some(guild),\n\n role,\n\n }\n\n }\n\n}\n\n\n\nimpl From<RoleId> for RedisKey {\n\n fn from(role: RoleId) -> Self {\n\n Self::Role { guild: None, role }\n\n }\n\n}\n\n\n\nimpl From<ChannelId> for RedisKey {\n", "file_path": "src/model/redis_key.rs", "rank": 41, "score": 10.188877126415091 }, { "content": "\n\nimpl From<GuildId> for GuildOrId {\n\n fn from(id: GuildId) -> Self {\n\n Self::Id(id)\n\n }\n\n}\n\n\n\npub enum ChannelOrId {\n\n Channel(CachedChannel),\n\n Id(ChannelId),\n\n}\n\n\n\nimpl From<CachedChannel> for ChannelOrId {\n\n fn from(channel: CachedChannel) -> Self {\n\n Self::Channel(channel)\n\n }\n\n}\n\n\n\nimpl From<ChannelId> for ChannelOrId {\n\n fn from(id: ChannelId) -> Self {\n", "file_path": "src/model/mod.rs", "rank": 42, "score": 9.622529448822018 }, { "content": " Ok(false) => {}\n\n Err(CacheError::MissingGuild) => {\n\n return Ok((Permissions::empty(), MemberLookup::NotChecked))\n\n }\n\n Err(err) => return Err(err),\n\n }\n\n\n\n let member = match self.member(guild.id(), user).await? 
{\n\n Some(member) => member,\n\n None => return Ok((Permissions::empty(), MemberLookup::NotFound)),\n\n };\n\n\n\n let mut permissions = Permissions::empty();\n\n\n\n for &role_id in &member.roles {\n\n if let Some(role) = self.role(role_id).await? {\n\n if role.permissions.contains(Permissions::ADMINISTRATOR) {\n\n return Ok((Permissions::all(), MemberLookup::Found(member)));\n\n }\n\n\n", "file_path": "src/util.rs", "rank": 43, "score": 9.574407916918872 }, { "content": "}\n\n\n\nimpl ToRedisArgs for RedisKey {\n\n fn write_redis_args<W: ?Sized + RedisWrite>(&self, out: &mut W) {\n\n out.write_arg_fmt(self)\n\n }\n\n}\n\n\n\nimpl From<&CachedChannel> for RedisKey {\n\n fn from(channel: &CachedChannel) -> Self {\n\n Self::Channel {\n\n guild: channel.guild_id(),\n\n channel: channel.id(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'c> From<&BasicGuildChannel<'c>> for RedisKey {\n\n fn from(channel: &BasicGuildChannel<'c>) -> Self {\n\n Self::Channel {\n", "file_path": "src/model/redis_key.rs", "rank": 44, "score": 9.334938931524125 }, { "content": " PublicThread(CachedThread),\n\n #[serde(rename = \"c\")]\n\n Text(CachedTextChannel),\n\n}\n\n\n\nimpl CachedChannel {\n\n #[inline]\n\n pub const fn guild_id(&self) -> Option<GuildId> {\n\n match self {\n\n Self::PrivateThread(c) => c.guild_id,\n\n Self::PublicThread(c) => c.guild_id,\n\n Self::Text(c) => c.guild_id,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub const fn id(&self) -> ChannelId {\n\n match self {\n\n Self::PrivateThread(c) => c.id,\n\n Self::PublicThread(c) => c.id,\n", "file_path": "src/model/mod.rs", "rank": 45, "score": 9.258251873072348 }, { "content": " Self::Id(id)\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CacheConfig {\n\n /// Specifies the Time-To-Live in seconds for cached members until they expire\n\n pub member_ttl: Option<usize>,\n\n}\n", "file_path": "src/model/mod.rs", "rank": 46, "score": 9.012144384872434 }, { "content": "\n\n if let Some(ttl) = self.config.member_ttl {\n\n self.set_with_expire(RedisKey::from(member), wrapper, ttl)\n\n .await\n\n } else {\n\n self.set(member.into(), wrapper).await\n\n }\n\n }\n\n\n\n #[inline]\n\n pub async fn cache_role(&self, role: &Role, guild: GuildId) -> CacheResult<()> {\n\n self.set(RedisKey::from((guild, role.id)), RoleWrapper::from(role))\n\n .await\n\n }\n\n\n\n #[inline]\n\n pub async fn cache_shards(&self, shards: u64) -> CacheResult<()> {\n\n self.set(RedisKey::Shards, shards).await\n\n }\n\n\n", "file_path": "src/store.rs", "rank": 47, "score": 8.697756371817462 }, { "content": " #[serde(rename = \"c\")]\n\n pub name: String,\n\n #[serde(default, rename = \"d\", skip_serializing_if = \"Vec::is_empty\")]\n\n pub permission_overwrites: Vec<PermissionOverwrite>,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct CachedThread {\n\n #[serde(default, rename = \"a\", skip_serializing_if = \"Option::is_none\")]\n\n pub guild_id: Option<GuildId>,\n\n #[serde(rename = \"b\")]\n\n pub id: ChannelId,\n\n #[serde(rename = \"c\")]\n\n pub name: String,\n\n #[serde(default, rename = \"d\", skip_serializing_if = \"Option::is_none\")]\n\n pub parent_id: Option<ChannelId>,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct CachedGuild {\n", "file_path": "src/model/mod.rs", "rank": 48, "score": 8.385241552099894 }, { "content": " Some(MEMBER_KEY) => {\n\n let guild = split.next().map(str::parse).map(|res| res.map(GuildId));\n\n\n\n let user = split\n\n .next()\n\n .map(str::parse)\n\n .map(|res| res.map(UserId))\n\n 
.filter(|_| split.next().is_none());\n\n\n\n if let (Some(Ok(guild)), Some(Ok(user))) = (guild, user) {\n\n return Ok(RedisKey::Member { guild, user });\n\n }\n\n }\n\n Some(ROLE_KEY) => {\n\n let parse = split\n\n .next()\n\n .map(str::parse)\n\n .map(|res| res.map(RoleId))\n\n .filter(|_| split.next().is_none());\n\n\n", "file_path": "src/model/redis_key.rs", "rank": 50, "score": 8.1389839297525 }, { "content": "\n\npub enum GuildOrId {\n\n Guild(CachedGuild),\n\n Id(GuildId),\n\n}\n\n\n\nimpl GuildOrId {\n\n pub fn id(&self) -> GuildId {\n\n match self {\n\n Self::Guild(guild) => guild.id,\n\n Self::Id(id) => *id,\n\n }\n\n }\n\n}\n\n\n\nimpl From<CachedGuild> for GuildOrId {\n\n fn from(guild: CachedGuild) -> Self {\n\n Self::Guild(guild)\n\n }\n\n}\n", "file_path": "src/model/mod.rs", "rank": 51, "score": 7.822938187539329 }, { "content": " pub async fn is_guild_owner(&self, guild: &GuildOrId, user: UserId) -> CacheResult<bool> {\n\n match guild {\n\n GuildOrId::Guild(guild) => Ok(guild.owner_id == user),\n\n GuildOrId::Id(id) => {\n\n let guild = self.guild(*id).await?.ok_or(CacheError::MissingGuild)?;\n\n\n\n Ok(guild.owner_id == user)\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n pub async fn contains(&self, key: impl Into<RedisKey>) -> CacheResult<bool> {\n\n Ok(self.redis.get().await?.exists(key.into()).await?)\n\n }\n\n\n\n pub async fn stats(&self) -> CacheResult<CacheStats> {\n\n let mut conn = self.redis.get().await?;\n\n\n\n let stats = CacheStats {\n", "file_path": "src/util.rs", "rank": 52, "score": 7.759563832232753 }, { "content": " .iter()\n\n .filter_map(|c| {\n\n if let Channel::Guild(channel) = c {\n\n BasicGuildChannel::from(channel)\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|channel| (RedisKey::from(&channel), channel));\n\n\n\n self.set_all(keys).await?;\n\n }\n\n }\n\n Event::ThreadMemberUpdate(e) => {\n\n if let Some(member) = &e.member {\n\n self.cache_member(member).await?;\n\n }\n\n }\n\n Event::ThreadMembersUpdate(e) => {\n\n if !e.added_members.is_empty() {\n", "file_path": "src/store.rs", "rank": 53, "score": 7.7317159604531485 }, { "content": " user_denied |= overwrite.deny;\n\n }\n\n }\n\n PermissionOverwriteType::Role(role) => {\n\n if role.0 == guild.id().0 {\n\n everyone_allowed |= overwrite.allow;\n\n everyone_denied |= overwrite.deny\n\n } else if member.roles.contains(&role) {\n\n role_allowed |= overwrite.allow;\n\n role_denied |= overwrite.deny;\n\n }\n\n }\n\n }\n\n }\n\n\n\n *permissions &= !everyone_denied;\n\n *permissions |= everyone_allowed;\n\n\n\n *permissions &= !role_denied;\n\n *permissions |= role_allowed;\n\n\n\n *permissions &= !user_denied;\n\n *permissions |= user_allowed;\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 54, "score": 7.690887780495224 }, { "content": " // Cache the guild itself\n\n self.set(e.id.into(), GuildWrapper::from(&e.0)).await?;\n\n }\n\n Event::GuildDelete(e) => self.clear_guild(e.id).await?,\n\n Event::GuildUpdate(e) => {\n\n self.set(e.id.into(), PartialGuildWrapper::from(&e.0))\n\n .await?\n\n }\n\n Event::InteractionCreate(e) => {\n\n let (guild, member) = match &e.0 {\n\n Interaction::ApplicationCommand(data) => (data.guild_id, &data.member),\n\n Interaction::MessageComponent(data) => (data.guild_id, &data.member),\n\n _ => return Ok(()),\n\n };\n\n\n\n if let (Some(member), Some(guild)) = (member, guild) {\n\n if let Some(user) = &member.user {\n\n let key = RedisKey::from((guild, user.id));\n\n let member = PartialMemberWrapper::from((member, guild, user));\n\n\n", "file_path": "src/store.rs", 
"rank": 55, "score": 7.661939114943868 }, { "content": " // Cache channels\n\n if !e.channels.is_empty() {\n\n let channels = e\n\n .channels\n\n .iter()\n\n .filter_map(BasicGuildChannel::from)\n\n .map(|channel| (RedisKey::from(&channel), channel));\n\n\n\n self.set_all(channels).await?;\n\n }\n\n\n\n // Cache roles\n\n if !e.roles.is_empty() {\n\n let roles = e\n\n .roles\n\n .iter()\n\n .map(|role| (RedisKey::from((e.id, role.id)), RoleWrapper::from(role)));\n\n\n\n self.set_all(roles).await?;\n\n }\n", "file_path": "src/store.rs", "rank": 56, "score": 7.640390589126277 }, { "content": " #[serde(rename = \"c\")]\n\n pub permissions: Permissions,\n\n #[serde(rename = \"d\")]\n\n pub position: i64,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct SessionInfo {\n\n #[serde(rename = \"a\")]\n\n pub session_id: String,\n\n #[serde(rename = \"b\")]\n\n pub sequence: u64,\n\n}\n\n\n\npub struct IntoMemberIter {\n\n keys: Vec<RedisKey>,\n\n}\n\n\n\nimpl IntoMemberIter {\n\n pub(crate) fn new(keys: Vec<RedisKey>) -> Self {\n", "file_path": "src/model/mod.rs", "rank": 57, "score": 7.492065443839042 }, { "content": " #[inline]\n\n pub async fn cache_sessions(&self, sessions: &HashMap<u64, SessionInfo>) -> CacheResult<()> {\n\n self.set_with_expire(RedisKey::Sessions, sessions, 300)\n\n .await\n\n }\n\n\n\n pub async fn update(&self, event: &Event) -> CacheResult<()> {\n\n match event {\n\n Event::ChannelCreate(e) => self.cache_channel(e).await?,\n\n Event::ChannelDelete(e) => {\n\n if let Channel::Guild(channel) = &e.0 {\n\n if let Some(c) = BasicGuildChannel::from(channel) {\n\n self.del(RedisKey::from(&c)).await?;\n\n }\n\n }\n\n }\n\n Event::ChannelUpdate(e) => self.cache_channel(e).await?,\n\n Event::GuildCreate(e) => {\n\n self.clear_guild(e.id).await?;\n\n\n", "file_path": "src/store.rs", "rank": 58, "score": 6.6274646899110685 }, { "content": " Self { keys }\n\n }\n\n}\n\n\n\nimpl IntoIterator for IntoMemberIter {\n\n type Item = UserId;\n\n\n\n #[allow(clippy::type_complexity)]\n\n type IntoIter = FilterMap<IntoIter<RedisKey>, fn(RedisKey) -> Option<UserId>>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.keys.into_iter().filter_map(filter_member_key)\n\n }\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 59, "score": 6.535080782341532 }, { "content": "#![deny(clippy::all, nonstandard_style, rust_2018_idioms, unused, warnings)]\n\n\n\nuse std::fmt::Display;\n\n\n\nuse deadpool_redis::{Config, Pool, PoolConfig};\n\n\n\nmod constants;\n\nmod error;\n\nmod fetch;\n\nmod store;\n\nmod util;\n\n\n\npub mod model;\n\n\n\nuse model::CacheConfig;\n\n\n\npub use error::{CacheError, CacheResult};\n\nuse twilight_model::id::UserId;\n\n\n\npub struct Cache {\n", "file_path": "src/lib.rs", "rank": 60, "score": 6.474939597678702 }, { "content": " if let Some(ttl) = self.config.member_ttl {\n\n self.set_with_expire(key, member, ttl).await?;\n\n } else {\n\n self.set(key, member).await?;\n\n }\n\n }\n\n }\n\n }\n\n Event::MemberAdd(e) => self.cache_member(e).await?,\n\n Event::MemberRemove(e) => self.del(RedisKey::from((e.guild_id, e.user.id))).await?,\n\n Event::MemberUpdate(e) => {\n\n let key = RedisKey::from((e.guild_id, e.user.id));\n\n let member = MemberUpdateWrapper::from(e.as_ref());\n\n\n\n if let Some(ttl) = self.config.member_ttl {\n\n self.set_with_expire(key, member, ttl).await?;\n\n } else {\n\n self.set(key, member).await?;\n\n }\n\n }\n", "file_path": "src/store.rs", "rank": 61, "score": 6.1369168057284575 }, { "content": " conn.sadd(key.as_ref(), 
value).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn set_with_expire<T>(&self, key: RedisKey, value: T, seconds: usize) -> CacheResult<()>\n\n where\n\n T: Serialize,\n\n {\n\n let bytes = serde_cbor::to_vec(&value)?;\n\n let mut conn = self.redis.get().await?;\n\n\n\n // Don't expire the cached member data of the bot itself\n\n if key.user_id().filter(|id| id == &self.bot_id).is_none() {\n\n conn.set_ex(key, bytes, seconds).await?;\n\n } else {\n\n conn.set(key, bytes).await?;\n\n }\n\n\n", "file_path": "src/store.rs", "rank": 62, "score": 5.827025372144668 }, { "content": " redis: Pool,\n\n config: CacheConfig,\n\n bot_id: UserId,\n\n}\n\n\n\nimpl Cache {\n\n pub fn new(host: impl Display, port: impl Display, bot_id: UserId) -> CacheResult<Self> {\n\n Self::with_config(host, port, bot_id, CacheConfig::default())\n\n }\n\n\n\n pub fn with_config(\n\n host: impl Display,\n\n port: impl Display,\n\n bot_id: UserId,\n\n config: CacheConfig,\n\n ) -> CacheResult<Self> {\n\n let redis_config = Config {\n\n url: Some(format!(\"redis://{}:{}\", host, port)),\n\n connection: None,\n\n pool: Some(PoolConfig::new(4)),\n", "file_path": "src/lib.rs", "rank": 63, "score": 5.749412987252224 }, { "content": " where\n\n I: IntoIterator<Item = (RedisKey, T)>,\n\n T: Serialize,\n\n {\n\n let mut members = HashMap::new();\n\n\n\n let keys = keys\n\n .into_iter()\n\n .inspect(|(key, _)| populate_members(key, &mut members))\n\n .map(|(key, value)| serde_cbor::to_vec(&value).map(|value| (key, value)))\n\n .collect::<Result<Vec<(RedisKey, Vec<u8>)>, CborError>>()?;\n\n\n\n if keys.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut conn = self.redis.get().await?;\n\n conn.set_multiple(&keys).await?;\n\n\n\n for (key, value) in members {\n", "file_path": "src/store.rs", "rank": 64, "score": 5.727172865665823 }, { "content": "use deadpool_redis::{redis::RedisError, CreatePoolError, PoolError};\n\nuse serde_cbor::Error as CborError;\n\nuse thiserror::Error;\n\n\n\npub type CacheResult<T> = Result<T, CacheError>;\n\n\n\n#[derive(Debug, Error)]\n\npub enum CacheError {\n\n #[error(\"cbor error\")]\n\n Cbor(#[from] CborError),\n\n #[error(\"failed to create redis pool\")]\n\n CreatePool(#[from] CreatePoolError),\n\n #[error(\"guild is not cached\")]\n\n MissingGuild,\n\n #[error(\"redis pool error\")]\n\n Pool(#[from] PoolError),\n\n #[error(\"redis error\")]\n\n Redis(#[from] RedisError),\n\n}\n", "file_path": "src/error.rs", "rank": 65, "score": 5.255446822011766 }, { "content": " let keys = e\n\n .added_members\n\n .iter()\n\n .filter_map(|member| member.member.as_ref())\n\n .map(MemberWrapper::from)\n\n .map(|member| (RedisKey::from(&member), member));\n\n\n\n if let Some(ttl) = self.config.member_ttl {\n\n let keys = keys\n\n .map(|(key, member)| Ok((key, serde_cbor::to_vec(&member)?)))\n\n .collect::<Result<Vec<_>, CborError>>()?;\n\n\n\n self.set_all_with_expire(&keys, ttl).await?;\n\n } else {\n\n self.set_all(keys).await?;\n\n }\n\n }\n\n }\n\n Event::ThreadUpdate(e) => self.cache_channel(e).await?,\n\n Event::UserUpdate(_e) => {\n", "file_path": "src/store.rs", "rank": 66, "score": 5.250848230297294 }, { "content": " Ok(())\n\n }\n\n\n\n async fn set_all_with_expire(\n\n &self,\n\n keys: &[(RedisKey, Vec<u8>)],\n\n seconds: usize,\n\n ) -> CacheResult<()> {\n\n if keys.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut conn = self.redis.get().await?;\n\n conn.set_multiple(keys).await?;\n\n\n\n for (key, _) in keys {\n\n // Don't expire the cached member data of the bot itself\n\n if 
key.user_id().filter(|id| id == &self.bot_id).is_none() {\n\n conn.expire(key, seconds).await?;\n\n }\n", "file_path": "src/store.rs", "rank": 67, "score": 5.234535323733261 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n async fn clear_guild(&self, guild: GuildId) -> CacheResult<()> {\n\n let members = self\n\n .get_members::<RedisKey>(format!(\"{}:{}\", GUILD_KEYS, guild))\n\n .await?;\n\n\n\n self.del_all(members).await?;\n\n self.del(RedisKey::Guild { guild }).await?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/store.rs", "rank": 68, "score": 5.182561453503956 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn del(&self, key: RedisKey) -> CacheResult<()> {\n\n let mut members = HashMap::new();\n\n populate_members(&key, &mut members);\n\n\n\n let mut conn = self.redis.get().await?;\n\n conn.del(key).await?;\n\n\n\n for (key, value) in members {\n\n conn.srem(key.as_ref(), value).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn del_all<I>(&self, keys: I) -> CacheResult<()>\n", "file_path": "src/store.rs", "rank": 69, "score": 5.008538343273526 }, { "content": " if let Some(Ok(channel)) = parse {\n\n let key = RedisKey::Channel {\n\n guild: None,\n\n channel,\n\n };\n\n\n\n return Ok(key);\n\n }\n\n }\n\n Some(GUILD_KEY) => {\n\n let parse = split\n\n .next()\n\n .map(str::parse)\n\n .map(|res| res.map(GuildId))\n\n .filter(|_| split.next().is_none());\n\n\n\n if let Some(Ok(guild)) = parse {\n\n return Ok(RedisKey::Guild { guild });\n\n }\n\n }\n", "file_path": "src/model/redis_key.rs", "rank": 70, "score": 4.953989412308702 }, { "content": " where\n\n I: IntoIterator<Item = RedisKey>,\n\n {\n\n let mut members = HashMap::new();\n\n\n\n let keys = keys\n\n .into_iter()\n\n .inspect(|key| populate_members(key, &mut members))\n\n .collect::<Vec<RedisKey>>();\n\n\n\n if keys.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let mut conn = self.redis.get().await?;\n\n conn.del(keys).await?;\n\n\n\n for (key, value) in members {\n\n conn.srem(key.as_ref(), value).await?;\n\n }\n", "file_path": "src/store.rs", "rank": 71, "score": 4.942225371957898 }, { "content": " Event::MemberChunk(e) => {\n\n let keys = e\n\n .members\n\n .iter()\n\n .map(MemberWrapper::from)\n\n .map(|member| (RedisKey::from(&member), member));\n\n\n\n if let Some(ttl) = self.config.member_ttl {\n\n let keys = keys\n\n .map(|(key, member)| Ok((key, serde_cbor::to_vec(&member)?)))\n\n .collect::<Result<Vec<_>, CborError>>()?;\n\n\n\n self.set_all_with_expire(&keys, ttl).await?;\n\n } else {\n\n self.set_all(keys).await?;\n\n }\n\n }\n\n Event::MessageCreate(e) => {\n\n if let (Some(member), Some(guild)) = (&e.member, e.guild_id) {\n\n let key = RedisKey::from((guild, e.author.id));\n", "file_path": "src/store.rs", "rank": 72, "score": 4.766269785884518 }, { "content": " let member = PartialMemberWrapper::from((member, guild, &e.author));\n\n\n\n if let Some(ttl) = self.config.member_ttl {\n\n self.set_with_expire(key, member, ttl).await?;\n\n } else {\n\n self.set(key, member).await?;\n\n }\n\n }\n\n }\n\n Event::ReactionAdd(e) => {\n\n if let Some(member) = &e.member {\n\n self.cache_member(member).await?;\n\n }\n\n }\n\n Event::ReactionRemove(e) => {\n\n if let Some(member) = &e.member {\n\n self.cache_member(member).await?;\n\n }\n\n }\n\n Event::Ready(_e) => {\n", "file_path": "src/store.rs", "rank": 73, "score": 4.34102208502356 }, { "content": " .iter()\n\n .filter_map(|member| member.member.as_ref())\n\n .map(MemberWrapper::from)\n\n .map(|member| (RedisKey::from(&member), member));\n\n\n\n if let 
Some(ttl) = self.config.member_ttl {\n\n let keys = keys\n\n .map(|(key, member)| Ok((key, serde_cbor::to_vec(&member)?)))\n\n .collect::<Result<Vec<_>, CborError>>()?;\n\n\n\n self.set_all_with_expire(&keys, ttl).await?;\n\n } else {\n\n self.set_all(keys).await?;\n\n }\n\n }\n\n\n\n // Cache channels\n\n if !e.threads.is_empty() {\n\n let keys = e\n\n .threads\n", "file_path": "src/store.rs", "rank": 74, "score": 4.176786916906359 }, { "content": "impl FromRedisValue for RedisKey {\n\n fn from_redis_value(v: &Value) -> RedisResult<Self> {\n\n if let Value::Data(data) = v {\n\n let s = std::str::from_utf8(data).map_err(|_| {\n\n let kind = ErrorKind::ResponseError;\n\n let description = \"Response was invalid utf8 data\";\n\n\n\n RedisError::from((kind, description))\n\n })?;\n\n\n\n let mut split = s.split(':');\n\n\n\n match split.next() {\n\n Some(CHANNEL_KEY) => {\n\n let parse = split\n\n .next()\n\n .map(str::parse)\n\n .map(|res| res.map(ChannelId))\n\n .filter(|_| split.next().is_none());\n\n\n", "file_path": "src/model/redis_key.rs", "rank": 75, "score": 4.13411011292753 }, { "content": "\n\n #[inline]\n\n pub async fn sessions(&self) -> FetchResult<HashMap<u64, SessionInfo>> {\n\n self.get(RedisKey::Sessions).await\n\n }\n\n\n\n async fn get<T>(&self, key: RedisKey) -> FetchResult<T>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let mut conn = self.redis.get().await?;\n\n let res: Option<Vec<u8>> = conn.get(key).await?;\n\n let opt = res.map(|value| serde_cbor::from_slice(&value));\n\n\n\n Ok(opt.transpose()?)\n\n }\n\n\n\n pub(crate) async fn get_members<T>(&self, key: String) -> CacheResult<Vec<T>>\n\n where\n\n T: FromRedisValue,\n\n {\n\n let mut conn = self.redis.get().await?;\n\n\n\n Ok(conn.smembers(key).await?)\n\n }\n\n}\n", "file_path": "src/fetch.rs", "rank": 76, "score": 4.034276712437903 }, { "content": " // * Handled in bot context instead\n\n // self.set(RedisKey::BotUser, CurrentUserWrapper::from(&e.0))\n\n // .await?\n\n }\n\n _ => {}\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn set<T>(&self, key: RedisKey, value: T) -> CacheResult<()>\n\n where\n\n T: Serialize,\n\n {\n\n self.set_all(iter::once((key, value))).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn set_all<I, T>(&self, keys: I) -> CacheResult<()>\n", "file_path": "src/store.rs", "rank": 77, "score": 3.596921514511876 }, { "content": " if let Some(Ok(role)) = parse {\n\n return Ok(RedisKey::Role { guild: None, role });\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n let kind = ErrorKind::TypeError;\n\n let description = \"Response string was of incompatible format\";\n\n let detail = format!(\n\n r#\"Response string could not be parsed as RedisKey (response was \"{}\")\"#,\n\n s\n\n );\n\n\n\n Err((kind, description, detail).into())\n\n } else {\n\n let kind = ErrorKind::TypeError;\n\n let description = \"Response was of incompatible type\";\n\n let detail = format!(\n\n \"Response type not RedisKey compatible (response was {:?})\",\n\n v\n\n );\n\n\n\n Err((kind, description, detail).into())\n\n }\n\n }\n\n}\n", "file_path": "src/model/redis_key.rs", "rank": 78, "score": 3.3866548892986827 }, { "content": " CachedChannel::PrivateThread(channel) | CachedChannel::PublicThread(channel),\n\n ) => channel.id,\n\n ChannelOrId::Id(id) => *id,\n\n };\n\n\n\n match self.channel(id).await? 
{\n\n Some(CachedChannel::Text(c)) => return Ok(Some(Cow::Owned(c))),\n\n Some(CachedChannel::PrivateThread(thread))\n\n | Some(CachedChannel::PublicThread(thread)) => {\n\n if let Some(parent) = thread.parent_id {\n\n if let Some(CachedChannel::Text(channel)) = self.channel(parent).await? {\n\n return Ok(Some(Cow::Owned(channel)));\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n Ok(None)\n\n }\n", "file_path": "src/util.rs", "rank": 79, "score": 3.2324977122463365 }, { "content": "\n\n // Cache members\n\n if !e.members.is_empty() {\n\n let members = e\n\n .members\n\n .iter()\n\n .map(MemberWrapper::from)\n\n .map(|member| (RedisKey::from(&member), member));\n\n\n\n if let Some(ttl) = self.config.member_ttl {\n\n let keys = members\n\n .map(|(key, member)| Ok((key, serde_cbor::to_vec(&member)?)))\n\n .collect::<Result<Vec<_>, CborError>>()?;\n\n\n\n self.set_all_with_expire(&keys, ttl).await?;\n\n } else {\n\n self.set_all(members).await?;\n\n }\n\n }\n\n\n", "file_path": "src/store.rs", "rank": 80, "score": 3.1149269510734103 }, { "content": " };\n\n\n\n let redis = redis_config.create_pool(None)?;\n\n\n\n Ok(Self {\n\n redis,\n\n config,\n\n bot_id,\n\n })\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 81, "score": 2.3087364741894563 } ]
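Editorial aside: the src/model/redis_key.rs snippets above parse cache keys laid out as a kind:id string (CHANNEL_KEY, GUILD_KEY, ROLE_KEY prefixes, followed by a trailing-segment check). Below is a minimal, self-contained Rust sketch of that parsing scheme only; the Key enum, the prefix constants and main are hypothetical stand-ins for illustration and are not taken from the crate shown above.

#[derive(Debug, PartialEq)]
enum Key {
    Channel(u64),
    Guild(u64),
    Role(u64),
}

// Hypothetical prefixes mirroring the CHANNEL_KEY / GUILD_KEY / ROLE_KEY constants above.
const CHANNEL_KEY: &str = "channel";
const GUILD_KEY: &str = "guild";
const ROLE_KEY: &str = "role";

fn parse_key(s: &str) -> Option<Key> {
    let mut split = s.split(':');
    let kind = split.next()?;
    let id: u64 = split.next()?.parse().ok()?;
    // Reject keys with trailing segments such as "guild:1:2",
    // matching the `filter(|_| split.next().is_none())` checks in the snippets.
    if split.next().is_some() {
        return None;
    }
    match kind {
        CHANNEL_KEY => Some(Key::Channel(id)),
        GUILD_KEY => Some(Key::Guild(id)),
        ROLE_KEY => Some(Key::Role(id)),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_key("guild:42"), Some(Key::Guild(42)));
    assert_eq!(parse_key("channel:7"), Some(Key::Channel(7)));
    assert_eq!(parse_key("guild:1:2"), None);
    assert_eq!(parse_key("user:42"), None);
    println!("redis key parsing sketch ok");
}

Unlike the real crate, this sketch drops the ChannelId/GuildId newtypes and the guild-scoped key variants; it only demonstrates the split-and-validate shape of the parser.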
Rust
crates/modor/src/system_params/mod.rs
modor-engine/modor
447ae453030de44ed93a2ab03a66261080304ce4
use crate::storages::archetypes::EntityLocation; use crate::storages::components::ComponentTypeIdx; use crate::storages::core::CoreStorage; use crate::storages::systems::SystemProperties; use crate::system_params::internal::{QuerySystemParamWithLifetime, SystemParamWithLifetime}; use crate::{SystemData, SystemInfo}; pub(crate) mod components; pub(crate) mod components_mut; pub(crate) mod entity; pub(crate) mod optional_components; pub(crate) mod optional_components_mut; pub(crate) mod optional_singletons; pub(crate) mod optional_singletons_mut; pub(crate) mod queries; pub(crate) mod singletons; pub(crate) mod singletons_mut; pub(crate) mod tuples; pub(crate) mod world; pub trait SystemParam: for<'a> SystemParamWithLifetime<'a> { #[doc(hidden)] type Tuple: SystemParam; #[doc(hidden)] type InnerTuple: SystemParam; #[doc(hidden)] fn properties(core: &mut CoreStorage) -> SystemProperties; #[doc(hidden)] fn lock<'a>( data: SystemData<'a>, info: SystemInfo<'a>, ) -> <Self as SystemParamWithLifetime<'a>>::Guard; #[doc(hidden)] fn borrow_guard<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::Guard, ) -> <Self as SystemParamWithLifetime<'a>>::GuardBorrow where 'b: 'a; #[doc(hidden)] fn stream<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as SystemParamWithLifetime<'a>>::Stream where 'b: 'a; #[doc(hidden)] fn stream_next<'a, 'b>( stream: &'a mut <Self as SystemParamWithLifetime<'b>>::Stream, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; } pub trait QuerySystemParam: SystemParam + for<'a> QuerySystemParamWithLifetime<'a> { #[doc(hidden)] fn filtered_component_type_idxs(data: SystemData<'_>) -> Vec<ComponentTypeIdx>; #[doc(hidden)] fn query_iter<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::Iter where 'b: 'a; #[doc(hidden)] fn query_iter_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::IterMut where 'b: 'a; #[doc(hidden)] fn get<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as QuerySystemParamWithLifetime<'a>>::ConstParam> where 'b: 'a; #[doc(hidden)] fn get_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; #[doc(hidden)] fn get_both_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location1: EntityLocation, location2: EntityLocation, ) -> ( Option<<Self as SystemParamWithLifetime<'a>>::Param>, Option<<Self as SystemParamWithLifetime<'a>>::Param>, ) where 'b: 'a; } pub(crate) mod internal { use crate::SystemParam; use std::any::Any; pub trait SystemParamWithLifetime<'a> { type Param: 'a; type Guard: 'a; type GuardBorrow: 'a; type Stream: 'a; } pub trait QuerySystemParamWithLifetime<'a>: SystemParamWithLifetime<'a> { type ConstParam: 'a + SystemParamWithLifetime<'a>; type Iter: 'a + Sync + Send + Iterator<Item = <Self::ConstParam as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; type IterMut: 'a + Sync + Send + Iterator<Item = <Self as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; } pub trait LockableSystemParam: SystemParam { type LockedType: Any; type Mutability: Mutability; } #[allow(unreachable_pub)] pub trait Mutability {} pub struct Const; impl Mutability for Const {} pub 
struct Mut; impl Mutability for Mut {} } pub(crate) mod utils { use crate::storages::archetypes::{ArchetypeEntityPos, ArchetypeIdx, EntityLocation}; use typed_index_collections::TiVec; pub(crate) fn get_both_mut<T>( data: &mut TiVec<ArchetypeIdx, TiVec<ArchetypeEntityPos, T>>, location1: EntityLocation, location2: EntityLocation, ) -> (Option<&mut T>, Option<&mut T>) { if location1.idx == location2.idx { if location1.idx >= data.next_key() { (None, None) } else { get_both_mut_internal(&mut data[location1.idx], location1.pos, location2.pos) } } else { let (sub_data1, sub_data2) = get_both_mut_internal(data, location1.idx, location2.idx); ( sub_data1.and_then(|d| d.get_mut(location1.pos)), sub_data2.and_then(|d| d.get_mut(location2.pos)), ) } } fn get_both_mut_internal<K, T>( data: &mut TiVec<K, T>, key1: K, key2: K, ) -> (Option<&mut T>, Option<&mut T>) where K: Ord + From<usize> + Copy, usize: From<K>, { if key2 >= data.next_key() { (data.get_mut(key1), None) } else if key1 >= data.next_key() { (None, data.get_mut(key2)) } else if key1 > key2 { let (left, right) = data.split_at_mut(key1); (Some(&mut right[K::from(0)]), Some(&mut left[key2])) } else { let (left, right) = data.split_at_mut(key2); (Some(&mut left[key1]), Some(&mut right[K::from(0)])) } } }
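The internal module in this file_code ends with the Const and Mut zero-sized markers behind a Mutability trait, which let parameter types advertise their access mode at compile time. As a hedged illustration of the same marker-type idea (the Lock wrapper and describe helper below are invented for this sketch and are not part of modor):

use std::any::TypeId;
use std::marker::PhantomData;

// Marker trait plus two zero-sized implementors, mirroring the Const/Mut pair above.
trait Mutability: 'static {}
struct Const;
struct Mut;
impl Mutability for Const {}
impl Mutability for Mut {}

// A lock descriptor that records its access mode purely at the type level.
struct Lock<M: Mutability>(PhantomData<M>);

fn describe<M: Mutability>(_lock: &Lock<M>) -> &'static str {
    if TypeId::of::<M>() == TypeId::of::<Mut>() {
        "exclusive (mutable) access"
    } else {
        "shared (read-only) access"
    }
}

fn main() {
    let shared: Lock<Const> = Lock(PhantomData);
    let exclusive: Lock<Mut> = Lock(PhantomData);
    println!("{}", describe(&shared));    // shared (read-only) access
    println!("{}", describe(&exclusive)); // exclusive (mutable) access
}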
use crate::storages::archetypes::EntityLocation; use crate::storages::components::ComponentTypeIdx; use crate::storages::core::CoreStorage; use crate::storages::systems::SystemProperties; use crate::system_params::internal::{QuerySystemParamWithLifetime, SystemParamWithLifetime}; use crate::{SystemData, SystemInfo}; pub(crate) mod components; pub(crate) mod components_mut; pub(crate) mod entity; pub(crate) mod optional_components; pub(crate) mod optional_components_mut; pub(crate) mod optional_singletons; pub(crate) mod optional_singletons_mut; pub(crate) mod queries; pub(crate) mod singletons; pub(crate) mod singletons_mut; pub(crate) mod tuples; pub(crate) mod world; pub trait SystemParam: for<'a> SystemParamWithLifetime<'a> { #[doc(hidden)] type Tuple: SystemParam; #[doc(hidden)] type InnerTuple: SystemParam; #[doc(hidden)] fn properties(core: &mut CoreStorage) -> SystemProperties; #[doc(hidden)] fn lock<'a>( data: SystemData<'a>, info: SystemInfo<'a>, ) -> <Self as SystemParamWithLifetime<'a>>::Guard; #[doc(hidden)] fn borrow_guard<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::Guard, ) -> <Self as SystemParamWithLifetime<'a>>::GuardBorrow where 'b: 'a; #[doc(hidden)] fn stream<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as SystemParamWithLifetime<'a>>::Stream where 'b: 'a; #[doc(hidden)] fn stream_next<'a, 'b>( stream: &'a mut <Self as SystemParamWithLifetime<'b>>::Stream, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; } pub trait QuerySystemParam: SystemParam + for<'a> QuerySystemParamWithLifetime<'a> { #[doc(hidden)] fn filtered_component_type_idxs(data: SystemData<'_>) -> Vec<ComponentTypeIdx>; #[doc(hidden)] fn query_iter<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::Iter where 'b: 'a; #[doc(hidden)] fn query_iter_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, ) -> <Self as QuerySystemParamWithLifetime<'a>>::IterMut where 'b: 'a; #[doc(hidden)] fn get<'a, 'b>( guard: &'a <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as QuerySystemParamWithLifetime<'a>>::ConstParam> where 'b: 'a; #[doc(hidden)] fn get_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location: EntityLocation, ) -> Option<<Self as SystemParamWithLifetime<'a>>::Param> where 'b: 'a; #[doc(hidden)] fn get_both_mut<'a, 'b>( guard: &'a mut <Self as SystemParamWithLifetime<'b>>::GuardBorrow, location1: EntityLocation, location2: EntityLocation, ) -> ( Option<<Self as SystemParamWithLifetime<'a>>::Param>, Option<<Self as SystemParamWithLifetime<'a>>::Param>, ) where 'b: 'a; } pub(crate) mod internal { use crate::SystemParam; use std::any::Any; pub trait SystemParamWithLifetime<'a> { type Param: 'a; type Guard: 'a; type GuardBorrow: 'a; type Stream: 'a; } pub trait QuerySystemParamWithLifetime<'a>: SystemParamWithLifetime<'a> { type ConstParam: 'a + SystemParamWithLifetime<'a>; type Iter: 'a + Sync + Send + Iterator<Item = <Self::ConstParam as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; type IterMut: 'a + Sync + Send + Iterator<Item = <Self as SystemParamWithLifetime<'a>>::Param> + DoubleEndedIterator + ExactSizeIterator; } pub trait LockableSystemParam: SystemParam { type LockedType: Any; type Mutability: Mutability; } #[allow(unreachable_pub)] pub trait Mutability {} pub struct Const; impl Mutability for Const {} pub 
struct Mut; impl Mutability for Mut {} } pub(crate) mod utils { use crate::storages::archetypes::{ArchetypeEntityPos, ArchetypeIdx, EntityLocation}; use typed_index_collections::TiVec; pub(crate) fn get_both_mut<T>( data: &mut TiVec<ArchetypeIdx, TiVec<ArchetypeEntityPos, T>>, location1: EntityLocation, location2: EntityLocation, ) -> (Option<&mut T>, Option<&mut T>) { if location1.idx == location2.idx { if location1.idx >= data.next_key() { (None, None) } else { get_both_mut_internal(&mut data[location1.idx], location1.pos, location2.pos) } } else { let (sub_data1, sub_data2) = get_both_mut_internal(data, location1.idx, location2.idx); (
fn get_both_mut_internal<K, T>( data: &mut TiVec<K, T>, key1: K, key2: K, ) -> (Option<&mut T>, Option<&mut T>) where K: Ord + From<usize> + Copy, usize: From<K>, { if key2 >= data.next_key() { (data.get_mut(key1), None) } else if key1 >= data.next_key() { (None, data.get_mut(key2)) } else if key1 > key2 { let (left, right) = data.split_at_mut(key1); (Some(&mut right[K::from(0)]), Some(&mut left[key2])) } else { let (left, right) = data.split_at_mut(key2); (Some(&mut left[key1]), Some(&mut right[K::from(0)])) } } }
sub_data1.and_then(|d| d.get_mut(location1.pos)), sub_data2.and_then(|d| d.get_mut(location2.pos)), ) } }
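This middle field is the completion target: it hands back two disjoint mutable borrows from the same storage, and the suffix's get_both_mut_internal supplies the split_at_mut machinery that makes that legal. A minimal, self-contained sketch of the technique on a plain slice, assuming plain usize indices instead of modor's typed indices (get_two_mut and main are hypothetical):

// Return mutable references to two distinct, in-bounds slots of the same slice.
fn get_two_mut<T>(data: &mut [T], i: usize, j: usize) -> Option<(&mut T, &mut T)> {
    if i == j || i >= data.len() || j >= data.len() {
        // A single slot cannot be borrowed mutably twice; out-of-bounds requests also fail.
        return None;
    }
    if i < j {
        // split_at_mut(j): `left` covers indices below j, `right` starts at j,
        // so the two references come from provably disjoint sub-slices.
        let (left, right) = data.split_at_mut(j);
        Some((&mut left[i], &mut right[0]))
    } else {
        let (left, right) = data.split_at_mut(i);
        Some((&mut right[0], &mut left[j]))
    }
}

fn main() {
    let mut values = vec![10, 20, 30, 40];
    if let Some((a, b)) = get_two_mut(&mut values, 3, 1) {
        std::mem::swap(a, b);
    }
    assert_eq!(values, [10, 40, 30, 20]);
    println!("split-borrow sketch ok: {values:?}");
}

Unlike the original, which returns a pair of Options so one in-bounds location can still be accessed when the other is missing, the sketch collapses both failure cases into a single None.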
function_block-function_prefix_line
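The strategy value above names a fill-in-the-middle split: prefix, middle and suffix are consecutive slices of file_code cut around a function block, so concatenating them is expected to reproduce the file. A hedged sketch of that invariant (reassemble and the toy snippet in main are illustrative only, not dataset tooling):

// Concatenating the three record slices should yield the original file_code.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut code = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    code.push_str(prefix);
    code.push_str(middle);
    code.push_str(suffix);
    code
}

fn main() {
    let prefix = "fn add(a: i32, b: i32) -> i32 {\n    ";
    let middle = "a + b\n";
    let suffix = "}\n";
    assert_eq!(
        reassemble(prefix, middle, suffix),
        "fn add(a: i32, b: i32) -> i32 {\n    a + b\n}\n"
    );
    println!("reassembly sketch ok");
}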
[ { "content": "/// A trait for defining the main component of an entity type.\n\n///\n\n/// This trait shouldn't be directly implemented.<br>\n\n/// Instead, you can use [`entity`](macro@crate::entity) and [`singleton`](macro@crate::singleton)\n\n/// proc macros.\n\npub trait EntityMainComponent: Sized + Any + Sync + Send {\n\n #[doc(hidden)]\n\n type Type: EntityType;\n\n\n\n #[doc(hidden)]\n\n fn on_update(runner: SystemRunner<'_>) -> SystemRunner<'_>;\n\n}\n\n\n", "file_path": "crates/modor/src/entities.rs", "rank": 0, "score": 358673.0200811331 }, { "content": " #[allow(unused_variables)]\n\n pub trait BuildEntityPart: Sized + Any + Sync + Send {\n\n fn create_archetype(\n\n &mut self,\n\n core: &mut CoreStorage,\n\n archetype_idx: ArchetypeIdx,\n\n ) -> ArchetypeIdx {\n\n archetype_idx\n\n }\n\n\n\n fn add_components(&mut self, core: &mut CoreStorage, location: EntityLocation) {}\n\n\n\n fn create_other_entities(self, core: &mut CoreStorage, parent_idx: Option<EntityIdx>) {}\n\n }\n\n\n\n impl BuildEntityPart for () {}\n\n\n\n pub struct MainComponentPart<E> {\n\n pub(super) component_part: ComponentPart<E, E>,\n\n }\n\n\n", "file_path": "crates/modor/src/entities.rs", "rank": 1, "score": 317255.0568685724 }, { "content": "struct EntityWithNotRegisteredComponentTypeDeleted;\n\n\n\n#[entity]\n\nimpl EntityWithNotRegisteredComponentTypeDeleted {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn delete_component(entity: Entity<'_>, mut world: World<'_>) {\n\n world.delete_component::<i64>(entity.id());\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 5, "score": 277518.24639240355 }, { "content": "trait ComponentArchetypeLock: Any + Sync + Send {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n fn move_component(&mut self, src_location: EntityLocation, dst_archetype_idx: ArchetypeIdx);\n\n\n\n fn delete_component(&mut self, location: EntityLocation);\n\n}\n\n\n\npub(crate) type ComponentArchetypes<C> = TiVec<ArchetypeIdx, TiVec<ArchetypeEntityPos, C>>;\n\n\n\nimpl<C> ComponentArchetypeLock for RwLock<ComponentArchetypes<C>>\n\nwhere\n\n C: Any + Sync + Send,\n\n{\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n", "file_path": "crates/modor/src/storages/components.rs", "rank": 7, "score": 265250.1218157621 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_single_mut() {\n\n let mut app: TestApp = App::new().with_entity(Number::build(10)).into();\n\n let tester1_id = app.create_entity(Tester::build());\n\n let tester2_id = app.create_entity(Tester::build());\n\n app.update();\n\n app.assert_entity(tester1_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(!t.done_missing);\n\n });\n\n app.assert_entity(tester2_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(!t.done_missing);\n\n });\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/singletons_mut.rs", "rank": 9, "score": 251070.33031222466 }, { "content": " pub trait SealedEntityType {}\n\n\n", "file_path": "crates/modor/src/entities.rs", "rank": 10, "score": 250548.92122435852 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iterate_on_component_reference() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n 
.with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<StreamCollector>()\n\n .has(|c: &StreamCollector| assert_eq!(c.0, [1, 2, 3]));\n\n app.assert_singleton::<QueryTester>()\n\n .has(|c: &QueryTester| assert!(c.done));\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 11, "score": 250097.36339998594 }, { "content": "#[doc(hidden)]\n\npub trait EntityType: Any + SealedEntityType {}\n\n\n\n#[doc(hidden)]\n\npub struct NotSingleton;\n\n\n\nimpl SealedEntityType for NotSingleton {}\n\n\n\nimpl EntityType for NotSingleton {}\n\n\n\n#[doc(hidden)]\n\npub struct Singleton;\n\n\n\nimpl SealedEntityType for Singleton {}\n\n\n\nimpl EntityType for Singleton {}\n\n\n", "file_path": "crates/modor/src/entities.rs", "rank": 12, "score": 249962.53644646608 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_single_mut() {\n\n let mut app: TestApp = App::new().with_entity(Number::build(10)).into();\n\n let tester1_id = app.create_entity(Tester::build());\n\n let tester2_id = app.create_entity(Tester::build());\n\n app.update();\n\n app.assert_entity(tester1_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(t.done_missing);\n\n });\n\n app.assert_entity(tester2_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(t.done_missing);\n\n });\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons_mut.rs", "rank": 13, "score": 247963.6029995212 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iterate_on_component_reference() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<StreamCollector>()\n\n .has(|c: &StreamCollector| assert_eq!(c.0, [None, Some(1), Some(2), Some(3)]));\n\n app.assert_singleton::<QueryTester>()\n\n .has(|c: &QueryTester| assert!(c.done));\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 14, "score": 247001.76000270754 }, { "content": "struct SingletonWithComponentAdded;\n\n\n\n#[singleton]\n\nimpl SingletonWithComponentAdded {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn add_component(parent: &Parent, entity: Entity<'_>, mut world: World<'_>) {\n\n world.add_component(entity.id(), format!(\"id: {}\", parent.0));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 15, "score": 243724.26621133042 }, { "content": "struct QueryTester {\n\n done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done: false })\n\n }\n\n\n\n #[run]\n\n fn run(&mut self, mut query: Query<'_, &mut Value, With<Number>>) {\n\n assert_iter(query.iter().map(|v| v.0), [1, 2, 3]);\n\n assert_iter(query.iter_mut().map(|v| v.0), [1, 2, 3]);\n\n 
assert_iter(query.iter().rev().map(|v| v.0), [3, 2, 1]);\n\n assert_iter(query.iter_mut().rev().map(|v| v.0), [3, 2, 1]);\n\n assert_eq!(query.get(10).map(|v| v.0), None);\n\n assert_eq!(query.get_mut(10).map(|v| v.0), None);\n\n assert_eq!(query.get(5).map(|v| v.0), None);\n\n assert_eq!(query.get_mut(5).map(|v| v.0), None);\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 16, "score": 243516.9008772845 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iterate_on_component_reference() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<StreamCollector>()\n\n .has(|c: &StreamCollector| assert_eq!(c.0, [5, 2, 4, 6]));\n\n app.assert_singleton::<QueryTester>()\n\n .has(|c: &QueryTester| assert!(c.done));\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/entity.rs", "rank": 17, "score": 242097.0344327707 }, { "content": "#[allow(missing_docs)]\n\n#[proc_macro_attribute]\n\n#[proc_macro_error::proc_macro_error]\n\npub fn singleton(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n implement_entity_main_component(item, true)\n\n}\n\n\n", "file_path": "crates/modor_derive/src/lib.rs", "rank": 18, "score": 240408.10445341078 }, { "content": "struct UnregisteredSingletonWithComponentAdded;\n\n\n\n#[singleton]\n\nimpl UnregisteredSingletonWithComponentAdded {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(SingletonWithComponentAdded)\n\n .inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn add_component(parent: &Parent, entity: Entity<'_>, mut world: World<'_>) {\n\n world.add_component(entity.id(), format!(\"id: {}\", parent.0));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 19, "score": 240376.30723187068 }, { "content": "struct QueryTester {\n\n done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done: false })\n\n }\n\n\n\n #[run]\n\n fn collect(&mut self, mut query: Query<'_, Option<&mut Value>, With<Number>>) {\n\n let values = [None, Some(1), Some(2), Some(3)];\n\n assert_iter(query.iter().map(|v| v.map(|v| v.0)), values);\n\n assert_iter(query.iter_mut().map(|v| v.map(|v| v.0)), values);\n\n let rev_values = [Some(3), Some(2), Some(1), None];\n\n assert_iter(query.iter().rev().map(|v| v.map(|v| v.0)), rev_values);\n\n assert_iter(query.iter_mut().rev().map(|v| v.map(|v| v.0)), rev_values);\n\n assert_eq!(query.get(10).map(|v| v.map(|v| v.0)), None);\n\n assert_eq!(query.get_mut(10).map(|v| v.map(|v| v.0)), None);\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 20, "score": 240172.22921345974 }, { "content": "/// A trait implemented for all valid filters that can be applied to a [`Query`](crate::Query).\n\npub trait QueryFilter: 'static {\n\n #[doc(hidden)]\n\n fn register(core: &mut CoreStorage);\n\n\n\n #[doc(hidden)]\n\n fn filtered_component_type_idxs(data: SystemData<'_>) -> Vec<ComponentTypeIdx>;\n\n}\n\n\n\n/// A filter for restricting a [`Query`](crate::Query) to entities containing an 
component\n\n/// of type `C`.\n\n///\n\n/// You can group multiple `With` in a tuple to restrict according to multiple component types.<br>\n\n/// A maximum of 10 filters is supported in tuples.\n\n/// If you need more filters for a query, you can use nested tuples.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # use modor::{Query, With, Entity};\n\n/// #\n", "file_path": "crates/modor/src/system_params/queries.rs", "rank": 21, "score": 239968.32836083343 }, { "content": "struct EntityWithMissingComponentDeleted;\n\n\n\n#[entity]\n\nimpl EntityWithMissingComponentDeleted {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn delete_component(entity: Entity<'_>, mut world: World<'_>) {\n\n world.delete_component::<String>(entity.id());\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 23, "score": 238911.94420784048 }, { "content": "struct EntityWithExistingComponentAdded;\n\n\n\n#[entity]\n\nimpl EntityWithExistingComponentAdded {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(String::from(\"empty\"))\n\n .inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn add_component(parent: &Parent, entity: Entity<'_>, mut world: World<'_>) {\n\n world.add_component(entity.id(), format!(\"id: {}\", parent.0));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 24, "score": 238911.94420784048 }, { "content": "struct EntityWithMissingComponentAdded;\n\n\n\n#[entity]\n\nimpl EntityWithMissingComponentAdded {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn add_component(parent: &Parent, entity: Entity<'_>, mut world: World<'_>) {\n\n world.add_component(entity.id(), format!(\"id: {}\", parent.0));\n\n world.add_component(101, format!(\"id: {}\", parent.0)); // not existing entity\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 25, "score": 238911.94420784048 }, { "content": "struct EntityWithExistingComponentDeleted;\n\n\n\n#[entity]\n\nimpl EntityWithExistingComponentDeleted {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .inherit_from(Parent::build(id))\n\n .with(String::from(\"existing\"))\n\n }\n\n\n\n #[run]\n\n fn delete_component(entity: Entity<'_>, mut world: World<'_>) {\n\n world.delete_component::<String>(entity.id());\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 26, "score": 238911.94420784048 }, { "content": "struct StreamCollector(Vec<u32>);\n\n\n\n#[singleton]\n\nimpl StreamCollector {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(vec![]))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 27, "score": 231883.7591620156 }, { "content": " pub trait SystemParamWithMutabilityIssue<Z> {}\n\n\n\n macro_rules! 
impl_system_param_with_mutability_issue {\n\n (($param:ident, $index:tt) $(,($params:ident, $indexes:tt))*) => {\n\n impl<P, $param, $($params,)* Z>\n\n SystemParamWithMutabilityIssue<((), Z, ($param, $($params),*))>\n\n for P\n\n where\n\n P: SystemParam<InnerTuple = ($param, $($params),*)>,\n\n $param: IncompatibleSystemParam<($($params,)*), Z>,\n\n {\n\n }\n\n\n\n impl<P, $param, $($params,)* Z>\n\n SystemParamWithMutabilityIssue<(((),), Z, ($param, $($params),*))>\n\n for P\n\n where\n\n P: SystemParam<InnerTuple = ($param, $($params),*)>,\n\n $param: SystemParamWithMutabilityIssue<Z>,\n\n {\n", "file_path": "crates/modor/src/system_checks.rs", "rank": 28, "score": 227360.16707325578 }, { "content": "fn implement_entity_main_component(item: TokenStream, is_singleton: bool) -> TokenStream {\n\n let item = parse_macro_input!(item as ItemImpl);\n\n let crate_ident = crate_name::find_crate_ident(item.span());\n\n let cleaned_block = impl_block::clean(&item);\n\n let type_name = &item.self_ty;\n\n let entity_type = if is_singleton {\n\n quote!(#crate_ident::Singleton)\n\n } else {\n\n quote!(#crate_ident::NotSingleton)\n\n };\n\n let update_statement = systems::generate_update_statement(&item);\n\n let output = quote! {\n\n #cleaned_block\n\n\n\n impl #crate_ident::EntityMainComponent for #type_name {\n\n type Type = #entity_type;\n\n\n\n fn on_update(runner: #crate_ident::SystemRunner<'_>) -> #crate_ident::SystemRunner<'_> {\n\n #update_statement\n\n }\n\n }\n\n };\n\n output.into()\n\n}\n", "file_path": "crates/modor_derive/src/lib.rs", "rank": 29, "score": 225718.5879836877 }, { "content": "fn assert_iter<T, E, I1, I2>(mut actual: I1, expected: E)\n\nwhere\n\n T: PartialEq + Debug,\n\n I1: Iterator<Item = T> + ExactSizeIterator,\n\n I2: ExactSizeIterator + Iterator<Item = T>,\n\n E: IntoIterator<Item = T, IntoIter = I2>,\n\n{\n\n let expected_iter = expected.into_iter();\n\n let expected_len = expected_iter.len();\n\n for (pos, expected_item) in expected_iter.enumerate() {\n\n assert_eq!(\n\n actual.len(),\n\n expected_len - pos,\n\n \"wrong size at position {}\",\n\n pos\n\n );\n\n assert_eq!(\n\n actual.next(),\n\n Some(expected_item),\n\n \"wrong item at position {}\",\n", "file_path": "crates/modor/tests/integration/system_params/mod.rs", "rank": 30, "score": 224490.8087935304 }, { "content": "struct StreamCollector(Vec<Option<u32>>);\n\n\n\n#[singleton]\n\nimpl StreamCollector {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(vec![]))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 31, "score": 223660.91549641924 }, { "content": "fn system_call_iter(impl_block: &ItemImpl) -> impl Iterator<Item = TokenStream> + '_ {\n\n impl_block\n\n .items\n\n .iter()\n\n .filter_map(|i| {\n\n if let ImplItem::Method(method) = i {\n\n let attributes = supported_attributes(&method.attrs);\n\n return match attributes.len().cmp(&1) {\n\n Ordering::Equal => Some(generate_system_call(method, &attributes[0])),\n\n Ordering::Less => None,\n\n Ordering::Greater => {\n\n emit_error!(attributes[1].span(), \"found more than one `run*` attribute\");\n\n None\n\n }\n\n };\n\n }\n\n None\n\n })\n\n .flatten()\n\n}\n\n\n", "file_path": "crates/modor_derive/src/systems.rs", "rank": 32, "score": 222529.34907278695 }, { "content": "#[allow(missing_docs)]\n\n#[proc_macro_attribute]\n\n#[proc_macro_error::proc_macro_error]\n\npub fn entity(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n implement_entity_main_component(item, 
false)\n\n}\n\n\n", "file_path": "crates/modor_derive/src/lib.rs", "rank": 33, "score": 216063.82971345284 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_world() {\n\n let mut app = TestApp::new();\n\n let entity10_id = app.create_entity(EntityToDelete::build(10));\n\n let entity11_id = app.create_entity(ParentEntityToDelete::build(11));\n\n let entity12_id = app.create_entity(ParentOfEntityToDelete::build(12));\n\n let entity20_id = app.create_entity(EntityWithMissingComponentAdded::build(20));\n\n let entity21_id = app.create_entity(EntityWithExistingComponentAdded::build(21));\n\n let entity22_id = app.create_entity(SingletonWithComponentAdded::build(22));\n\n let entity23_id = app.create_entity(UnregisteredSingletonWithComponentAdded::build(23));\n\n let entity30_id = app.create_entity(EntityWithExistingComponentDeleted::build(30));\n\n let entity31_id = app.create_entity(EntityWithExistingComponentDeleted::build(31));\n\n let entity40_id = app.create_entity(EntityWithMissingComponentDeleted::build(40));\n\n let entity50_id = app.create_entity(EntityWithNotRegisteredComponentTypeDeleted::build(50));\n\n let entity60_id = app.create_entity(EntityWithAddedChild::build(60));\n\n app.update();\n\n app.assert_entity(entity10_id).does_not_exist();\n\n app.assert_entity(entity11_id).does_not_exist();\n\n app.assert_entity(entity11_id + 1).does_not_exist();\n\n app.assert_entity(entity12_id)\n\n .has_children(|c| assert_eq!(c, []));\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 34, "score": 214603.5922636293 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_query() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(Tester::build())\n\n .with_entity(Level1::build(10, 20))\n\n .with_entity(Level1::build(30, 40))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<Tester>()\n\n .has(|t: &Tester| assert_eq!(t.done_count, 7));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/queries.rs", "rank": 35, "score": 214527.87714264606 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iteration_on_tuple() {\n\n let mut app: TestApp = App::new().with_entity(QueryTester::build()).into();\n\n let values1_id = app.create_entity(Values::build(true, true));\n\n let values2_id = app.create_entity(Values::build(true, false));\n\n let values3_id = app.create_entity(Values::build(false, true));\n\n app.update();\n\n app.assert_entity(values1_id).has(|v: &Values| {\n\n assert!(v.empty_done);\n\n assert!(v.one_item_done);\n\n assert!(v.two_item_done);\n\n assert!(v.more_than_two_item_done);\n\n });\n\n app.assert_entity(values2_id).has(|v: &Values| {\n\n assert!(v.empty_done);\n\n assert!(v.one_item_done);\n\n assert!(v.two_item_done);\n\n assert!(!v.more_than_two_item_done);\n\n });\n\n app.assert_entity(values3_id).has(|v: &Values| {\n\n assert!(v.empty_done);\n", "file_path": "crates/modor/tests/integration/system_params/tuples.rs", "rank": 36, "score": 214433.14551231707 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_entity() {\n\n let mut app: TestApp = App::new().with_entity(Parent::build()).into();\n\n app.update();\n\n app.assert_singleton::<Parent>()\n\n .has(|p: &Parent| assert!(p.done));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/entity.rs", "rank": 37, "score": 
212425.73519363673 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iterate_on_component_reference() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<StreamCollector>()\n\n .has(|c: &StreamCollector| assert_eq!(c.0, [1, 2, 3]));\n\n app.assert_singleton::<QueryTester>()\n\n .has(|c: &QueryTester| assert!(c.done));\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components.rs", "rank": 38, "score": 210661.47877995876 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn iterate_on_component_reference() {\n\n let mut app: TestApp = App::new()\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n app.update();\n\n app.assert_singleton::<StreamCollector>()\n\n .has(|c: &StreamCollector| assert_eq!(c.0, [None, Some(1), Some(2), Some(3)]));\n\n app.assert_singleton::<QueryTester>()\n\n .has(|c: &QueryTester| assert!(c.done));\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components.rs", "rank": 39, "score": 208148.0510490289 }, { "content": "/// A trait implemented for any system with mutability issue.\n\n///\n\n/// There is a mutability issue when two parameters of the system lock the same resource of the\n\n/// engine, and at least one of them locks the resource mutably\n\n/// (e.g. 
there are two parameters of type `&C` and `&mut C`).\n\n///\n\n/// This trait is used by the [`entity`](macro@crate::entity) and\n\n/// [`singleton`](macro@crate::singleton) proc macros to detect invalid systems.\n\npub trait SystemWithParamMutabilityIssue<S, Z>: Sized + SealedChecker {\n\n // coverage: off (method only used for compile time checking)\n\n #[doc(hidden)]\n\n #[must_use]\n\n fn check_param_mutability(self) -> Self {\n\n self\n\n }\n\n // coverage: on\n\n}\n\n\n\nimpl<S, P, Z> SystemWithParamMutabilityIssue<S, Z> for SystemParamMutabilityChecker<S, P>\n\nwhere\n\n S: System<P>,\n\n P: SystemParam + SystemParamWithMutabilityIssue<Z>,\n\n{\n\n}\n\n\n\nmod internal {\n\n use crate::system_params::internal::{Const, LockableSystemParam, Mut};\n\n use crate::SystemParam;\n\n\n", "file_path": "crates/modor/src/system_checks.rs", "rank": 40, "score": 205780.76092154946 }, { "content": "struct QueryTester {\n\n empty_done: bool,\n\n one_item_done: bool,\n\n two_item_done: bool,\n\n more_than_two_item_done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self {\n\n empty_done: false,\n\n one_item_done: false,\n\n two_item_done: false,\n\n more_than_two_item_done: false,\n\n })\n\n }\n\n\n\n #[run]\n\n fn run_empty(&mut self, mut query: Query<'_, (), With<Values>>) {\n", "file_path": "crates/modor/tests/integration/system_params/tuples.rs", "rank": 41, "score": 204612.27892841163 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_single() {\n\n let mut app: TestApp = App::new().with_entity(Number::build(10)).into();\n\n let tester1_id = app.create_entity(Tester::build());\n\n let tester2_id = app.create_entity(Tester::build());\n\n app.update();\n\n app.assert_entity(tester1_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(!t.done_missing);\n\n });\n\n app.assert_entity(tester2_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(!t.done_missing);\n\n });\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/singletons.rs", "rank": 42, "score": 204435.56583503782 }, { "content": "struct Tester {\n\n done_existing: bool,\n\n done_missing: bool,\n\n}\n\n\n\n#[entity]\n\nimpl Tester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self {\n\n done_existing: false,\n\n done_missing: false,\n\n })\n\n }\n\n\n\n #[run]\n\n fn run_existing(&mut self, number: SingleMut<'_, Number>) {\n\n assert_eq!(number.0, 10);\n\n assert_eq!(number.entity().id(), 0);\n\n self.done_existing = true;\n\n #[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "crates/modor/tests/integration/system_params/singletons_mut.rs", "rank": 43, "score": 204022.80247593747 }, { "content": "struct QueryTester {\n\n done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done: false })\n\n }\n\n\n\n #[run]\n\n fn run(&mut self, mut query: Query<'_, &Value, With<Number>>) {\n\n assert_iter(query.iter().map(|v| v.0), [1, 2, 3]);\n\n assert_iter(query.iter_mut().map(|v| v.0), [1, 2, 3]);\n\n assert_iter(query.iter().rev().map(|v| v.0), [3, 2, 1]);\n\n assert_iter(query.iter_mut().rev().map(|v| v.0), [3, 2, 1]);\n\n assert_eq!(query.get(10).map(|v| v.0), None);\n\n assert_eq!(query.get_mut(10).map(|v| v.0), None);\n\n assert_eq!(query.get(5).map(|v| v.0), None);\n\n assert_eq!(query.get_mut(5).map(|v| v.0), None);\n", "file_path": 
"crates/modor/tests/integration/system_params/components.rs", "rank": 44, "score": 203648.2012754781 }, { "content": "struct Number;\n\n\n\n#[entity]\n\nimpl Number {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).with(Value(value))\n\n }\n\n\n\n fn build_without_value() -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n }\n\n\n\n fn build_with_additional_component(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(Value(value))\n\n .with(String::from(\"other\"))\n\n }\n\n\n\n #[run]\n\n fn collect(value: &mut Value, mut collector: SingleMut<'_, StreamCollector>) {\n\n collector.0.push(value.0);\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n spin_sleep::sleep(std::time::Duration::from_millis(50));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 45, "score": 203297.50585411122 }, { "content": "struct OtherNumber;\n\n\n\n#[entity]\n\nimpl OtherNumber {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).with(Value(value))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 46, "score": 203297.50585411122 }, { "content": "struct Singleton;\n\n\n\n#[singleton]\n\nimpl Singleton {}\n\n\n\nassert_impl_all!(App: Send, Unpin);\n\nassert_impl_all!(DependsOn<Action>: Sync, Send, UnwindSafe, RefUnwindSafe, Unpin);\n\nassert_impl_all!(ChildBuilder<'_>: Send, Unpin);\n\nassert_impl_all!(Entity<'_>: Sync, Send, Unpin);\n\nassert_impl_all!(EntityBuilder<Singleton, (), ()>: Sync, Send, UnwindSafe, RefUnwindSafe, Unpin);\n\nassert_impl_all!(Query<'_, ()>: Sync, Send, Unpin);\n\nassert_impl_all!(Single<'_, Singleton>: Sync, Send, Unpin);\n\nassert_impl_all!(SingleMut<'_, Singleton>: Sync, Send, Unpin);\n\nassert_impl_all!(With<u32>: Sync, Send, UnwindSafe, RefUnwindSafe, Unpin);\n\nassert_impl_all!(World<'_>: Sync, Send, Unpin);\n", "file_path": "crates/modor/tests/compilation/trait_implementations.rs", "rank": 47, "score": 203280.36546657648 }, { "content": "#[allow(clippy::wildcard_enum_match_arm, clippy::cast_possible_truncation)]\n\npub fn runner(mut app: App) {\n\n configure_logging();\n\n #[cfg(not(target_os = \"android\"))]\n\n let mut gilrs = init_gamepads(&mut app);\n\n let event_loop = EventLoop::new();\n\n let mut window = None;\n\n app.run_for_singleton(|i: &mut WindowInit| window = Some(i.create_window(&event_loop)));\n\n let window = window.expect(\"`GraphicsModule` entity not found or created in windowless mode\");\n\n let mut previous_update_end = Instant::now();\n\n event_loop.run(move |event, _, control_flow| match event {\n\n Event::Resumed => {\n\n app.run_for_singleton(|w: &mut WindowInit| w.create_renderer(&window));\n\n app.run_for_singleton(|w: &mut Window| w.update_renderer(&window));\n\n }\n\n Event::MainEventsCleared => window.request_redraw(),\n\n Event::RedrawRequested(window_id) if window_id == window.id() => {\n\n let mut frame_rate = FrameRate::Unlimited;\n\n app.run_for_singleton(|i: &mut FrameRateLimit| frame_rate = i.get());\n\n app.run_for_singleton(|w: &mut Window| {\n\n let size = window.inner_size();\n", "file_path": "crates/modor_graphics/src/runner.rs", "rank": 48, "score": 203017.9326301192 }, { "content": "struct EntityToDelete;\n\n\n\n#[entity]\n\nimpl EntityToDelete {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn delete(entity: Entity<'_>, mut world: World<'_>) {\n\n 
world.delete_entity(entity.id());\n\n world.delete_entity(100); // not existing entity\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 49, "score": 202922.92051747488 }, { "content": "struct QueryTester {\n\n done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done: false })\n\n }\n\n\n\n #[run]\n\n fn collect(&mut self, mut query: Query<'_, Entity<'_>, With<Number>>) {\n\n assert_iter(query.iter().map(Entity::id), [5, 2, 4, 6]);\n\n assert_iter(query.iter_mut().map(Entity::id), [5, 2, 4, 6]);\n\n assert_iter(query.iter().rev().map(Entity::id), [6, 4, 2, 5]);\n\n assert_iter(query.iter_mut().rev().map(Entity::id), [6, 4, 2, 5]);\n\n assert_eq!(query.get(10).map(Entity::id), None);\n\n assert_eq!(query.get_mut(10).map(Entity::id), None);\n\n assert_eq!(query.get(3).map(Entity::id), None);\n\n assert_eq!(query.get_mut(3).map(Entity::id), None);\n", "file_path": "crates/modor/tests/integration/system_params/entity.rs", "rank": 50, "score": 202861.1862043603 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn use_single() {\n\n let mut app: TestApp = App::new().with_entity(Number::build(10)).into();\n\n let tester1_id = app.create_entity(Tester::build());\n\n let tester2_id = app.create_entity(Tester::build());\n\n app.update();\n\n app.assert_entity(tester1_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(t.done_missing);\n\n });\n\n app.assert_entity(tester2_id).has(|t: &Tester| {\n\n assert!(t.done_existing);\n\n assert!(t.done_missing);\n\n });\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons.rs", "rank": 51, "score": 201664.9248547145 }, { "content": "struct Tester {\n\n done_existing: bool,\n\n done_missing: bool,\n\n}\n\n\n\n#[entity]\n\nimpl Tester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self {\n\n done_existing: false,\n\n done_missing: false,\n\n })\n\n }\n\n\n\n #[run]\n\n fn run_existing(&mut self, number: Option<SingleMut<'_, Number>>) {\n\n assert_eq!(number.map(|n| n.0), Some(10));\n\n self.done_existing = true;\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n spin_sleep::sleep(std::time::Duration::from_millis(100));\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons_mut.rs", "rank": 52, "score": 201260.43054549574 }, { "content": "struct Other(u32);\n\n\n\n#[singleton]\n\nimpl Other {}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/singletons_mut.rs", "rank": 53, "score": 201179.9036532686 }, { "content": " pub trait BuildEntity: BuildEntityPart {\n\n fn build(self, core: &mut CoreStorage, parent_idx: Option<EntityIdx>) -> EntityIdx;\n\n }\n\n}\n", "file_path": "crates/modor/src/entities.rs", "rank": 54, "score": 201126.78726618222 }, { "content": "struct QueryTester {\n\n done: bool,\n\n}\n\n\n\n#[singleton]\n\nimpl QueryTester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done: false })\n\n }\n\n\n\n #[run]\n\n fn collect(&mut self, mut query: Query<'_, Option<&Value>, With<Number>>) {\n\n let values = [None, Some(1), Some(2), Some(3)];\n\n assert_iter(query.iter().map(|v| v.map(|v| v.0)), values);\n\n assert_iter(query.iter_mut().map(|v| v.map(|v| v.0)), values);\n\n let rev_values = [Some(3), Some(2), Some(1), None];\n\n assert_iter(query.iter().rev().map(|v| v.map(|v| v.0)), rev_values);\n\n assert_iter(query.iter_mut().rev().map(|v| v.map(|v| 
v.0)), rev_values);\n\n assert_eq!(query.get(10).map(|v| v.map(|v| v.0)), None);\n\n assert_eq!(query.get_mut(10).map(|v| v.map(|v| v.0)), None);\n", "file_path": "crates/modor/tests/integration/system_params/optional_components.rs", "rank": 55, "score": 200891.86347057685 }, { "content": "struct OtherNumber;\n\n\n\n#[entity]\n\nimpl OtherNumber {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).with(Value(value))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 56, "score": 200546.81709730814 }, { "content": "struct Number;\n\n\n\n#[entity]\n\nimpl Number {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).with(Value(value))\n\n }\n\n\n\n fn build_without_value() -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n }\n\n\n\n fn build_with_additional_component(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(Value(value))\n\n .with(String::from(\"other\"))\n\n }\n\n\n\n #[run]\n\n fn collect(value: Option<&mut Value>, mut collector: SingleMut<'_, StreamCollector>) {\n\n collector.0.push(value.map(|v| v.0));\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n spin_sleep::sleep(std::time::Duration::from_millis(50));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 57, "score": 200546.81709730814 }, { "content": "struct EntityWithAddedChild;\n\n\n\n#[entity]\n\nimpl EntityWithAddedChild {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self).inherit_from(Parent::build(id))\n\n }\n\n\n\n #[run]\n\n fn create_root_entity(mut world: World<'_>) {\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n spin_sleep::sleep(std::time::Duration::from_millis(100));\n\n world.create_root_entity(NewRootEntity::build(80));\n\n }\n\n\n\n #[run]\n\n fn create_child_entity(entity: Entity<'_>, mut world: World<'_>) {\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n spin_sleep::sleep(std::time::Duration::from_millis(100));\n\n world.create_child_entity(entity.id(), NewChildEntity::build(70));\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 58, "score": 200178.2656306758 }, { "content": "struct ParentOfEntityToDelete;\n\n\n\n#[entity]\n\nimpl ParentOfEntityToDelete {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .inherit_from(Parent::build(id))\n\n .with_child(EntityToDelete::build(id))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 59, "score": 200178.2656306758 }, { "content": "struct ParentEntityToDelete;\n\n\n\n#[entity]\n\nimpl ParentEntityToDelete {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .inherit_from(Parent::build(id))\n\n .with_child(DeletedChild::build())\n\n }\n\n\n\n #[run]\n\n fn delete(entity: Entity<'_>, mut world: World<'_>) {\n\n world.delete_entity(entity.id());\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 60, "score": 200178.2656306758 }, { "content": "struct SingletonEntity;\n\n\n\n#[singleton]\n\nimpl SingletonEntity {}\n\n\n", "file_path": "crates/modor/tests/compilation/compile_fail/systems_with_mutability_issue.rs", "rank": 61, "score": 199301.91064567634 }, { "content": "#[test]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nfn run_systems_in_parallel() {\n\n let mut app: TestApp = App::new()\n\n .with_thread_count(2)\n\n .with_entity(Number::build(10))\n\n 
.with_entity(Tester::build())\n\n .into();\n\n let start = instant::Instant::now();\n\n app.update();\n\n assert!(instant::Instant::now() - start > std::time::Duration::from_millis(200));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/singletons_mut.rs", "rank": 62, "score": 198464.84700922086 }, { "content": "struct Number(u32);\n\n\n\n#[singleton]\n\nimpl Number {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self(value))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/singletons_mut.rs", "rank": 63, "score": 198417.53172282685 }, { "content": "struct Other(u32);\n\n\n\n#[singleton]\n\nimpl Other {}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons_mut.rs", "rank": 64, "score": 198417.53172282685 }, { "content": "#[test]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nfn run_systems_in_parallel() {\n\n let mut app: TestApp = App::new()\n\n .with_thread_count(2)\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n let start = instant::Instant::now();\n\n app.update();\n\n assert!(instant::Instant::now() - start > std::time::Duration::from_millis(300));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 65, "score": 197762.54631326528 }, { "content": "struct Value(u32);\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components_mut.rs", "rank": 66, "score": 197703.91827463926 }, { "content": "#[test]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nfn run_systems_in_parallel() {\n\n let mut app: TestApp = App::new()\n\n .with_thread_count(2)\n\n .with_entity(Number::build(10))\n\n .with_entity(Tester::build())\n\n .into();\n\n let start = instant::Instant::now();\n\n app.update();\n\n assert!(instant::Instant::now() - start > std::time::Duration::from_millis(200));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons_mut.rs", "rank": 67, "score": 195873.8552297627 }, { "content": "struct Number(u32);\n\n\n\n#[singleton]\n\nimpl Number {\n\n fn build(value: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self(value))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_singletons_mut.rs", "rank": 68, "score": 195742.74230446084 }, { "content": "struct B(f32);\n", "file_path": "crates/modor/benches/parallel_system_iteration.rs", "rank": 69, "score": 195503.54352379398 }, { "content": "#[test]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nfn run_systems_in_parallel() {\n\n let mut app: TestApp = App::new()\n\n .with_thread_count(2)\n\n .with_entity(QueryTester::build())\n\n .with_entity(StreamCollector::build())\n\n .with_entity(Number::build(1))\n\n .with_entity(OtherNumber::build(10))\n\n .with_entity(Number::build(2))\n\n .with_entity(Number::build_without_value())\n\n .with_entity(Number::build_with_additional_component(3))\n\n .into();\n\n let start = instant::Instant::now();\n\n app.update();\n\n assert!(instant::Instant::now() - start > std::time::Duration::from_millis(400));\n\n}\n", "file_path": "crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 70, "score": 195182.51420633803 }, { "content": "struct Value(u32);\n\n\n", "file_path": 
"crates/modor/tests/integration/system_params/optional_components_mut.rs", "rank": 71, "score": 195040.44160850524 }, { "content": "struct NewChildEntity(u32);\n\n\n\n#[entity]\n\nimpl NewChildEntity {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self(id))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 72, "score": 194677.73270473004 }, { "content": "struct NewRootEntity(u32);\n\n\n\n#[entity]\n\nimpl NewRootEntity {\n\n fn build(id: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self(id))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/world.rs", "rank": 73, "score": 194677.73270473004 }, { "content": "/// A trait implemented for all types able to build an entity.\n\n///\n\n/// This trait is particularly useful when defining a building method for an entity.\n\n///\n\n/// # Examples\n\n///\n\n/// See [`EntityBuilder`](crate::EntityBuilder).\n\npub trait Built<E>: BuildEntity\n\nwhere\n\n E: EntityMainComponent,\n\n{\n\n}\n\n\n\n/// A builder for defining the components and children of an entity.\n\n///\n\n/// # Examples\n\n///\n\n///\n\n/// ```rust\n\n/// # use modor::*;\n\n/// #\n\n/// # struct Position(f32, f32);\n\n/// # struct Velocity(f32, f32);\n\n/// # struct Acceleration(f32, f32);\n\n/// #\n\n/// struct Object {\n\n/// name: String,\n", "file_path": "crates/modor/src/entities.rs", "rank": 74, "score": 194304.2412830709 }, { "content": "pub fn set_value<K, V>(vec: &mut TiVec<K, V>, idx: K, value: V)\n\nwhere\n\n usize: From<K>,\n\n K: From<usize>,\n\n V: Default,\n\n{\n\n let idx = usize::from(idx);\n\n (vec.len()..=idx).for_each(|_| vec.push(V::default()));\n\n vec[K::from(idx)] = value;\n\n}\n", "file_path": "crates/modor_internal/src/ti_vec.rs", "rank": 75, "score": 193649.64686567787 }, { "content": "struct Data(f32);\n\n\n\nmacro_rules! create_entities {\n\n ($app:ident; $( $variants:ident ),*) => {\n\n $(\n\n struct $variants(f32);\n\n\n\n #[entity]\n\n impl $variants {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(0.0)).with(Data(1.0))\n\n }\n\n\n\n #[run]\n\n fn update(data: &mut Data) {\n\n data.0 *= 2.0;\n\n }\n\n }\n\n\n\n for _ in 0..20 {\n\n $app = $app.with_entity($variants::build());\n\n }\n\n )*\n\n };\n\n}\n\n\n", "file_path": "crates/modor/benches/multiple_systems_fragmented_iteration.rs", "rank": 76, "score": 193206.70073722687 }, { "content": "struct Data(f32);\n\n\n\n#[entity]\n\nimpl Data {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(1.0))\n\n }\n\n\n\n #[run]\n\n fn update(&mut self) {\n\n self.0 *= 2.0;\n\n }\n\n}\n\n\n\nmacro_rules! 
create_entities {\n\n ($app:ident; $( $variants:ident ),*) => {\n\n $(\n\n struct $variants(f32);\n\n\n\n #[entity]\n", "file_path": "crates/modor/benches/one_system_fragmented_iteration.rs", "rank": 77, "score": 193206.70073722684 }, { "content": "struct StreamCollector(Vec<u32>);\n\n\n\n#[singleton]\n\nimpl StreamCollector {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(vec![]))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/components.rs", "rank": 78, "score": 193177.59314946522 }, { "content": "struct StreamCollector(Vec<usize>);\n\n\n\n#[singleton]\n\nimpl StreamCollector {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(vec![]))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/entity.rs", "rank": 79, "score": 192415.5308213906 }, { "content": "fn clean_impl_item(mut item: ImplItem) -> ImplItem {\n\n if let ImplItem::Method(method) = &mut item {\n\n method.attrs = method\n\n .attrs\n\n .clone()\n\n .into_iter()\n\n .filter(|a| attributes::parse_type(a).is_none())\n\n .collect();\n\n }\n\n item\n\n}\n", "file_path": "crates/modor_derive/src/impl_block.rs", "rank": 80, "score": 186694.07496003452 }, { "content": "struct StreamCollector(Vec<Option<u32>>);\n\n\n\n#[singleton]\n\nimpl StreamCollector {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self(vec![]))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/optional_components.rs", "rank": 81, "score": 186049.09528311744 }, { "content": "fn run(c: &mut Criterion) {\n\n c.bench_function(\"entity_creation\", |b| {\n\n b.iter(|| {\n\n let mut app = App::new();\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Object::build());\n\n }\n\n });\n\n });\n\n}\n\n\n\nmod group {\n\n criterion::criterion_group!(benches, super::run);\n\n}\n\ncriterion_main!(group::benches);\n", "file_path": "crates/modor/benches/entity_creation.rs", "rank": 82, "score": 183449.33881400613 }, { "content": "fn run(c: &mut Criterion) {\n\n let mut app = App::new();\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Object::build());\n\n }\n\n c.bench_function(\"simple_system_iteration\", |b| b.iter(|| app.update()));\n\n}\n\n\n\nmod group {\n\n criterion::criterion_group!(benches, super::run);\n\n}\n\ncriterion_main!(group::benches);\n", "file_path": "crates/modor/benches/simple_system_iteration.rs", "rank": 83, "score": 183072.45505232274 }, { "content": "fn run(c: &mut Criterion) {\n\n let mut app = App::new().with_thread_count(3);\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Item1::build());\n\n }\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Item2::build());\n\n }\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Item3::build());\n\n }\n\n for _ in 0..10_000 {\n\n app = app.with_entity(Item4::build());\n\n }\n\n c.bench_function(\"parallel_system_iteration\", |b| b.iter(|| app.update()));\n\n}\n\n\n\nmod group {\n\n criterion::criterion_group!(benches, super::run);\n\n}\n\ncriterion_main!(group::benches);\n", "file_path": "crates/modor/benches/parallel_system_iteration.rs", "rank": 84, "score": 183072.45505232274 }, { "content": "#[allow(clippy::items_after_statements, clippy::cognitive_complexity)]\n\nfn run(c: &mut Criterion) {\n\n let mut app = App::new();\n\n create_entities!(app; A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);\n\n c.bench_function(\"one_system_fragmented_iteration\", |b| {\n\n b.iter(|| app.update());\n\n });\n\n}\n\n\n\nmod group {\n\n 
criterion::criterion_group!(benches, super::run);\n\n}\n\ncriterion_main!(group::benches);\n", "file_path": "crates/modor/benches/one_system_fragmented_iteration.rs", "rank": 85, "score": 180978.76200361154 }, { "content": "#[allow(clippy::items_after_statements, clippy::cognitive_complexity)]\n\nfn run(c: &mut Criterion) {\n\n let mut app = App::new();\n\n create_entities!(app; A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);\n\n c.bench_function(\"multiple_systems_fragmented_iteration\", |b| {\n\n b.iter(|| app.update());\n\n });\n\n}\n\n\n\nmod group {\n\n criterion::criterion_group!(benches, super::run);\n\n}\n\ncriterion_main!(group::benches);\n", "file_path": "crates/modor/benches/multiple_systems_fragmented_iteration.rs", "rank": 86, "score": 180978.76200361154 }, { "content": "#[allow(missing_docs)]\n\n#[proc_macro_attribute]\n\n#[proc_macro_error::proc_macro_error]\n\npub fn action(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let item = parse_macro_input!(item as ItemStruct);\n\n let crate_ident = crate_name::find_crate_ident(item.span());\n\n let type_name = &item.ident;\n\n let actions: Vec<_> = Punctuated::<Ident, Token![,]>::parse_terminated\n\n .parse(attr)\n\n .unwrap_or_abort()\n\n .into_iter()\n\n .collect();\n\n let output = quote! {\n\n #item\n\n\n\n impl #crate_ident::Action for #type_name {\n\n type Constraint = (#(#crate_ident::DependsOn<#actions>,)*);\n\n }\n\n };\n\n output.into()\n\n}\n\n\n", "file_path": "crates/modor_derive/src/lib.rs", "rank": 87, "score": 177857.29734871926 }, { "content": " pub trait IncompatibleSystemParam<P, Z>: Sized {}\n\n\n\n impl<P1, P2, T> IncompatibleSystemParam<P2, ((), T)> for P1\n\n where\n\n P1: LockableSystemParam<LockedType = T, Mutability = Const>,\n\n P2: LockableSystemParam<LockedType = T, Mutability = Mut>,\n\n {\n\n }\n\n\n\n impl<P1, P2, T> IncompatibleSystemParam<P2, ((), T, ())> for P1\n\n where\n\n P1: LockableSystemParam<LockedType = T, Mutability = Mut>,\n\n P2: LockableSystemParam<LockedType = T, Mutability = Const>,\n\n {\n\n }\n\n\n\n impl<P1, P2, T> IncompatibleSystemParam<P2, ((), T, ((),))> for P1\n\n where\n\n P1: LockableSystemParam<LockedType = T, Mutability = Mut>,\n\n P2: LockableSystemParam<LockedType = T, Mutability = Mut>,\n", "file_path": "crates/modor/src/system_checks.rs", "rank": 88, "score": 174678.21617292552 }, { "content": "fn generate_system_call(method: &ImplItemMethod, attribute: &AttributeType) -> Option<TokenStream> {\n\n let crate_ident = crate_name::find_crate_ident(attribute.span());\n\n let system_name = &method.sig.ident;\n\n Some(match attributes::parse(attribute)? {\n\n ParsedAttribute::Run => quote_spanned! { attribute.span() =>\n\n .run(#crate_ident::system!(Self::#system_name))\n\n },\n\n ParsedAttribute::RunAs(action) => quote_spanned! { attribute.span() =>\n\n .run_as::<#action>(#crate_ident::system!(Self::#system_name))\n\n },\n\n ParsedAttribute::RunAfter(actions) => quote_spanned! { attribute.span() =>\n\n .run_constrained::<(#(#crate_ident::DependsOn<#actions>,)*)>(\n\n #crate_ident::system!(Self::#system_name)\n\n )\n\n },\n\n ParsedAttribute::RunAfterPrevious => quote_spanned! 
{ attribute.span() =>\n\n .and_then(#crate_ident::system!(Self::#system_name))\n\n },\n\n })\n\n}\n", "file_path": "crates/modor_derive/src/systems.rs", "rank": 89, "score": 172765.21989216728 }, { "content": "/// A trait implemented for any system.\n\n///\n\n/// This trait is used by the [`entity`](macro@crate::entity) and\n\n/// [`singleton`](macro@crate::singleton) proc macros to detect invalid systems.\n\npub trait SystemWithParams<S, P>: Sized + SealedChecker {\n\n #[doc(hidden)]\n\n #[must_use]\n\n fn check_param_mutability(self) -> Self {\n\n self\n\n }\n\n}\n\n\n\nimpl<S, P> SystemWithParams<S, P> for SystemParamMutabilityChecker<S, P>\n\nwhere\n\n S: System<P>,\n\n P: SystemParam,\n\n{\n\n}\n\n\n", "file_path": "crates/modor/src/system_checks.rs", "rank": 90, "score": 169469.126306345 }, { "content": "fn send_mouse_event(app: &mut App, event: MouseEvent) {\n\n app.run_for_singleton(|c: &mut InputEventCollector| c.push(event.into()));\n\n}\n\n\n", "file_path": "crates/modor_graphics/src/runner.rs", "rank": 91, "score": 166519.31395076413 }, { "content": "fn send_keyboard_event(app: &mut App, event: KeyboardEvent) {\n\n app.run_for_singleton(|c: &mut InputEventCollector| c.push(event.into()));\n\n}\n\n\n", "file_path": "crates/modor_graphics/src/runner.rs", "rank": 92, "score": 166519.31395076413 }, { "content": "fn send_touch_event(app: &mut App, event: TouchEvent) {\n\n app.run_for_singleton(|c: &mut InputEventCollector| c.push(event.into()));\n\n}\n\n\n", "file_path": "crates/modor_graphics/src/runner.rs", "rank": 93, "score": 166519.31395076413 }, { "content": "struct Values {\n\n empty_done: bool,\n\n one_item_done: bool,\n\n two_item_done: bool,\n\n more_than_two_item_done: bool,\n\n}\n\n\n\n#[entity]\n\nimpl Values {\n\n fn build(value1: bool, value2: bool) -> impl Built<Self> {\n\n EntityBuilder::new(Self {\n\n empty_done: false,\n\n one_item_done: false,\n\n two_item_done: false,\n\n more_than_two_item_done: false,\n\n })\n\n .with_option(value1.then(|| Value1(10)))\n\n .with_option(value2.then(|| Value2(20)))\n\n }\n\n\n", "file_path": "crates/modor/tests/integration/system_params/tuples.rs", "rank": 94, "score": 163075.9160199767 }, { "content": "struct Level1;\n\n\n\n#[entity]\n\nimpl Level1 {\n\n fn build(value1: u32, value2: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(Value1(value1 + 2))\n\n .with(Value3(value1 + 2))\n\n .with_child(Level2::build(value1, value2))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/queries.rs", "rank": 95, "score": 163038.18673654282 }, { "content": "struct Level2;\n\n\n\n#[entity]\n\nimpl Level2 {\n\n fn build(value1: u32, value2: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(Value2(value2 + 1))\n\n .with_child(Level3::build(value1, value2))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/queries.rs", "rank": 96, "score": 163038.18673654282 }, { "content": "struct Level3;\n\n\n\n#[entity]\n\nimpl Level3 {\n\n fn build(value1: u32, value2: u32) -> impl Built<Self> {\n\n EntityBuilder::new(Self)\n\n .with(Value1(value1))\n\n .with(Value2(value2))\n\n }\n\n}\n\n\n", "file_path": "crates/modor/tests/integration/system_params/queries.rs", "rank": 97, "score": 163038.18673654282 }, { "content": "struct Tester {\n\n done_count: u32,\n\n}\n\n\n\n#[singleton]\n\nimpl Tester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self { done_count: 0 })\n\n }\n\n\n\n #[run]\n\n fn iter_with_no_filter(&mut self, query: Query<'_, Entity<'_>>) 
{\n\n assert_iter(query.iter().map(Entity::id), [0, 1, 4, 2, 5, 3, 6]);\n\n self.done_count += 1;\n\n }\n\n\n\n #[run]\n\n fn iter_with_one_filter(&mut self, query: Query<'_, Entity<'_>, With<Value1>>) {\n\n assert_iter(query.iter().map(Entity::id), [1, 4, 3, 6]);\n\n self.done_count += 1;\n", "file_path": "crates/modor/tests/integration/system_params/queries.rs", "rank": 98, "score": 163038.18673654282 }, { "content": "struct Tester {\n\n done_existing: bool,\n\n done_missing: bool,\n\n}\n\n\n\n#[entity]\n\nimpl Tester {\n\n fn build() -> impl Built<Self> {\n\n EntityBuilder::new(Self {\n\n done_existing: false,\n\n done_missing: false,\n\n })\n\n }\n\n\n\n #[run]\n\n fn run_existing(&mut self, number: Single<'_, Number>) {\n\n assert_eq!(number.0, 10);\n\n assert_eq!(number.entity().id(), 0);\n\n self.done_existing = true;\n\n #[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "crates/modor/tests/integration/system_params/singletons.rs", "rank": 99, "score": 162833.1606336532 } ]
Rust
src/net.rs
dejano-with-tie/stun_codec
4f15041b761eb57bb3c6aef09610ec61603c9663
use crate::constants::MAGIC_COOKIE; use crate::TransactionId; use bytecodec::bytes::{BytesDecoder, BytesEncoder}; use bytecodec::combinator::Peekable; use bytecodec::fixnum::{U16beDecoder, U16beEncoder, U8Decoder, U8Encoder}; use bytecodec::{ByteCount, Decode, Encode, Eos, ErrorKind, Result, SizedEncode}; use std::net::{IpAddr, SocketAddr}; const FAMILY_IPV4: u8 = 1; const FAMILY_IPV6: u8 = 2; pub fn socket_addr_xor(addr: SocketAddr, transaction_id: TransactionId) -> SocketAddr { let xor_port = addr.port() ^ (MAGIC_COOKIE >> 16) as u16; match addr.ip() { IpAddr::V4(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate() { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V4(xor_ip), xor_port) } IpAddr::V6(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate().take(4) { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } for (i, b) in octets.iter_mut().enumerate().take(16).skip(4) { *b ^= transaction_id.as_bytes()[i - 4]; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V6(xor_ip), xor_port) } } } #[derive(Debug, Default)] pub struct SocketAddrDecoder { unused: U8Decoder, family: Peekable<U8Decoder>, port: U16beDecoder, ip: BytesDecoder<IpBytes>, } impl SocketAddrDecoder { pub fn new() -> Self { Self::default() } } impl Decode for SocketAddrDecoder { type Item = SocketAddr; fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> { let mut offset = 0; if !self.family.is_idle() { bytecodec_try_decode!(self.unused, offset, buf, eos); bytecodec_try_decode!(self.family, offset, buf, eos); let family = self.family.peek().expect("never fails"); match *family { FAMILY_IPV4 => self.ip.set_bytes(IpBytes::V4([0; 4])), FAMILY_IPV6 => self.ip.set_bytes(IpBytes::V6([0; 16])), _ => track_panic!( ErrorKind::InvalidInput, "Unknown address family: {}", family ), } } bytecodec_try_decode!(self.port, offset, buf, eos); bytecodec_try_decode!(self.ip, offset, buf, eos); Ok(offset) } fn finish_decoding(&mut self) -> Result<Self::Item> { let _ = track!(self.unused.finish_decoding())?; let _ = track!(self.family.finish_decoding())?; let port = track!(self.port.finish_decoding())?; let ip = match track!(self.ip.finish_decoding())? 
{ IpBytes::V4(b) => IpAddr::V4(b.into()), IpBytes::V6(b) => IpAddr::V6(b.into()), }; Ok(SocketAddr::new(ip, port)) } fn requiring_bytes(&self) -> ByteCount { self.unused .requiring_bytes() .add_for_decoding(self.family.requiring_bytes()) .add_for_decoding(self.port.requiring_bytes()) .add_for_decoding(self.ip.requiring_bytes()) } fn is_idle(&self) -> bool { self.port.is_idle() && self.ip.is_idle() } } #[derive(Debug, Default)] pub struct SocketAddrEncoder { unused: U8Encoder, family: U8Encoder, port: U16beEncoder, ip: BytesEncoder<IpBytes>, } impl SocketAddrEncoder { pub fn new() -> Self { Self::default() } } impl Encode for SocketAddrEncoder { type Item = SocketAddr; fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> { let mut offset = 0; bytecodec_try_encode!(self.unused, offset, buf, eos); bytecodec_try_encode!(self.family, offset, buf, eos); bytecodec_try_encode!(self.port, offset, buf, eos); bytecodec_try_encode!(self.ip, offset, buf, eos); Ok(offset) } fn start_encoding(&mut self, item: Self::Item) -> Result<()> { track!(self.unused.start_encoding(0))?; if item.ip().is_ipv4() { track!(self.family.start_encoding(FAMILY_IPV4))?; } else { track!(self.family.start_encoding(FAMILY_IPV6))?; } track!(self.port.start_encoding(item.port()))?; track!(self.ip.start_encoding(IpBytes::new(item.ip())))?; Ok(()) } fn requiring_bytes(&self) -> ByteCount { ByteCount::Finite(self.exact_requiring_bytes()) } fn is_idle(&self) -> bool { self.ip.is_idle() } } impl SizedEncode for SocketAddrEncoder { fn exact_requiring_bytes(&self) -> u64 { self.unused.exact_requiring_bytes() + self.family.exact_requiring_bytes() + self.port.exact_requiring_bytes() + self.ip.exact_requiring_bytes() } } #[derive(Debug)] enum IpBytes { V4([u8; 4]), V6([u8; 16]), } impl IpBytes { fn new(ip: IpAddr) -> Self { match ip { IpAddr::V4(ip) => IpBytes::V4(ip.octets()), IpAddr::V6(ip) => IpBytes::V6(ip.octets()), } } } impl AsRef<[u8]> for IpBytes { fn as_ref(&self) -> &[u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } impl AsMut<[u8]> for IpBytes { fn as_mut(&mut self) -> &mut [u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } #[cfg(test)] mod tests { use bytecodec::{DecodeExt, EncodeExt}; use super::*; #[test] fn socket_addr_xor_works() { let transaction_id = TransactionId::new([ 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae, ]); let addr: SocketAddr = "192.0.2.1:32853".parse().unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "225.18.166.67:41287".parse().unwrap() ); let addr: SocketAddr = "[2001:db8:1234:5678:11:2233:4455:6677]:32853" .parse() .unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "[113:a9fa:a5d3:f179:bc25:f4b5:bed2:b9d9]:41287" .parse() .unwrap() ); } #[test] fn socket_addr_encoder_works() { let mut encoder = SocketAddrEncoder::new(); let v4addr = "127.0.0.1:80".parse().unwrap(); let bytes = encoder.encode_into_bytes(v4addr).unwrap(); assert_eq!(bytes, [0, 1, 0, 80, 127, 0, 0, 1]); let v6addr = "[::]:90".parse().unwrap(); let bytes = encoder.encode_into_bytes(v6addr).unwrap(); assert_eq!( bytes, [0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] ); } #[test] fn socket_addr_decoder_works() { let mut decoder = SocketAddrDecoder::new(); let v4addr = decoder .decode_from_bytes(&[0, 1, 0, 80, 127, 0, 0, 1]) .unwrap(); assert_eq!(v4addr.to_string(), "127.0.0.1:80"); let v6addr = decoder .decode_from_bytes(&[0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) .unwrap(); 
assert_eq!(v6addr.to_string(), "[::]:90"); } }
use crate::constants::MAGIC_COOKIE; use crate::TransactionId; use bytecodec::bytes::{BytesDecoder, BytesEncoder}; use bytecodec::combinator::Peekable; use bytecodec::fixnum::{U16beDecoder, U16beEncoder, U8Decoder, U8Encoder}; use bytecodec::{ByteCount, Decode, Encode, Eos, ErrorKind, Result, SizedEncode}; use std::net::{IpAddr, SocketAddr}; const FAMILY_IPV4: u8 = 1; const FAMILY_IPV6: u8 = 2; pub fn socket_addr_xor(addr: SocketAddr, transaction_id: TransactionId) -> SocketAddr { let xor_port = addr.port() ^ (MAGIC_COOKIE >> 16) as u16; match addr.ip() { IpAddr::V4(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate() { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V4(xor_ip), xor_port) } IpAddr::V6(ip) => { let mut octets = ip.octets(); for (i, b) in octets.iter_mut().enumerate().take(4) { *b ^= (MAGIC_COOKIE >> (24 - i * 8)) as u8; } for (i, b) in octets.iter_mut().enumerate().take(16).skip(4) { *b ^= transaction_id.as_bytes()[i - 4]; } let xor_ip = From::from(octets); SocketAddr::new(IpAddr::V6(xor_ip), xor_port) } } } #[derive(Debug, Default)] pub struct SocketAddrDecoder { unused: U8Decoder, family: Peekable<U8Decoder>, port: U16beDecoder, ip: BytesDecoder<IpBytes>, } impl SocketAddrDecoder { pub fn new() -> Self { Self::default() } } impl Decode for SocketAddrDecoder { type Item = SocketAddr; fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> { let mut offset = 0; if !self.family.is_idle() { bytecodec_try_decode!(self.unused, offset, buf, eos); bytecodec_try_decode!(self.family, offset, buf, eos); let family = self.family.peek().expect("never fails"); match *family { FAMILY_IPV4 => self.ip.set_bytes(IpBytes::V4([0; 4])), FAMILY_IPV6 => self.ip.set_bytes(IpBytes::V6([0; 16])), _ => track_panic!( ErrorKind::InvalidInput, "Unknown address family: {}", family ), } } bytecodec_try_decode!(self.port, offset, buf, eos); bytecodec_try_decode!(self.ip, offset, buf, eos); Ok(offset) } fn finish_decoding(&mut self) -> Result<Self::Item> { let _ = track!(self.unused.finish_decoding())?; let _ = track!(self.family.finish_decoding())?; let port = track!(self.port.finish_decoding())?; let ip = match track!(self.ip.finish_decoding())? 
{ IpBytes::V4(b) => IpAddr::V4(b.into()), IpBytes::V6(b) => IpAddr::V6(b.into()), }; Ok(SocketAddr::new(ip, port)) } fn requiring_bytes(&self) -> ByteCount { self.unused .requiring_bytes() .add_for_decoding(self.family.requiring_bytes()) .add_for_decoding(self.port.requiring_bytes()) .add_for_decoding(self.ip.requiring_bytes()) } fn is_idle(&self) -> bool { self.port.is_idle() && self.ip.is_idle() } } #[derive(Debug, Default)] pub struct SocketAddrEncoder { unused: U8Encoder, family: U8Encoder, port: U16beEncoder, ip: BytesEncoder<IpBytes>, } impl SocketAddrEncoder { pub fn new() -> Self { Self::default() } } impl Encode for SocketAddrEncoder { type Item = SocketAddr; fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> { let mut offset = 0; bytecodec_try_encode!(self.unused, offset, buf, eos); bytecodec_try_encode!(self.family, offset, buf, eos); bytecodec_try_encode!(self.port, offset, buf, eos); bytecodec_try_encode!(self.ip, offset, buf, eos); Ok(offset) } fn start_encoding(&mut self, item: Self::Item) -> Result<()> { track!(self.unused.start_encoding(0))?; if item.ip().is_ipv4() { track!(self.family.start_encoding(FAMILY_IPV4))?; } else { track!(self.family.start_encoding(FAMILY_IPV6))?; } track!(self.port.start_encoding(item.port()))?; track!(self.ip.start_encoding(IpBytes::new(item.ip())))?; Ok(()) } fn requiring_bytes(&self) -> ByteCount { ByteCount::Finite(self.exact_requiring_bytes()) } fn is_idle(&self) -> bool { self.ip.is_idle() } } impl SizedEncode for SocketAddrEncoder { fn exact_requiring_bytes(&self) -> u64 { self.unused.exact_requiring_bytes() + self.family.exact_requiring_bytes() + self.port.exact_requiring_bytes() + self.ip.exact_requiring_bytes() } } #[derive(Debug)] enum IpBytes { V4([u8; 4]), V6([u8; 16]), } impl IpBytes { fn new(ip: IpAddr) -> Self { match ip { IpAddr::V4(ip) => IpBytes::V4(ip.octets()), IpAddr::V6(ip) => IpBytes::V6(ip.octets()), } } } impl AsRef<[u8]> for IpBytes { fn as_ref(&self) -> &[u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } } } impl AsMut<[u8]> for IpBytes {
} #[cfg(test)] mod tests { use bytecodec::{DecodeExt, EncodeExt}; use super::*; #[test] fn socket_addr_xor_works() { let transaction_id = TransactionId::new([ 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae, ]); let addr: SocketAddr = "192.0.2.1:32853".parse().unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "225.18.166.67:41287".parse().unwrap() ); let addr: SocketAddr = "[2001:db8:1234:5678:11:2233:4455:6677]:32853" .parse() .unwrap(); assert_eq!( socket_addr_xor(addr, transaction_id), "[113:a9fa:a5d3:f179:bc25:f4b5:bed2:b9d9]:41287" .parse() .unwrap() ); } #[test] fn socket_addr_encoder_works() { let mut encoder = SocketAddrEncoder::new(); let v4addr = "127.0.0.1:80".parse().unwrap(); let bytes = encoder.encode_into_bytes(v4addr).unwrap(); assert_eq!(bytes, [0, 1, 0, 80, 127, 0, 0, 1]); let v6addr = "[::]:90".parse().unwrap(); let bytes = encoder.encode_into_bytes(v6addr).unwrap(); assert_eq!( bytes, [0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] ); } #[test] fn socket_addr_decoder_works() { let mut decoder = SocketAddrDecoder::new(); let v4addr = decoder .decode_from_bytes(&[0, 1, 0, 80, 127, 0, 0, 1]) .unwrap(); assert_eq!(v4addr.to_string(), "127.0.0.1:80"); let v6addr = decoder .decode_from_bytes(&[0, 2, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) .unwrap(); assert_eq!(v6addr.to_string(), "[::]:90"); } }
fn as_mut(&mut self) -> &mut [u8] { match self { IpBytes::V4(bytes) => bytes, IpBytes::V6(bytes) => bytes, } }
function_block-full_function
[ { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Type {\n\n class: MessageClass,\n\n method: Method,\n\n}\n\nimpl Type {\n\n fn as_u16(self) -> u16 {\n\n let class = self.class as u16;\n\n let method = self.method.as_u16();\n\n (method & 0b0000_0000_1111)\n\n | ((class & 0b01) << 4)\n\n | ((method & 0b0000_0111_0000) << 5)\n\n | ((class & 0b10) << 7)\n\n | ((method & 0b1111_1000_0000) << 9)\n\n }\n\n\n\n fn from_u16(value: u16) -> Result<Self> {\n\n track_assert!(\n\n value >> 14 == 0,\n\n ErrorKind::InvalidInput,\n\n \"First two-bits of STUN message must be 0\"\n", "file_path": "src/message.rs", "rank": 1, "score": 77447.3470217641 }, { "content": "#[derive(Debug, Default)]\n\nstruct MessageHeaderDecoder {\n\n message_type: U16beDecoder,\n\n message_len: U16beDecoder,\n\n magic_cookie: U32beDecoder,\n\n transaction_id: CopyableBytesDecoder<[u8; 12]>,\n\n}\n\nimpl MessageHeaderDecoder {\n\n fn check_magic_cookie(&self, magic_cookie: u32) -> Result<()> {\n\n track_assert_eq!(\n\n magic_cookie,\n\n MAGIC_COOKIE,\n\n ErrorKind::InvalidInput,\n\n \"Unexpected MAGIC_COOKIE: actual=0x{:08x}, expected=0x{:08x}\",\n\n magic_cookie,\n\n MAGIC_COOKIE,\n\n );\n\n Ok(())\n\n }\n\n}\n\nimpl Decode for MessageHeaderDecoder {\n", "file_path": "src/message.rs", "rank": 3, "score": 59383.93830652969 }, { "content": "#[derive(Debug)]\n\nstruct AttributesDecoder<A: Attribute> {\n\n inner: Collect<LosslessAttributeDecoder<A>, Vec<LosslessAttribute<A>>>,\n\n last_error: Option<Error>,\n\n is_eos: bool,\n\n}\n\nimpl<A: Attribute> Default for AttributesDecoder<A> {\n\n fn default() -> Self {\n\n AttributesDecoder {\n\n inner: Default::default(),\n\n last_error: None,\n\n is_eos: false,\n\n }\n\n }\n\n}\n\nimpl<A: Attribute> Decode for AttributesDecoder<A> {\n\n type Item = Vec<LosslessAttribute<A>>;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n if self.last_error.is_none() {\n\n match track!(self.inner.decode(buf, eos)) {\n", "file_path": "src/message.rs", "rank": 4, "score": 57148.83852866721 }, { "content": "/// This trait allows for attempting to a cheap reference-to-reference conversion.\n\npub trait TryAsRef<T> {\n\n /// Attempts to convert `self` to a reference to `T`.\n\n ///\n\n /// If it is not possible, this method will return `None`.\n\n fn try_as_ref(&self) -> Option<&T>;\n\n}\n", "file_path": "src/convert.rs", "rank": 5, "score": 31041.569704843212 }, { "content": "/// STUN attribute.\n\n///\n\n/// > **Attribute**: The STUN term for a Type-Length-Value (TLV) object that\n\n/// > can be added to a STUN message. Attributes are divided into two\n\n/// > types: comprehension-required and comprehension-optional. STUN\n\n/// > agents can safely ignore comprehension-optional attributes they\n\n/// > don't understand, but cannot successfully process a message if it\n\n/// > contains comprehension-required attributes that are not\n\n/// > understood.\n\n/// >\n\n/// > [RFC 5389 -- 5. Definitions]\n\n///\n\n/// [RFC 5389 -- 5. 
Definitions]: https://tools.ietf.org/html/rfc5389#section-5\n\npub trait Attribute: Sized + Clone {\n\n /// The decoder of the value part of the attribute.\n\n type Decoder: Default + TryTaggedDecode<Tag = AttributeType, Item = Self>;\n\n\n\n /// The encoder of the value part of the attribute.\n\n type Encoder: Default + SizedEncode<Item = Self>;\n\n\n\n /// Returns the type of the attribute.\n\n fn get_type(&self) -> AttributeType;\n\n\n\n /// This method is called before encoding the attribute.\n\n ///\n\n /// `message` is the message to which the attribute belongs.\n\n /// The message only contains the attributes preceding to `self`.\n\n ///\n\n /// The default implementation simply returns `Ok(())`.\n\n #[allow(unused_variables)]\n\n fn before_encode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n Ok(())\n\n }\n", "file_path": "src/attribute.rs", "rank": 6, "score": 30267.81351142318 }, { "content": "//! Miscellaneous types.\n\n\n\n/// Unsigned 12 bit integer.\n\n#[derive(Debug, Default, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]\n\npub struct U12(u16);\n\nimpl U12 {\n\n /// Converts from `u8` value.\n\n pub fn from_u8(value: u8) -> Self {\n\n U12(value as u16)\n\n }\n\n\n\n /// Tries to convert from `u16` value.\n\n ///\n\n /// If `value` is greater than `0xFFF`, this will return `None`.\n\n pub fn from_u16(value: u16) -> Option<Self> {\n\n if value < 0x1000 {\n\n Some(U12(value))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Converts to `u16` value.\n\n pub fn as_u16(&self) -> u16 {\n\n self.0\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 7, "score": 29988.861988949542 }, { "content": " fn is_idle(&self) -> bool {\n\n self.0.is_idle()\n\n }\n\n }\n\n impl TryTaggedDecode for $decoder {\n\n type Tag = AttributeType;\n\n\n\n fn try_start_decoding(&mut self, attr_type: Self::Tag) -> Result<bool> {\n\n Ok(attr_type.as_u16() == $item::CODEPOINT)\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! impl_encode {\n\n ($encoder:ty, $item:ty, $map_from:expr) => {\n\n impl Encode for $encoder {\n\n type Item = $item;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 12, "score": 40.32922429736421 }, { "content": " fn try_start_decoding(&mut self, attr_type: Self::Tag) -> Result<bool> {\n\n Ok(attr_type.as_u16() == $item::CODEPOINT)\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! 
impl_encode {\n\n ($encoder:ty, $item:ty, $map_from:expr) => {\n\n impl Encode for $encoder {\n\n type Item = $item;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n\n track!(self.0.encode(buf, eos))\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track!(self.0.start_encoding($map_from(item)))\n\n }\n\n\n", "file_path": "src/rfc5766/attributes.rs", "rank": 13, "score": 39.844577415594735 }, { "content": "impl Encode for MyAttributeEncoder {\n\n type Item = MyAttribute;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n\n let mut offset = 0;\n\n bytecodec_try_encode!(self.rfc5389, offset, buf, eos);\n\n bytecodec_try_encode!(self.rfc5766, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track_assert!(self.is_idle(), ErrorKind::EncoderFull);\n\n match item {\n\n MyAttribute::Rfc5389(item) => track!(self.rfc5389.start_encoding(item)),\n\n MyAttribute::Rfc5766(item) => track!(self.rfc5766.start_encoding(item)),\n\n }\n\n }\n\n\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.rfc5389\n", "file_path": "src/attribute.rs", "rank": 14, "score": 36.03836811415299 }, { "content": " value: RemainingBytesDecoder,\n\n}\n\nimpl RawAttributeDecoder {\n\n /// Makes a new `RawAttributeDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl Decode for RawAttributeDecoder {\n\n type Item = RawAttribute;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n track!(self.value.decode(buf, eos))\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n let attr_type = track_assert_some!(self.attr_type.take(), ErrorKind::InconsistentState);\n\n let value = track!(self.value.finish_decoding())?;\n\n Ok(RawAttribute { attr_type, value })\n\n }\n", "file_path": "src/attribute.rs", "rank": 16, "score": 35.892822507679675 }, { "content": "/// [`RawAttribute`]: ./struct.RawAttribute.html\n\n#[derive(Debug, Default)]\n\npub struct RawAttributeEncoder {\n\n value: BytesEncoder,\n\n}\n\nimpl RawAttributeEncoder {\n\n /// Makes a new `RawAttributeEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl Encode for RawAttributeEncoder {\n\n type Item = RawAttribute;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n\n track!(self.value.encode(buf, eos))\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track!(self.value.start_encoding(item.into_value()))\n", "file_path": "src/attribute.rs", "rank": 17, "score": 35.3528951333039 }, { "content": "\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n track!(self.0.decode(buf, eos))\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n track!(self.0.finish_decoding()).and_then($and_then)\n\n }\n\n\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.0.requiring_bytes()\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n self.0.is_idle()\n\n }\n\n }\n\n impl TryTaggedDecode for $decoder {\n\n type Tag = AttributeType;\n\n\n", "file_path": "src/rfc5766/attributes.rs", "rank": 18, "score": 35.227201440191365 }, { "content": " $($variant(<$variant as $crate::Attribute>::Encoder)),*,\n\n None,\n\n }\n\n impl $encoder {\n\n /// Makes a new encoder instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n }\n\n impl Default for $encoder {\n\n fn default() -> Self {\n\n $encoder::None\n\n }\n\n }\n\n impl ::bytecodec::Encode for $encoder {\n\n type 
Item = $attr;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: ::bytecodec::Eos) -> ::bytecodec::Result<usize> {\n\n match self {\n\n $($encoder::$variant(a) => track!(a.encode(buf, eos), \"attr={}\", stringify!($variant))),*,\n", "file_path": "src/macros.rs", "rank": 19, "score": 34.94006422063193 }, { "content": " let mut offset = 0;\n\n bytecodec_try_encode!(self.get_type, offset, buf, eos);\n\n bytecodec_try_encode!(self.value_len, offset, buf, eos);\n\n bytecodec_try_encode!(self.known_value, offset, buf, eos);\n\n bytecodec_try_encode!(self.unknown_value, offset, buf, eos);\n\n bytecodec_try_encode!(self.padding, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track!(self.get_type.start_encoding(item.get_type().as_u16()))?;\n\n let padding = match item {\n\n LosslessAttribute::Known { inner, padding } => {\n\n track!(self.known_value.start_encoding(inner))?;\n\n padding\n\n }\n\n LosslessAttribute::Unknown { inner, padding } => {\n\n track!(self.unknown_value.start_encoding(inner))?;\n\n padding\n\n }\n", "file_path": "src/attribute.rs", "rank": 20, "score": 34.871785183322054 }, { "content": "//! Attributes that are defined in [RFC 5245].\n\n//!\n\n//! [RFC 5245]: https://tools.ietf.org/html/rfc5245\n\nuse crate::attribute::{Attribute, AttributeType};\n\nuse bytecodec::fixnum::{U32beDecoder, U32beEncoder, U64beDecoder, U64beEncoder};\n\nuse bytecodec::null::{NullDecoder, NullEncoder};\n\nuse bytecodec::{ByteCount, Decode, Encode, Eos, Result, SizedEncode, TryTaggedDecode};\n\n\n\nmacro_rules! impl_decode {\n\n ($decoder:ty, $item:ident, $and_then:expr) => {\n\n impl Decode for $decoder {\n\n type Item = $item;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n track!(self.0.decode(buf, eos))\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n track!(self.0.finish_decoding()).and_then($and_then)\n\n }\n", "file_path": "src/rfc5245/attributes.rs", "rank": 21, "score": 34.239568796106035 }, { "content": "use std::net::SocketAddr;\n\nuse std::vec;\n\n\n\nmacro_rules! impl_decode {\n\n ($decoder:ty, $item:ident, $and_then:expr) => {\n\n impl Decode for $decoder {\n\n type Item = $item;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n track!(self.0.decode(buf, eos))\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n track!(self.0.finish_decoding()).and_then($and_then)\n\n }\n\n\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.0.requiring_bytes()\n\n }\n\n\n", "file_path": "src/rfc5389/attributes.rs", "rank": 22, "score": 33.8938815757059 }, { "content": "\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.0.requiring_bytes()\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n self.0.is_idle()\n\n }\n\n }\n\n impl TryTaggedDecode for $decoder {\n\n type Tag = AttributeType;\n\n\n\n fn try_start_decoding(&mut self, attr_type: Self::Tag) -> Result<bool> {\n\n Ok(attr_type.as_u16() == $item::CODEPOINT)\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! 
impl_encode {\n", "file_path": "src/rfc5245/attributes.rs", "rank": 23, "score": 33.73230960074813 }, { "content": " ($encoder:ty, $item:ty, $map_from:expr) => {\n\n impl Encode for $encoder {\n\n type Item = $item;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n\n track!(self.0.encode(buf, eos))\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track!(self.0.start_encoding($map_from(item)))\n\n }\n\n\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.0.requiring_bytes()\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n self.0.is_idle()\n\n }\n\n }\n", "file_path": "src/rfc5245/attributes.rs", "rank": 24, "score": 33.712864204580974 }, { "content": " impl $decoder {\n\n /// Makes a new decoder instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n }\n\n impl Default for $decoder {\n\n fn default() -> Self {\n\n $decoder::None\n\n }\n\n }\n\n impl ::bytecodec::Decode for $decoder {\n\n type Item = $attr;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: ::bytecodec::Eos) -> ::bytecodec::Result<usize> {\n\n match self {\n\n $($decoder::$variant(a) => track!(a.decode(buf, eos), \"attr={}\", stringify!($variant))),*,\n\n $decoder::None => track_panic!(::bytecodec::ErrorKind::InconsistentState),\n\n }\n\n }\n", "file_path": "src/macros.rs", "rank": 25, "score": 33.701488796263256 }, { "content": " type Item = (Type, u16, TransactionId);\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n let mut offset = 0;\n\n bytecodec_try_decode!(self.message_type, offset, buf, eos);\n\n bytecodec_try_decode!(self.message_len, offset, buf, eos);\n\n bytecodec_try_decode!(self.magic_cookie, offset, buf, eos);\n\n bytecodec_try_decode!(self.transaction_id, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n let message_type = track!(self.message_type.finish_decoding())?;\n\n let message_type = track!(Type::from_u16(message_type))?;\n\n let message_len = track!(self.message_len.finish_decoding())?;\n\n let magic_cookie = track!(self.magic_cookie.finish_decoding())?;\n\n let transaction_id = TransactionId::new(track!(self.transaction_id.finish_decoding())?);\n\n track!(self.check_magic_cookie(magic_cookie); message_type, message_len, transaction_id)?;\n\n Ok((message_type, message_len, transaction_id))\n\n }\n", "file_path": "src/message.rs", "rank": 26, "score": 33.375710541435694 }, { "content": " message.attributes.set_len(attributes_len);\n\n }\n\n Ok(message)\n\n }\n\n}\n\nimpl<A: Attribute> Default for MessageDecoder<A> {\n\n fn default() -> Self {\n\n MessageDecoder {\n\n header: Default::default(),\n\n attributes: Default::default(),\n\n }\n\n }\n\n}\n\nimpl<A: Attribute> Decode for MessageDecoder<A> {\n\n type Item = DecodedMessage<A>;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n let mut offset = 0;\n\n if !self.header.is_idle() {\n\n bytecodec_try_decode!(self.header, offset, buf, eos);\n", "file_path": "src/message.rs", "rank": 27, "score": 33.355698054774784 }, { "content": "impl<A: Attribute> Encode for MessageEncoder<A> {\n\n type Item = Message<A>;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n\n let mut offset = 0;\n\n bytecodec_try_encode!(self.message_type, offset, buf, eos);\n\n bytecodec_try_encode!(self.message_len, offset, buf, eos);\n\n bytecodec_try_encode!(self.magic_cookie, offset, buf, eos);\n\n bytecodec_try_encode!(self.transaction_id, offset, buf, eos);\n\n 
bytecodec_try_encode!(self.attributes, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn start_encoding(&mut self, mut item: Self::Item) -> Result<()> {\n\n let attributes_len = item.attributes.len();\n\n for i in 0..attributes_len {\n\n unsafe {\n\n item.attributes.set_len(i);\n\n let item_mut = &mut *(&mut item as *mut Message<A>);\n\n let attr = item_mut.attributes.get_unchecked_mut(i);\n", "file_path": "src/message.rs", "rank": 28, "score": 33.26061729331741 }, { "content": " pub fn hmac_sha1(&self) -> [u8; 20] {\n\n self.hmac_sha1\n\n }\n\n\n\n fn message_into_bytes<A: Attribute>(message: Message<A>) -> Result<Vec<u8>> {\n\n let mut bytes = track!(MessageEncoder::default().encode_into_bytes(message))?;\n\n let adjusted_len = bytes.len() - 20 /*msg header*/+ 4 /*attr header*/ + 20 /*hmac*/;\n\n BigEndian::write_u16(&mut bytes[2..4], adjusted_len as u16);\n\n Ok(bytes)\n\n }\n\n}\n\nimpl Attribute for MessageIntegrity {\n\n type Decoder = MessageIntegrityDecoder;\n\n type Encoder = MessageIntegrityEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n\n fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 29, "score": 32.44935731415317 }, { "content": "\n\n let message_len = self.header.peek().expect(\"never fails\").1;\n\n track!(self.attributes.set_expected_bytes(u64::from(message_len)))?;\n\n }\n\n bytecodec_try_decode!(self.attributes, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n let (Type { method, class }, _, transaction_id) = track!(self.header.finish_decoding())?;\n\n match self.finish_decoding_with_header(method, class, transaction_id) {\n\n Err(error) => Ok(Err(BrokenMessage {\n\n method,\n\n class,\n\n transaction_id,\n\n error,\n\n })),\n\n Ok(message) => Ok(Ok(message)),\n\n }\n\n }\n", "file_path": "src/message.rs", "rank": 31, "score": 31.823126125854756 }, { "content": " pub const CODEPOINT: u16 = 0x0018;\n\n\n\n /// Makes a new `EvenPort` instance.\n\n pub fn new(is_requested: bool) -> Self {\n\n EvenPort(is_requested)\n\n }\n\n\n\n /// Returns whether the client requested that the port in the relayed transport address be even.\n\n pub fn is_requested(&self) -> bool {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for EvenPort {\n\n type Decoder = EvenPortDecoder;\n\n type Encoder = EvenPortEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n", "file_path": "src/rfc5766/attributes.rs", "rank": 33, "score": 31.371281330475398 }, { "content": " pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(DataDecoder, Data, |item| Ok(Data(item)));\n\n\n\n/// [`Data`] encoder.\n\n///\n\n/// [`Data`]: ./struct.Data.html\n\n#[derive(Debug, Default)]\n\npub struct DataEncoder(BytesEncoder);\n\nimpl DataEncoder {\n\n /// Makes a new `DataEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(DataEncoder, Data, |item: Self::Item| item.0);\n\n\n\n/// `XOR-RELAY-ADDRESS` attribute.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 34, "score": 31.25940622296676 }, { "content": " track!(self.0.encode(buf, eos))\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> Result<()> {\n\n track!(self.0.start_encoding($map_from(item)))\n\n }\n\n\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.0.requiring_bytes()\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n self.0.is_idle()\n\n }\n\n 
}\n\n impl SizedEncode for $encoder {\n\n fn exact_requiring_bytes(&self) -> u64 {\n\n self.0.exact_requiring_bytes()\n\n }\n\n }\n", "file_path": "src/rfc5389/attributes.rs", "rank": 35, "score": 30.980637777739517 }, { "content": "impl UnknownAttributesDecoder {\n\n /// Makes a new `UnknownAttributesDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(UnknownAttributesDecoder, UnknownAttributes, |vs: Vec<\n\n u16,\n\n>| Ok(\n\n UnknownAttributes {\n\n unknowns: vs.into_iter().map(AttributeType::new).collect()\n\n }\n\n));\n\n\n\n/// [`UnknownAttributes`] encoder.\n\n///\n\n/// [`UnknownAttributes`]: ./struct.UnknownAttributes.html\n\n#[derive(Debug, Default)]\n\npub struct UnknownAttributesEncoder(PreEncode<Repeat<U16beEncoder, vec::IntoIter<u16>>>);\n\nimpl UnknownAttributesEncoder {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 36, "score": 30.476935404895258 }, { "content": "\n\n/// [`EvenPort`] decoder.\n\n///\n\n/// [`EvenPort`]: ./struct.EvenPort.html\n\n#[derive(Debug, Default)]\n\npub struct EvenPortDecoder(U8Decoder);\n\nimpl EvenPortDecoder {\n\n /// Makes a new `EvenPortDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(EvenPortDecoder, EvenPort, |item| Ok(EvenPort(\n\n (item & 0b1000_0000) != 0\n\n)));\n\n\n\n/// [`EvenPort`] encoder.\n\n///\n\n/// [`EvenPort`]: ./struct.EvenPort.html\n\n#[derive(Debug, Default)]\n", "file_path": "src/rfc5766/attributes.rs", "rank": 37, "score": 30.44295520468498 }, { "content": "}\n\nimpl Decode for MyAttributeDecoder {\n\n type Item = MyAttribute;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n\n match self.index {\n\n 1 => track!(self.rfc5389.decode(buf, eos)),\n\n 2 => track!(self.rfc5766.decode(buf, eos)),\n\n _ => track_panic!(ErrorKind::InconsistentState),\n\n }\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n let item = match self.index {\n\n 1 => track!(self.rfc5389.finish_decoding()).map(MyAttribute::Rfc5389)?,\n\n 2 => track!(self.rfc5766.finish_decoding()).map(MyAttribute::Rfc5766)?,\n\n _ => track_panic!(ErrorKind::InconsistentState),\n\n };\n\n self.index = 0;\n\n Ok(item)\n", "file_path": "src/attribute.rs", "rank": 38, "score": 30.034680741184296 }, { "content": "\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`MappedAddress`] decoder.\n\n///\n\n/// [`MappedAddress`]: ./struct.MappedAddress.html\n\n#[derive(Debug, Default)]\n\npub struct MappedAddressDecoder(SocketAddrDecoder);\n\nimpl MappedAddressDecoder {\n\n /// Makes a new `MappedAddressDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(MappedAddressDecoder, MappedAddress, |item| Ok(\n\n MappedAddress(item)\n\n));\n", "file_path": "src/rfc5389/attributes.rs", "rank": 39, "score": 29.8986515083306 }, { "content": " let mut offset = 0;\n\n if !self.value_len.is_idle() {\n\n bytecodec_try_decode!(self.get_type, offset, buf, eos);\n\n bytecodec_try_decode!(self.value_len, offset, buf, eos);\n\n\n\n let attr_type = AttributeType(track!(self.get_type.finish_decoding())?);\n\n let value_len = *self.value_len.peek().expect(\"never fails\");\n\n\n\n self.is_known = track!(self.known_value.inner_mut().try_start_decoding(attr_type))?;\n\n if self.is_known {\n\n track!(self.known_value.set_expected_bytes(u64::from(value_len)))?;\n\n } else {\n\n track!(self.unknown_value.inner_mut().try_start_decoding(attr_type))?; // must be `true`\n\n 
track!(self.unknown_value.set_expected_bytes(u64::from(value_len)))?;\n\n }\n\n self.padding.set_bytes(Padding::new(value_len as usize));\n\n }\n\n if self.is_known {\n\n bytecodec_try_decode!(self.known_value, offset, buf, eos);\n\n } else {\n", "file_path": "src/attribute.rs", "rank": 40, "score": 29.823351861576576 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"LosslessAttributeDecoder {{ .. }}\")\n\n }\n\n}\n\nimpl<T: Attribute> Default for LosslessAttributeDecoder<T> {\n\n fn default() -> Self {\n\n LosslessAttributeDecoder {\n\n get_type: Default::default(),\n\n value_len: Default::default(),\n\n is_known: false,\n\n known_value: Default::default(),\n\n unknown_value: Default::default(),\n\n padding: Default::default(),\n\n }\n\n }\n\n}\n\nimpl<T: Attribute> Decode for LosslessAttributeDecoder<T> {\n\n type Item = LosslessAttribute<T>;\n\n\n\n fn decode(&mut self, buf: &[u8], eos: Eos) -> Result<usize> {\n", "file_path": "src/attribute.rs", "rank": 41, "score": 29.71655295863578 }, { "content": "impl<T: Attribute> fmt::Debug for LosslessAttributeEncoder<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"LosslessAttributeEncoder {{ .. }}\")\n\n }\n\n}\n\nimpl<T: Attribute> Default for LosslessAttributeEncoder<T> {\n\n fn default() -> Self {\n\n LosslessAttributeEncoder {\n\n get_type: Default::default(),\n\n value_len: Default::default(),\n\n known_value: Default::default(),\n\n unknown_value: Default::default(),\n\n padding: Default::default(),\n\n }\n\n }\n\n}\n\nimpl<T: Attribute> Encode for LosslessAttributeEncoder<T> {\n\n type Item = LosslessAttribute<T>;\n\n\n\n fn encode(&mut self, buf: &mut [u8], eos: Eos) -> Result<usize> {\n", "file_path": "src/attribute.rs", "rank": 42, "score": 29.372254850046357 }, { "content": "impl LifetimeDecoder {\n\n /// Makes a new `LifetimeDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(LifetimeDecoder, Lifetime, |item| Ok(Lifetime(\n\n Duration::from_secs(u64::from(item))\n\n)));\n\n\n\n/// [`Lifetime`] encoder.\n\n///\n\n/// [`Lifetime`]: ./struct.Lifetime.html\n\n#[derive(Debug, Default)]\n\npub struct LifetimeEncoder(U32beEncoder);\n\nimpl LifetimeEncoder {\n\n /// Makes a new `LifetimeEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n", "file_path": "src/rfc5766/attributes.rs", "rank": 43, "score": 29.144925250499146 }, { "content": "\n\n fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n}\n\n\n\n/// [`XorPeerAddress`] decoder.\n\n///\n\n/// [`XorPeerAddress`]: ./struct.XorPeerAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorPeerAddressDecoder(SocketAddrDecoder);\n\nimpl XorPeerAddressDecoder {\n\n /// Makes a new `XorPeerAddressDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(XorPeerAddressDecoder, XorPeerAddress, |item| Ok(\n\n XorPeerAddress(item)\n", "file_path": "src/rfc5766/attributes.rs", "rank": 44, "score": 28.626216058489682 }, { "content": " /// Makes a new `XorMappedAddressDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(XorMappedAddress2Decoder, XorMappedAddress2, |item| Ok(\n\n XorMappedAddress2(item)\n\n));\n\n\n\n/// [`XorMappedAddress`] encoder.\n\n///\n\n/// [`XorMappedAddress`]: ./struct.XorMappedAddress.html\n\n#[derive(Debug, Default)]\n\npub struct 
XorMappedAddress2Encoder(SocketAddrEncoder);\n\nimpl XorMappedAddress2Encoder {\n\n /// Makes a new `XorMappedAddressEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n\n XorMappedAddress2Encoder,\n\n XorMappedAddress2,\n\n |item: Self::Item| item.0\n\n);\n", "file_path": "src/rfc5389/attributes.rs", "rank": 45, "score": 28.562224818946095 }, { "content": " $decoder::None => true,\n\n }\n\n }\n\n }\n\n impl ::bytecodec::TryTaggedDecode for $decoder {\n\n type Tag = $crate::AttributeType;\n\n\n\n fn try_start_decoding(&mut self, tag: Self::Tag) -> ::bytecodec::Result<bool> {\n\n *self = match tag.as_u16() {\n\n $($variant::CODEPOINT => $decoder::$variant(<$variant as $crate::Attribute>::Decoder::default())),*,\n\n _ => return Ok(false),\n\n };\n\n Ok(true)\n\n }\n\n }\n\n\n\n /// Attribute set encoder.\n\n #[allow(missing_docs)]\n\n #[derive(Debug)]\n\n pub enum $encoder {\n", "file_path": "src/macros.rs", "rank": 46, "score": 28.517708027853423 }, { "content": " /// Returns the address of this instance.\n\n pub fn address(&self) -> SocketAddr {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for XorMappedAddress {\n\n type Decoder = XorMappedAddressDecoder;\n\n type Encoder = XorMappedAddressEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n\n fn before_encode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n\n\n fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n", "file_path": "src/rfc5389/attributes.rs", "rank": 47, "score": 28.422220881185076 }, { "content": "impl Attribute for XorRelayAddress {\n\n type Decoder = XorRelayAddressDecoder;\n\n type Encoder = XorRelayAddressEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n\n fn before_encode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n\n\n fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n}\n\n\n\n/// [`XorRelayAddress`] decoder.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 48, "score": 27.916241318820695 }, { "content": "///\n\n/// [`XorRelayAddress`]: ./struct.XorRelayAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorRelayAddressDecoder(SocketAddrDecoder);\n\nimpl XorRelayAddressDecoder {\n\n /// Makes a new `XorRelayAddressDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(XorRelayAddressDecoder, XorRelayAddress, |item| Ok(\n\n XorRelayAddress(item)\n\n));\n\n\n\n/// [`XorRelayAddress`] encoder.\n\n///\n\n/// [`XorRelayAddress`]: ./struct.XorRelayAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorRelayAddressEncoder(SocketAddrEncoder);\n\nimpl XorRelayAddressEncoder {\n", "file_path": "src/rfc5766/attributes.rs", "rank": 49, "score": 27.845416207152855 }, { "content": "pub struct UseCandidateDecoder(NullDecoder);\n\n\n\nimpl UseCandidateDecoder {\n\n /// Makes a new `UseCandidateDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(UseCandidateDecoder, UseCandidate, |_| Ok(UseCandidate));\n\n\n\n/// [`UseCandidate`] encoder.\n\n///\n\n/// [`UseCandidate`]: 
./struct.UseCandidate.html\n\n#[derive(Debug, Default)]\n\npub struct UseCandidateEncoder(NullEncoder);\n\n\n\nimpl UseCandidateEncoder {\n\n /// Makes a new `UseCandidateEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n", "file_path": "src/rfc5245/attributes.rs", "rank": 50, "score": 27.844172530560577 }, { "content": "\n\n fn requiring_bytes(&self) -> ByteCount {\n\n self.value.requiring_bytes()\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n self.value.is_idle()\n\n }\n\n}\n\nimpl TryTaggedDecode for RawAttributeDecoder {\n\n type Tag = AttributeType;\n\n\n\n fn try_start_decoding(&mut self, attr_type: Self::Tag) -> Result<bool> {\n\n self.attr_type = Some(attr_type);\n\n Ok(true)\n\n }\n\n}\n\n\n\n/// [`RawAttribute`] encoder.\n\n///\n", "file_path": "src/attribute.rs", "rank": 51, "score": 27.71191637427691 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\n/// [`XorMappedAddress`] decoder.\n\n///\n\n/// [`XorMappedAddress`]: ./struct.XorMappedAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorMappedAddressDecoder(SocketAddrDecoder);\n\nimpl XorMappedAddressDecoder {\n\n /// Makes a new `XorMappedAddressDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(XorMappedAddressDecoder, XorMappedAddress, |item| Ok(\n\n XorMappedAddress(item)\n\n));\n\n\n\n/// [`XorMappedAddress`] encoder.\n", "file_path": "src/rfc5389/attributes.rs", "rank": 52, "score": 27.708198674437178 }, { "content": " XorPeerAddress(addr)\n\n }\n\n\n\n /// Returns the address specified by the attribute.\n\n pub fn address(&self) -> SocketAddr {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for XorPeerAddress {\n\n type Decoder = XorPeerAddressDecoder;\n\n type Encoder = XorPeerAddressEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n\n fn before_encode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n", "file_path": "src/rfc5766/attributes.rs", "rank": 53, "score": 27.667552885677335 }, { "content": " pub fn data(&self) -> &[u8] {\n\n &self.0\n\n }\n\n}\n\nimpl Attribute for Data {\n\n type Decoder = DataDecoder;\n\n type Encoder = DataEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`Data`] decoder.\n\n///\n\n/// [`Data`]: ./struct.Data.html\n\n#[derive(Debug, Default)]\n\npub struct DataDecoder(RemainingBytesDecoder);\n\nimpl DataDecoder {\n\n /// Makes a new `DataDecoder` instance.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 54, "score": 27.581515294832972 }, { "content": "//! Attributes that are defined in [RFC 5766 -- 14. New STUN Attributes].\n\n//!\n\n//! [RFC 5766 -- 14. New STUN Attributes]: https://tools.ietf.org/html/rfc5766#section-14\n\n\n\nuse crate::attribute::{Attribute, AttributeType};\n\nuse crate::message::Message;\n\nuse crate::net::{socket_addr_xor, SocketAddrDecoder, SocketAddrEncoder};\n\nuse bytecodec::bytes::{BytesEncoder, RemainingBytesDecoder};\n\nuse bytecodec::fixnum::{\n\n U32beDecoder, U32beEncoder, U64beDecoder, U64beEncoder, U8Decoder, U8Encoder,\n\n};\n\nuse bytecodec::null::{NullDecoder, NullEncoder};\n\nuse bytecodec::{ByteCount, Decode, Encode, Eos, ErrorKind, Result, SizedEncode, TryTaggedDecode};\n\nuse std::net::SocketAddr;\n\nuse std::time::Duration;\n\n\n\nmacro_rules! 
impl_decode {\n\n ($decoder:ty, $item:ident, $and_then:expr) => {\n\n impl Decode for $decoder {\n\n type Item = $item;\n", "file_path": "src/rfc5766/attributes.rs", "rank": 55, "score": 27.52896729283711 }, { "content": "\n\n /// Returns the unknown attribute types of this instance.\n\n pub fn unknowns(&self) -> &[AttributeType] {\n\n &self.unknowns\n\n }\n\n}\n\nimpl Attribute for UnknownAttributes {\n\n type Decoder = UnknownAttributesDecoder;\n\n type Encoder = UnknownAttributesEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`UnknownAttributes`] decoder.\n\n///\n\n/// [`UnknownAttributes`]: ./struct.UnknownAttributes.html\n\n#[derive(Debug, Default)]\n\npub struct UnknownAttributesDecoder(Collect<U16beDecoder, Vec<u16>>);\n", "file_path": "src/rfc5389/attributes.rs", "rank": 56, "score": 27.376698871166777 }, { "content": " bytecodec_try_decode!(self.unknown_value, offset, buf, eos);\n\n }\n\n bytecodec_try_decode!(self.padding, offset, buf, eos);\n\n Ok(offset)\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n let _ = track!(self.value_len.finish_decoding())?;\n\n let padding = track!(self.padding.finish_decoding())?;\n\n if self.is_known {\n\n let value = track!(self.known_value.finish_decoding())?;\n\n Ok(LosslessAttribute::Known {\n\n inner: value,\n\n padding: Some(padding),\n\n })\n\n } else {\n\n let value = track!(self.unknown_value.finish_decoding())?;\n\n Ok(LosslessAttribute::Unknown {\n\n inner: value,\n\n padding: Some(padding),\n", "file_path": "src/attribute.rs", "rank": 57, "score": 27.216244413332564 }, { "content": " let mut bytes = track!(MessageEncoder::default().encode_into_bytes(message.clone()))?;\n\n let final_len = bytes.len() as u16 - 20 + 8; // Adds `Fingerprint` attribute length\n\n BigEndian::write_u16(&mut bytes[2..4], final_len);\n\n let crc32 = crc32::checksum_ieee(&bytes[..]) ^ 0x5354_554e;\n\n Ok(Fingerprint { crc32 })\n\n }\n\n\n\n /// Returns the crc32 value of this instance.\n\n pub fn crc32(&self) -> u32 {\n\n self.crc32\n\n }\n\n}\n\nimpl Attribute for Fingerprint {\n\n type Decoder = FingerprintDecoder;\n\n type Encoder = FingerprintEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n", "file_path": "src/rfc5389/attributes.rs", "rank": 58, "score": 27.160252489870608 }, { "content": " && self.padding.is_idle()\n\n }\n\n}\n\nimpl<T: Attribute> SizedEncode for LosslessAttributeEncoder<T> {\n\n fn exact_requiring_bytes(&self) -> u64 {\n\n self.get_type.exact_requiring_bytes()\n\n + self.value_len.exact_requiring_bytes()\n\n + self.known_value.exact_requiring_bytes()\n\n + self.unknown_value.exact_requiring_bytes()\n\n + self.padding.exact_requiring_bytes()\n\n }\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct Padding {\n\n buf: [u8; 3],\n\n len: usize,\n\n}\n\nimpl Padding {\n\n fn new(value_len: usize) -> Self {\n", "file_path": "src/attribute.rs", "rank": 59, "score": 27.119601121676617 }, { "content": " AttributeType::new(Self::CODEPOINT)\n\n }\n\n\n\n fn before_encode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n\n\n fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n self.0 = socket_addr_xor(self.0, message.transaction_id());\n\n Ok(())\n\n }\n\n}\n\n\n\n/// [`XorMappedAddress2`] decoder.\n\n///\n\n/// [`XorMappedAddress2`]: 
./struct.XorMappedAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorMappedAddress2Decoder(SocketAddrDecoder);\n\nimpl XorMappedAddress2Decoder {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 61, "score": 26.89964406601652 }, { "content": " }\n\n}\n\nimpl Attribute for UseCandidate {\n\n type Decoder = UseCandidateDecoder;\n\n type Encoder = UseCandidateEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\nimpl Default for UseCandidate {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\n/// [`UseCandidate`] decoder.\n\n///\n\n/// [`UseCandidate`]: ./struct.UseCandidate.html\n\n#[derive(Debug, Default)]\n", "file_path": "src/rfc5245/attributes.rs", "rank": 62, "score": 26.513455158573812 }, { "content": " /// Makes a new `XorRelayAddressEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n\n XorRelayAddressEncoder,\n\n XorRelayAddress,\n\n |item: Self::Item| item.0\n\n);\n\n\n\n/// `EVEN-PORT` attribute.\n\n///\n\n/// See [RFC 5766 -- 14.6. EVEN-PORT] about this attribute.\n\n///\n\n/// [RFC 5766 -- 14.6. EVEN-PORT]: https://tools.ietf.org/html/rfc5766#section-14.6\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EvenPort(bool);\n\nimpl EvenPort {\n\n /// The codepoint of the type of the attribute.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 63, "score": 26.22742595365073 }, { "content": " pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(AlternateServerDecoder, AlternateServer, |item| Ok(\n\n AlternateServer(item)\n\n));\n\n\n\n/// [`AlternateServer`] encoder.\n\n///\n\n/// [`AlternateServer`]: ./struct.AlternateServer.html\n\n#[derive(Debug, Default)]\n\npub struct AlternateServerEncoder(SocketAddrEncoder);\n\nimpl AlternateServerEncoder {\n\n /// Makes a new `AlternateServerEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n", "file_path": "src/rfc5389/attributes.rs", "rank": 64, "score": 25.930354316093517 }, { "content": "\n\nimpl IceControllingDecoder {\n\n /// Makes a new `IceControllingDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(IceControllingDecoder, IceControlling, |prio| Ok(\n\n IceControlling(prio)\n\n));\n\n\n\n/// [`IceControlling`] encoder.\n\n///\n\n/// [`IceControlling`]: ./struct.IceControlling.html\n\n#[derive(Debug, Default)]\n\npub struct IceControllingEncoder(U64beEncoder);\n\n\n\nimpl IceControllingEncoder {\n\n /// Makes a new `IceControllingEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(IceControllingEncoder, IceControlling, |item: Self::Item| {\n\n item.0\n\n});\n", "file_path": "src/rfc5245/attributes.rs", "rank": 65, "score": 25.805262492548373 }, { "content": " /// Returns the alternate address.\n\n pub fn prio(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for IceControlled {\n\n type Decoder = IceControlledDecoder;\n\n type Encoder = IceControlledEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`IceControlled`] decoder.\n\n///\n\n/// [`IceControlled`]: ./struct.IceControlled.html\n\n#[derive(Debug, Default)]\n\npub struct IceControlledDecoder(U64beDecoder);\n\n\n", "file_path": "src/rfc5245/attributes.rs", "rank": 66, "score": 25.785679810973054 }, { "content": "\n\n /// Returns the alternate address.\n\n pub fn prio(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for 
IceControlling {\n\n type Decoder = IceControllingDecoder;\n\n type Encoder = IceControllingEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`IceControlling`] decoder.\n\n///\n\n/// [`IceControlling`]: ./struct.IceControlling.html\n\n#[derive(Debug, Default)]\n\npub struct IceControllingDecoder(U64beDecoder);\n", "file_path": "src/rfc5245/attributes.rs", "rank": 67, "score": 25.785679810973058 }, { "content": "\n\n/// [`RequestedTransport`] decoder.\n\n///\n\n/// [`RequestedTransport`]: ./struct.RequestedTransport.html\n\n#[derive(Debug, Default)]\n\npub struct RequestedTransportDecoder(U32beDecoder);\n\nimpl RequestedTransportDecoder {\n\n /// Makes a new `RequestedTransportDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(RequestedTransportDecoder, RequestedTransport, |item| Ok(\n\n RequestedTransport((item >> 24) as u8)\n\n));\n\n\n\n/// [`RequestedTransport`] encoder.\n\n///\n\n/// [`RequestedTransport`]: ./struct.RequestedTransport.html\n\n#[derive(Debug, Default)]\n", "file_path": "src/rfc5766/attributes.rs", "rank": 68, "score": 25.696372282230822 }, { "content": "impl_decode!(ReservationTokenDecoder, ReservationToken, |item| Ok(\n\n ReservationToken(item)\n\n));\n\n\n\n/// [`ReservationToken`] encoder.\n\n///\n\n/// [`ReservationToken`]: ./struct.ReservationToken.html\n\n#[derive(Debug, Default)]\n\npub struct ReservationTokenEncoder(U64beEncoder);\n\nimpl ReservationTokenEncoder {\n\n /// Makes a new `ReservationTokenEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n\n ReservationTokenEncoder,\n\n ReservationToken,\n\n |item: Self::Item| item.0\n\n);\n", "file_path": "src/rfc5766/attributes.rs", "rank": 69, "score": 25.505972800015606 }, { "content": " pub const CODEPOINT: u16 = 0x0019;\n\n\n\n /// Makes a new `RequestedTransport` instance.\n\n pub fn new(protocol: u8) -> Self {\n\n RequestedTransport(protocol)\n\n }\n\n\n\n /// Returns the transport protocol requested by the client.\n\n pub fn protocol(&self) -> u8 {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for RequestedTransport {\n\n type Decoder = RequestedTransportDecoder;\n\n type Encoder = RequestedTransportEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n", "file_path": "src/rfc5766/attributes.rs", "rank": 70, "score": 25.42513726188702 }, { "content": " }\n\n));\n\n\n\n/// [`MessageIntegrity`] encoder.\n\n///\n\n/// [`MessageIntegrity`]: ./struct.MessageIntegrity.html\n\n#[derive(Debug, Default)]\n\npub struct MessageIntegrityEncoder(BytesEncoder<[u8; 20]>);\n\nimpl MessageIntegrityEncoder {\n\n /// Makes a new `MessageIntegrityEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n\n MessageIntegrityEncoder,\n\n MessageIntegrity,\n\n |item: Self::Item| item.hmac_sha1\n\n);\n\n\n", "file_path": "src/rfc5389/attributes.rs", "rank": 71, "score": 25.30224726367021 }, { "content": "///\n\n/// [`ChannelNumber`]: ./struct.ChannelNumber.html\n\n#[derive(Debug, Default)]\n\npub struct ChannelNumberDecoder(U32beDecoder);\n\nimpl ChannelNumberDecoder {\n\n /// Makes a new `ChannelNumberDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(ChannelNumberDecoder, ChannelNumber, |item| track!(\n\n ChannelNumber::new((item >> 16) as u16)\n\n));\n\n\n\n/// [`ChannelNumber`] encoder.\n\n///\n\n/// [`ChannelNumber`]: 
./struct.ChannelNumber.html\n\n#[derive(Debug, Default)]\n\npub struct ChannelNumberEncoder(U32beEncoder);\n\nimpl ChannelNumberEncoder {\n", "file_path": "src/rfc5766/attributes.rs", "rank": 72, "score": 25.298877103005868 }, { "content": " Err(e) => {\n\n self.last_error = Some(e);\n\n }\n\n Ok(size) => return Ok(size),\n\n }\n\n }\n\n\n\n // Skips remaining bytes if an error occurred\n\n self.is_eos = eos.is_reached();\n\n Ok(buf.len())\n\n }\n\n\n\n fn finish_decoding(&mut self) -> Result<Self::Item> {\n\n self.is_eos = false;\n\n if let Some(e) = self.last_error.take() {\n\n return Err(track!(e));\n\n }\n\n track!(self.inner.finish_decoding())\n\n }\n\n\n", "file_path": "src/message.rs", "rank": 73, "score": 25.271337061195858 }, { "content": "\n\n fn finish_decoding(&mut self) -> ::bytecodec::Result<Self::Item> {\n\n let item = match self {\n\n $($decoder::$variant(a) => track!(a.finish_decoding(), \"attr={}\", stringify!($variant))?.into()),*,\n\n $decoder::None => track_panic!(::bytecodec::ErrorKind::IncompleteDecoding),\n\n };\n\n *self = $decoder::None;\n\n Ok(item)\n\n }\n\n\n\n fn requiring_bytes(&self) -> ::bytecodec::ByteCount {\n\n match self {\n\n $($decoder::$variant(a) => a.requiring_bytes()),*,\n\n $decoder::None => ::bytecodec::ByteCount::Finite(0),\n\n }\n\n }\n\n\n\n fn is_idle(&self) -> bool {\n\n match self {\n\n $($decoder::$variant(a) => a.is_idle()),*,\n", "file_path": "src/macros.rs", "rank": 74, "score": 25.099053304120716 }, { "content": " $encoder::None => Ok(0),\n\n }\n\n }\n\n\n\n fn start_encoding(&mut self, item: Self::Item) -> ::bytecodec::Result<()> {\n\n track_assert!(self.is_idle(), ::bytecodec::ErrorKind::EncoderFull; item);\n\n *self = match item {\n\n $($attr::$variant(a) => {\n\n let mut encoder = <$variant as $crate::Attribute>::Encoder::default();\n\n track!(encoder.start_encoding(a), \"attr={}\", stringify!($variant))?;\n\n $encoder::$variant(encoder)\n\n }),*\n\n };\n\n Ok(())\n\n }\n\n\n\n fn requiring_bytes(&self) -> ::bytecodec::ByteCount {\n\n use ::bytecodec::SizedEncode;\n\n ::bytecodec::ByteCount::Finite(self.exact_requiring_bytes())\n\n }\n", "file_path": "src/macros.rs", "rank": 75, "score": 24.825492874575254 }, { "content": "pub struct EvenPortEncoder(U8Encoder);\n\nimpl EvenPortEncoder {\n\n /// Makes a new `EvenPortEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(EvenPortEncoder, EvenPort, |item: Self::Item| u8::from(\n\n item.0\n\n) << 7);\n\n\n\n/// `REQUESTED-TRANSPORT` attribute.\n\n///\n\n/// See [RFC 5766 -- 14.7. REQUESTED-TRANSPORT] about this attribute.\n\n///\n\n/// [RFC 5766 -- 14.7. REQUESTED-TRANSPORT]: https://tools.ietf.org/html/rfc5766#section-14.7\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct RequestedTransport(u8);\n\nimpl RequestedTransport {\n\n /// The codepoint of the type of the attribute.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 76, "score": 24.780590653895853 }, { "content": "impl_encode!(SoftwareEncoder, Software, |item: Self::Item| item\n\n .description);\n\n\n\n/// `UNKNOWN-ATTRIBUTES` attribute.\n\n///\n\n/// See [RFC 5389 -- 15.9. UNKNOWN-ATTRIBUTES] about this attribute.\n\n///\n\n/// [RFC 5389 -- 15.9. 
UNKNOWN-ATTRIBUTES]: https://tools.ietf.org/html/rfc5389#section-15.9\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct UnknownAttributes {\n\n unknowns: Vec<AttributeType>,\n\n}\n\nimpl UnknownAttributes {\n\n /// The codepoint of the type of the attribute.\n\n pub const CODEPOINT: u16 = 0x000A;\n\n\n\n /// Makes a new `UnknownAttributes` instance.\n\n pub fn new(unknowns: Vec<AttributeType>) -> Self {\n\n UnknownAttributes { unknowns }\n\n }\n", "file_path": "src/rfc5389/attributes.rs", "rank": 77, "score": 24.76448141042012 }, { "content": " Self::default()\n\n }\n\n}\n\nimpl_decode!(RealmDecoder, Realm, Realm::new);\n\n\n\n/// [`Realm`] encoder.\n\n///\n\n/// [`Realm`]: ./struct.Realm.html\n\n#[derive(Debug, Default)]\n\npub struct RealmEncoder(Utf8Encoder);\n\nimpl RealmEncoder {\n\n /// Makes a new `RealmEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(RealmEncoder, Realm, |item: Self::Item| item.text);\n\n\n\n/// `SOFTWARE` attribute.\n\n///\n", "file_path": "src/rfc5389/attributes.rs", "rank": 78, "score": 24.65224599529236 }, { "content": " self.preceding_message_bytes = track!(Self::message_into_bytes(message.clone()))?;\n\n Ok(())\n\n }\n\n}\n\n\n\n/// [`MessageIntegrity`] decoder.\n\n///\n\n/// [`MessageIntegrity`]: ./struct.MessageIntegrity.html\n\n#[derive(Debug, Default)]\n\npub struct MessageIntegrityDecoder(CopyableBytesDecoder<[u8; 20]>);\n\nimpl MessageIntegrityDecoder {\n\n /// Makes a new `MessageIntegrityDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(MessageIntegrityDecoder, MessageIntegrity, |hmac_sha1| Ok(\n\n MessageIntegrity {\n\n hmac_sha1,\n\n preceding_message_bytes: Vec::new() // dummy\n", "file_path": "src/rfc5389/attributes.rs", "rank": 79, "score": 24.412451331313683 }, { "content": " pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(DontFragmentDecoder, DontFragment, |()| Ok(DontFragment));\n\n\n\n/// [`DontFragment`] encoder.\n\n///\n\n/// [`DontFragment`]: ./struct.DontFragment.html\n\n#[derive(Debug, Default)]\n\npub struct DontFragmentEncoder(NullEncoder);\n\nimpl DontFragmentEncoder {\n\n /// Makes a new `DontFragmentEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(DontFragmentEncoder, DontFragment, |_: Self::Item| ());\n\n\n\n/// `RESERVATION-TOKEN` attribute.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 80, "score": 24.41150048214805 }, { "content": "}));\n\n\n\n/// [`Fingerprint`] encoder.\n\n///\n\n/// [`Fingerprint`]: ./struct.Fingerprint.html\n\n#[derive(Debug, Default)]\n\npub struct FingerprintEncoder(U32beEncoder);\n\nimpl FingerprintEncoder {\n\n /// Makes a new `FingerprintEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(FingerprintEncoder, Fingerprint, |item: Self::Item| item\n\n .crc32);\n\n\n\n/// `MAPPED-ADDRESS` attribute.\n\n///\n\n/// See [RFC 5389 -- 15.1. 
MAPPED-ADDRESS] about this attribute.\n\n///\n", "file_path": "src/rfc5389/attributes.rs", "rank": 81, "score": 24.293984899500668 }, { "content": " &self.text\n\n }\n\n}\n\nimpl Attribute for Realm {\n\n type Decoder = RealmDecoder;\n\n type Encoder = RealmEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`Realm`] decoder.\n\n///\n\n/// [`Realm`]: ./struct.Realm.html\n\n#[derive(Debug, Default)]\n\npub struct RealmDecoder(Utf8Decoder);\n\nimpl RealmDecoder {\n\n /// Makes a new `RealmDecoder` instance.\n\n pub fn new() -> Self {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 82, "score": 24.158956318975264 }, { "content": " /// Makes a new `UnknownAttributesEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(\n\n UnknownAttributesEncoder,\n\n UnknownAttributes,\n\n |item: Self::Item| item\n\n .unknowns\n\n .into_iter()\n\n .map(|ty| ty.as_u16())\n\n .collect::<Vec<_>>()\n\n .into_iter()\n\n);\n\n\n\n/// `USERNAME` attribute.\n\n///\n\n/// See [RFC 5389 -- 15.3. USERNAME] about this attribute.\n\n///\n", "file_path": "src/rfc5389/attributes.rs", "rank": 83, "score": 24.03756900493748 }, { "content": " pub fn new(prio: u32) -> Self {\n\n Priority(prio)\n\n }\n\n\n\n /// Returns the alternate address.\n\n pub fn prio(&self) -> u32 {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for Priority {\n\n type Decoder = PriorityDecoder;\n\n type Encoder = PriorityEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`Priority`] decoder.\n\n///\n", "file_path": "src/rfc5245/attributes.rs", "rank": 84, "score": 24.019689969164713 }, { "content": "));\n\n\n\n/// [`XorPeerAddress`] encoder.\n\n///\n\n/// [`XorPeerAddress`]: ./struct.XorPeerAddress.html\n\n#[derive(Debug, Default)]\n\npub struct XorPeerAddressEncoder(SocketAddrEncoder);\n\nimpl XorPeerAddressEncoder {\n\n /// Makes a new `XorPeerAddressEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(XorPeerAddressEncoder, XorPeerAddress, |item: Self::Item| {\n\n item.0\n\n});\n\n\n\n/// `DATA` attribute.\n\n///\n\n/// See [RFC 5766 -- 14.4. 
DATA] about this attribute.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 85, "score": 23.900717108616384 }, { "content": "/// [`Priority`]: ./struct.Priority.html\n\n#[derive(Debug, Default)]\n\npub struct PriorityDecoder(U32beDecoder);\n\n\n\nimpl PriorityDecoder {\n\n /// Makes a new `PriorityDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(PriorityDecoder, Priority, |prio| Ok(Priority(prio)));\n\n\n\n/// [`Priority`] encoder.\n\n///\n\n/// [`Priority`]: ./struct.Priority.html\n\n#[derive(Debug, Default)]\n\npub struct PriorityEncoder(U32beEncoder);\n\n\n\nimpl PriorityEncoder {\n\n /// Makes a new `PriorityEncoder` instance.\n", "file_path": "src/rfc5245/attributes.rs", "rank": 86, "score": 23.852150024622865 }, { "content": " pub fn address(&self) -> SocketAddr {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for AlternateServer {\n\n type Decoder = AlternateServerDecoder;\n\n type Encoder = AlternateServerEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`AlternateServer`] decoder.\n\n///\n\n/// [`AlternateServer`]: ./struct.AlternateServer.html\n\n#[derive(Debug, Default)]\n\npub struct AlternateServerDecoder(SocketAddrDecoder);\n\nimpl AlternateServerDecoder {\n\n /// Makes a new `AlternateServerDecoder` instance.\n", "file_path": "src/rfc5389/attributes.rs", "rank": 87, "score": 23.767624899910366 }, { "content": " fn after_decode<A: Attribute>(&mut self, message: &Message<A>) -> Result<()> {\n\n let actual = track!(Self::new(message))?;\n\n track_assert_eq!(actual.crc32, self.crc32, ErrorKind::InvalidInput);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// [`Fingerprint`] decoder.\n\n///\n\n/// [`Fingerprint`]: ./struct.Fingerprint.html\n\n#[derive(Debug, Default)]\n\npub struct FingerprintDecoder(U32beDecoder);\n\nimpl FingerprintDecoder {\n\n /// Makes a new `FingerprintDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(FingerprintDecoder, Fingerprint, |crc32| Ok(Fingerprint {\n\n crc32\n", "file_path": "src/rfc5389/attributes.rs", "rank": 88, "score": 23.715212591115005 }, { "content": " pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(PriorityEncoder, Priority, |item: Self::Item| item.0);\n\n\n\n/// `USE-CANDIDATE` attribute.\n\n///\n\n/// See [RFC 5245 -- 7.1.2.1 USE-CANDIDATE] about this attribute.\n\n///\n\n/// [RFC 5245 -- 7.1.2.1 USE-CANDIDATE]: https://tools.ietf.org/html/rfc5245#section-7.1.2.1\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct UseCandidate;\n\nimpl UseCandidate {\n\n /// The codepoint of the type of the attribute.\n\n pub const CODEPOINT: u16 = 0x0025;\n\n\n\n /// Makes a new `UseCandidate` instance.\n\n pub fn new() -> Self {\n\n UseCandidate\n", "file_path": "src/rfc5245/attributes.rs", "rank": 89, "score": 23.69761201261094 }, { "content": " let code = (class * 100 + number) as u16;\n\n Ok(ErrorCode {\n\n code,\n\n reason_phrase,\n\n })\n\n});\n\n\n\n/// [`ErrorCode`] encoder.\n\n///\n\n/// [`ErrorCode`]: ./struct.ErrorCode.html\n\n#[derive(Debug, Default)]\n\npub struct ErrorCodeEncoder(TupleEncoder<(U32beEncoder, Utf8Encoder)>);\n\nimpl ErrorCodeEncoder {\n\n /// Makes a new `ErrorCodeEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(ErrorCodeEncoder, ErrorCode, |item: Self::Item| {\n\n let class = u32::from(item.code / 100);\n", "file_path": "src/rfc5389/attributes.rs", "rank": 90, "score": 23.655201879771578 }, { "content": "impl 
DontFragment {\n\n /// The codepoint of the type of the attribute.\n\n pub const CODEPOINT: u16 = 0x001A;\n\n}\n\nimpl Attribute for DontFragment {\n\n type Decoder = DontFragmentDecoder;\n\n type Encoder = DontFragmentEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`DontFragment`] decoder.\n\n///\n\n/// [`DontFragment`]: ./struct.DontFragment.html\n\n#[derive(Debug, Default)]\n\npub struct DontFragmentDecoder(NullDecoder);\n\nimpl DontFragmentDecoder {\n\n /// Makes a new `DontFragmentDecoder` instance.\n", "file_path": "src/rfc5766/attributes.rs", "rank": 91, "score": 23.57621461262115 }, { "content": "\n\n/// [`MappedAddress`] encoder.\n\n///\n\n/// [`MappedAddress`]: ./struct.MappedAddress.html\n\n#[derive(Debug, Default)]\n\npub struct MappedAddressEncoder(SocketAddrEncoder);\n\nimpl MappedAddressEncoder {\n\n /// Makes a new `MappedAddressEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_encode!(MappedAddressEncoder, MappedAddress, |item: Self::Item| item\n\n .0);\n\n\n\n/// `MESSAGE-INTEGRITY` attribute.\n\n///\n\n/// See [RFC 5389 -- 15.3. MESSAGE-INTEGRITY] about this attribute.\n\n///\n\n/// [RFC 5389 -- 15.3. MESSAGE-INTEGRITY]: https://tools.ietf.org/html/rfc5389#section-15.4\n", "file_path": "src/rfc5389/attributes.rs", "rank": 92, "score": 23.461904903920058 }, { "content": " }\n\n}\n\nimpl_encode!(UseCandidateEncoder, UseCandidate, |_item: Self::Item| ());\n\n\n\n/// `ICE-CONTROLLED` attribute.\n\n///\n\n/// See [RFC 5245 -- 7.1.2.1 ICE-CONTROLLED] about this attribute.\n\n///\n\n/// [RFC 5245 -- 7.1.2.1 ICE-CONTROLLED]: https://tools.ietf.org/html/rfc5245#section-7.1.2.2\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct IceControlled(u64);\n\nimpl IceControlled {\n\n /// The codepoint of the type of the attribute.\n\n pub const CODEPOINT: u16 = 0x8029;\n\n\n\n /// Makes a new `IceControlled` instance.\n\n pub fn new(rnd: u64) -> Self {\n\n IceControlled(rnd)\n\n }\n\n\n", "file_path": "src/rfc5245/attributes.rs", "rank": 93, "score": 23.366887598155024 }, { "content": "impl TransactionId {\n\n /// Makes a new `TransactionId` instance.\n\n pub fn new(id: [u8; 12]) -> Self {\n\n TransactionId(id)\n\n }\n\n\n\n /// Returns a reference to the bytes that represents the identifier.\n\n pub fn as_bytes(&self) -> &[u8; 12] {\n\n &self.0\n\n }\n\n}\n\nimpl fmt::Debug for TransactionId {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"TransactionId(0x\")?;\n\n for b in self.as_ref() {\n\n write!(f, \"{:02X}\", b)?;\n\n }\n\n write!(f, \")\")?;\n\n Ok(())\n\n }\n\n}\n\nimpl AsRef<[u8]> for TransactionId {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0[..]\n\n }\n\n}\n", "file_path": "src/transaction_id.rs", "rank": 94, "score": 23.328271354028818 }, { "content": "impl Attribute for ReservationToken {\n\n type Decoder = ReservationTokenDecoder;\n\n type Encoder = ReservationTokenEncoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n\n AttributeType::new(Self::CODEPOINT)\n\n }\n\n}\n\n\n\n/// [`ReservationToken`] decoder.\n\n///\n\n/// [`ReservationToken`]: ./struct.ReservationToken.html\n\n#[derive(Debug, Default)]\n\npub struct ReservationTokenDecoder(U64beDecoder);\n\nimpl ReservationTokenDecoder {\n\n /// Makes a new `ReservationTokenDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n", "file_path": "src/rfc5766/attributes.rs", "rank": 95, "score": 23.242551864695194 }, { "content": "impl IceControlledDecoder {\n\n 
/// Makes a new `IceControlledDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(IceControlledDecoder, IceControlled, |prio| Ok(\n\n IceControlled(prio)\n\n));\n\n\n\n/// [`IceControlled`] encoder.\n\n///\n\n/// [`IceControlled`]: ./struct.IceControlled.html\n\n#[derive(Debug, Default)]\n\npub struct IceControlledEncoder(U64beEncoder);\n\n\n\nimpl IceControlledEncoder {\n\n /// Makes a new `IceControlledEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n", "file_path": "src/rfc5245/attributes.rs", "rank": 96, "score": 23.02247814479462 }, { "content": "/// [`Username`]: ./struct.Username.html\n\n#[derive(Debug, Default)]\n\npub struct UsernameDecoder(Utf8Decoder);\n\nimpl UsernameDecoder {\n\n /// Makes a new `UsernameDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(UsernameDecoder, Username, Username::new);\n\n\n\n/// [`Username`] encoder.\n\n///\n\n/// [`Username`]: ./struct.Username.html\n\n#[derive(Debug, Default)]\n\npub struct UsernameEncoder(Utf8Encoder);\n\nimpl UsernameEncoder {\n\n /// Makes a new `UsernameEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n", "file_path": "src/rfc5389/attributes.rs", "rank": 97, "score": 22.951111101363402 }, { "content": "pub struct XorMappedAddress2(SocketAddr);\n\nimpl XorMappedAddress2 {\n\n /// The codepoint of the type of the attribute.\n\n pub const CODEPOINT: u16 = 0x8020;\n\n\n\n /// Makes a new `XorMappedAddress2` instance.\n\n pub fn new(addr: SocketAddr) -> Self {\n\n XorMappedAddress2(addr)\n\n }\n\n\n\n /// Returns the address of this instance.\n\n pub fn address(&self) -> SocketAddr {\n\n self.0\n\n }\n\n}\n\nimpl Attribute for XorMappedAddress2 {\n\n type Decoder = XorMappedAddress2Decoder;\n\n type Encoder = XorMappedAddress2Encoder;\n\n\n\n fn get_type(&self) -> AttributeType {\n", "file_path": "src/rfc5389/attributes.rs", "rank": 98, "score": 22.950419315692596 }, { "content": "pub struct SoftwareDecoder(Utf8Decoder);\n\nimpl SoftwareDecoder {\n\n /// Makes a new `SoftwareDecoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\nimpl_decode!(SoftwareDecoder, Software, Software::new);\n\n\n\n/// [`Software`] encoder.\n\n///\n\n/// [`Software`]: ./struct.Software.html\n\n#[derive(Debug, Default)]\n\npub struct SoftwareEncoder(Utf8Encoder);\n\nimpl SoftwareEncoder {\n\n /// Makes a new `SoftwareEncoder` instance.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n", "file_path": "src/rfc5389/attributes.rs", "rank": 99, "score": 22.63077090828363 } ]
Rust
zandbox/src/zandbox/main.rs
tpscrpt/zinc
35307d3da96377b76425e03aefca97c5c10c5565
mod arguments;
mod error;

use std::collections::HashMap;
use std::str::FromStr;

use actix_web::middleware;
use actix_web::web;
use actix_web::App;
use actix_web::HttpServer;
use colored::Colorize;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use zksync_eth_signer::PrivateKeySigner;
use zksync_types::AccountId;

use zinc_build::Application as BuildApplication;

use zandbox::ContractSelectAllOutput;
use zandbox::ContractStorage;
use zandbox::DatabaseClient;
use zandbox::FieldSelectInput;
use zandbox::SharedData;
use zandbox::SharedDataContract;

use self::arguments::Arguments;
use self::error::Error;

#[actix_rt::main]
async fn main() -> Result<(), Error> {
    let args = Arguments::new();

    zinc_logger::initialize(zinc_const::app_name::ZANDBOX, args.verbosity);

    log::info!("Zandbox server started");

    let network =
        zksync::Network::from_str(args.network.as_str()).map_err(Error::InvalidNetwork)?;

    log::info!("Initializing the PostgreSQL client");
    let postgresql = DatabaseClient::new(args.postgresql_uri.as_str()).await?;

    log::info!("Loading the compiled contracts from the database");
    let database_data: Vec<ContractSelectAllOutput> = postgresql
        .select_contracts()
        .await?
        .into_par_iter()
        .collect();

    let mut contracts = HashMap::with_capacity(database_data.len());
    for contract in database_data.into_iter() {
        let eth_address = zinc_zksync::eth_address_from_vec(contract.eth_address);
        let eth_private_key = zinc_zksync::eth_private_key_from_vec(contract.eth_private_key);

        log::info!(
            "{} instance `{}` of the contract `{} v{}` with address {}",
            "Loaded".bright_green(),
            contract.instance,
            contract.name,
            contract.version,
            serde_json::to_string(&eth_address).expect(zinc_const::panic::DATA_CONVERSION),
        );

        let application = BuildApplication::try_from_slice(contract.bytecode.as_slice())
            .expect(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION);

        let build = match application {
            BuildApplication::Circuit(_circuit) => {
                panic!(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION)
            }
            BuildApplication::Contract(contract) => contract,
        };

        let provider = zksync::Provider::new(network);
        let wallet_credentials = zksync::WalletCredentials::from_eth_signer(
            eth_address,
            PrivateKeySigner::new(eth_private_key),
            network,
        )
        .await?;
        let wallet = zksync::Wallet::new(provider, wallet_credentials).await?;

        let database_fields = postgresql
            .select_fields(FieldSelectInput::new(contract.account_id as AccountId))
            .await?;
        let storage = ContractStorage::new_with_data(
            database_fields,
            build.storage.as_slice(),
            eth_address,
            &wallet,
        )
        .await?;

        contracts.insert(
            eth_address,
            SharedDataContract::new(
                eth_address,
                contract.name,
                contract.version,
                contract.instance,
                contract.source_code,
                contract.bytecode,
                contract.verifying_key,
                Some(contract.account_id as AccountId),
                eth_private_key,
                build,
                storage,
            ),
        );
    }

    let data = SharedData::new(postgresql, contracts).wrap();

    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::default())
            .wrap(middleware::DefaultHeaders::new().content_type())
            .wrap(actix_cors::Cors::default())
            .app_data(web::JsonConfig::default().limit(zinc_const::limit::JSON_PAYLOAD))
            .data(data.clone())
            .configure(zandbox::configure)
    })
    .bind(format!(
        "{}:{}",
        zinc_const::zandbox::HOST,
        args.http_port.unwrap_or(zinc_const::zandbox::PORT)
    ))
    .map_err(Error::ServerBinding)?
    .run()
    .await
    .map_err(Error::ServerRuntime)?;

    log::info!("Zandbox server finished");

    Ok(())
}
mod arguments;
mod error;

use std::collections::HashMap;
use std::str::FromStr;

use actix_web::middleware;
use actix_web::web;
use actix_web::App;
use actix_web::HttpServer;
use colored::Colorize;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use zksync_eth_signer::PrivateKeySigner;
use zksync_types::AccountId;

use zinc_build::Application as BuildApplication;

use zandbox::ContractSelectAllOutput;
use zandbox::ContractStorage;
use zandbox::DatabaseClient;
use zandbox::FieldSelectInput;
use zandbox::SharedData;
use zandbox::SharedDataContract;

use self::arguments::Arguments;
use self::error::Error;

#[actix_rt::main]
async fn main() -> Result<(), Error> {
    let args = Arguments::new();

    zinc_logger::initialize(zinc_const::app_name::ZANDBOX, args.verbosity);

    log::info!("Zandbox server started");

    let network =
        zksync::Network::from_str(args.network.as_str()).map_err(Error::InvalidNetwork)?;

    log::info!("Initializing the PostgreSQL client");
    let postgresql = DatabaseClient::new(args.postgresql_uri.as_str()).await?;

    log::info!("Loading the compiled contracts from the database");
    let database_data: Vec<ContractSelectAllOutput> = postgresql
        .select_contracts()
        .await?
        .into_par_iter()
        .collect();

    let mut contracts = HashMap::with_capacity(database_data.len());
    for contract in database_data.into_iter() {
        let eth_address = zinc_zksync::eth_address_from_vec(contract.eth_address);
        let eth_private_key = zinc_zksync::eth_private_key_from_vec(contract.eth_private_key);

        log::info!(
            "{} instance `{}` of the contract `{} v{}` with address {}",
            "Loaded".bright_green(),
            contract.instance,
            contract.name,
            contract.version,
            serde_json::to_string(&eth_address).expect(zinc_const::panic::DATA_CONVERSION),
        );

        let application = BuildApplication::try_from_slice(contract.bytecode.as_slice())
            .expect(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION);

        let build = match application {
            BuildApplication::Circuit(_circuit) => {
                panic!(zinc_const::panic::VALIDATED_DURING_DATABASE_POPULATION)
            }
            BuildApplication::Contract(contract) => contract,
        };

        let provider = zksync::Provider::new(network);
        let wallet_credentials = zksync::WalletCredentials::from_eth_signer(
            eth_address,
            PrivateKeySigner::new(eth_private_key),
            network,
        )
        .await?;
        let wallet = zksync::Wallet::new(provider, wallet_credentials).await?;

        let database_fields = postgresql
            .select_fields(FieldSelectInput::new(contract.account_id as AccountId))
            .await?;
        let storage = ContractStorage::new_with_data(
            database_fields,
            build.storage.as_slice(),
            eth_address,
            &wallet,
        )
        .await?;

        contracts.insert(
            eth_address,
            SharedDataContract::new(
                eth_address,
                contract.name,
                contract.version,
                contract.instance,
                contract.source_code,
                contract.bytecode,
                contract.verifying_key,
                Some(contract.account_id as AccountId),
                eth_private_key,
                build,
                storage,
            ),
        );
    }

    let data = SharedData::new(postgresql, contracts).wrap();

    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::default())
            .wrap(middleware::DefaultHeaders::new().content_type())
            .wrap(actix_cors::Cors::default())
            .app_data(web::JsonConfig::default().limit(zinc_const::limit::JSON_PAYLOAD))
            .data(data.clone())
            .configure(zandbox::configure)
    })
    .bind(format!(
        "{}:{}",
        zinc_const::zandbox::HOST,
        args.http_port.unwrap_or(zinc_const::zandbox::PORT)
    ))
    .map_err(Error::ServerBinding)?
    .run()
    .await
    .map_err(Error::ServerRuntime)?;

    log::info!("Zandbox server finished");

    Ok(())
}
function_block-full_function
[ { "content": "///\n\n/// The auxiliary `main` function to facilitate the `?` error conversion operator.\n\n///\n\nfn main_inner() -> Result<(), Error> {\n\n let args = Arguments::new();\n\n\n\n zinc_logger::initialize(zinc_const::app_name::COMPILER, args.verbosity);\n\n\n\n let manifest = Manifest::try_from(&args.manifest_path).map_err(Error::Manifest)?;\n\n\n\n let source_directory_path = args.source_directory_path;\n\n let optimize_dead_function_elimination = args.optimize_dead_function_elimination;\n\n let build = thread::Builder::new()\n\n .stack_size(zinc_const::limit::COMPILER_STACK_SIZE)\n\n .spawn(move || -> Result<Build, Error> {\n\n let source = Source::try_from_entry(&source_directory_path)?;\n\n let state = source.compile(manifest)?;\n\n let application =\n\n State::unwrap_rc(state).into_application(optimize_dead_function_elimination);\n\n Ok(application.into_build())\n\n })\n\n .expect(zinc_const::panic::SYNCHRONIZATION)\n\n .join()\n", "file_path": "zinc-compiler/src/znc/mod.rs", "rank": 0, "score": 116093.85051506555 }, { "content": "//!\n\n//! The Zargo package manager arguments.\n\n//!\n\n\n\npub mod command;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse self::command::Command;\n\n\n\n///\n\n/// The Zargo package manager arguments.\n\n///\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(\n\n name = zinc_const::app_name::ZARGO,\n\n about = \"The Zinc package manager\",\n\n)]\n\npub struct Arguments {\n\n /// Prints more logs, if passed several times.\n", "file_path": "zargo/src/arguments/mod.rs", "rank": 1, "score": 104261.59529615239 }, { "content": " #[structopt(short = \"v\", long = \"verbose\", parse(from_occurrences))]\n\n pub verbosity: usize,\n\n\n\n /// The subcommand variant.\n\n #[structopt(subcommand)]\n\n pub command: Command,\n\n}\n\n\n\nimpl Arguments {\n\n ///\n\n /// A shortcut constructor.\n\n ///\n\n pub fn new() -> Self {\n\n Self::from_args()\n\n }\n\n}\n", "file_path": "zargo/src/arguments/mod.rs", "rank": 2, "score": 104250.74391657874 }, { "content": "//!\n\n//! The generic file system errors.\n\n//!\n\n\n\npub mod directory;\n\npub mod file;\n", "file_path": "zargo/src/error/mod.rs", "rank": 3, "score": 103390.8890692503 }, { "content": "//!\n\n//! 
The Zargo package manager subcommand error.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::arguments::command::build::error::Error as BuildCommandError;\n\nuse crate::arguments::command::call::error::Error as CallCommandError;\n\nuse crate::arguments::command::clean::error::Error as CleanCommandError;\n\nuse crate::arguments::command::init::error::Error as InitCommandError;\n\nuse crate::arguments::command::new::error::Error as NewCommandError;\n\nuse crate::arguments::command::proof_check::error::Error as ProofCheckCommandError;\n\nuse crate::arguments::command::prove::error::Error as ProveCommandError;\n\nuse crate::arguments::command::publish::error::Error as PublishCommandError;\n\nuse crate::arguments::command::query::error::Error as QueryCommandError;\n\nuse crate::arguments::command::run::error::Error as RunCommandError;\n\nuse crate::arguments::command::setup::error::Error as SetupCommandError;\n\nuse crate::arguments::command::test::error::Error as TestCommandError;\n\nuse crate::arguments::command::verify::error::Error as VerifyCommandError;\n\n\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 4, "score": 101698.73425094452 }, { "content": " Query(QueryCommandError),\n\n /// The `call` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Call(CallCommandError),\n\n}\n\n\n\nimpl From<NewCommandError> for Error {\n\n fn from(inner: NewCommandError) -> Self {\n\n Self::New(inner)\n\n }\n\n}\n\n\n\nimpl From<InitCommandError> for Error {\n\n fn from(inner: InitCommandError) -> Self {\n\n Self::Init(inner)\n\n }\n\n}\n\n\n\nimpl From<BuildCommandError> for Error {\n\n fn from(inner: BuildCommandError) -> Self {\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 5, "score": 101685.27757497839 }, { "content": "///\n\n/// The Zargo package manager error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The `new` command error.\n\n #[fail(display = \"{}\", _0)]\n\n New(NewCommandError),\n\n /// The `init` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Init(InitCommandError),\n\n /// The `build` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Build(BuildCommandError),\n\n /// The `clean` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Clean(CleanCommandError),\n\n /// The `run` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Run(RunCommandError),\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 6, "score": 101685.22803257519 }, { "content": "impl From<ProofCheckCommandError> for Error {\n\n fn from(inner: ProofCheckCommandError) -> Self {\n\n Self::ProofCheck(inner)\n\n }\n\n}\n\n\n\nimpl From<PublishCommandError> for Error {\n\n fn from(inner: PublishCommandError) -> Self {\n\n Self::Publish(inner)\n\n }\n\n}\n\n\n\nimpl From<QueryCommandError> for Error {\n\n fn from(inner: QueryCommandError) -> Self {\n\n Self::Query(inner)\n\n }\n\n}\n\n\n\nimpl From<CallCommandError> for Error {\n\n fn from(inner: CallCommandError) -> Self {\n\n Self::Call(inner)\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 7, "score": 101685.20351873756 }, { "content": " /// The `test` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Test(TestCommandError),\n\n /// The `setup` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Setup(SetupCommandError),\n\n /// The `prove` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Prove(ProveCommandError),\n\n /// The `verify` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Verify(VerifyCommandError),\n\n /// The `proof-check` command error.\n\n 
#[fail(display = \"{}\", _0)]\n\n ProofCheck(ProofCheckCommandError),\n\n /// The `publish` command error.\n\n #[fail(display = \"{}\", _0)]\n\n Publish(PublishCommandError),\n\n /// The `query` command error.\n\n #[fail(display = \"{}\", _0)]\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 8, "score": 101685.18827787899 }, { "content": "}\n\n\n\nimpl From<SetupCommandError> for Error {\n\n fn from(inner: SetupCommandError) -> Self {\n\n Self::Setup(inner)\n\n }\n\n}\n\n\n\nimpl From<ProveCommandError> for Error {\n\n fn from(inner: ProveCommandError) -> Self {\n\n Self::Prove(inner)\n\n }\n\n}\n\n\n\nimpl From<VerifyCommandError> for Error {\n\n fn from(inner: VerifyCommandError) -> Self {\n\n Self::Verify(inner)\n\n }\n\n}\n\n\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 9, "score": 101685.18830852977 }, { "content": " Self::Build(inner)\n\n }\n\n}\n\n\n\nimpl From<CleanCommandError> for Error {\n\n fn from(inner: CleanCommandError) -> Self {\n\n Self::Clean(inner)\n\n }\n\n}\n\n\n\nimpl From<RunCommandError> for Error {\n\n fn from(inner: RunCommandError) -> Self {\n\n Self::Run(inner)\n\n }\n\n}\n\n\n\nimpl From<TestCommandError> for Error {\n\n fn from(inner: TestCommandError) -> Self {\n\n Self::Test(inner)\n\n }\n", "file_path": "zargo/src/arguments/command/error.rs", "rank": 10, "score": 101685.13999504391 }, { "content": "//!\n\n//! The Zargo package manager subcommand.\n\n//!\n\n\n\npub mod build;\n\npub mod call;\n\npub mod clean;\n\npub mod error;\n\npub mod init;\n\npub mod new;\n\npub mod proof_check;\n\npub mod prove;\n\npub mod publish;\n\npub mod query;\n\npub mod run;\n\npub mod setup;\n\npub mod test;\n\npub mod verify;\n\n\n\nuse structopt::StructOpt;\n", "file_path": "zargo/src/arguments/command/mod.rs", "rank": 11, "score": 100851.37803562284 }, { "content": "\n\nuse self::build::Command as BuildCommand;\n\nuse self::call::Command as CallCommand;\n\nuse self::clean::Command as CleanCommand;\n\nuse self::error::Error;\n\nuse self::init::Command as InitCommand;\n\nuse self::new::Command as NewCommand;\n\nuse self::proof_check::Command as ProofCheckCommand;\n\nuse self::prove::Command as ProveCommand;\n\nuse self::publish::Command as PublishCommand;\n\nuse self::query::Command as QueryCommand;\n\nuse self::run::Command as RunCommand;\n\nuse self::setup::Command as SetupCommand;\n\nuse self::test::Command as TestCommand;\n\nuse self::verify::Command as VerifyCommand;\n\n\n\n///\n\n/// The Zargo package manager subcommand.\n\n///\n\n#[derive(Debug, StructOpt)]\n", "file_path": "zargo/src/arguments/command/mod.rs", "rank": 12, "score": 100848.24428712095 }, { "content": " /// Runs the full project building, running, trusted setup, proving & verifying sequence.\n\n ProofCheck(ProofCheckCommand),\n\n /// Uploads the smart contract to the specified network.\n\n Publish(PublishCommand),\n\n /// Queries a contract storage or calls an immutable method.\n\n Query(QueryCommand),\n\n /// Calls a mutable smart contract method.\n\n Call(CallCommand),\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub async fn execute(self) -> Result<(), Error> {\n\n match self {\n\n Self::New(inner) => inner.execute()?,\n\n Self::Init(inner) => inner.execute()?,\n\n Self::Build(inner) => inner.execute()?,\n\n Self::Clean(inner) => inner.execute()?,\n", "file_path": "zargo/src/arguments/command/mod.rs", "rank": 13, "score": 100839.86088683036 }, { "content": " Self::Run(inner) => inner.execute()?,\n\n Self::Test(inner) => 
inner.execute()?,\n\n Self::Setup(inner) => inner.execute()?,\n\n Self::Prove(inner) => inner.execute()?,\n\n Self::Verify(inner) => inner.execute()?,\n\n Self::ProofCheck(inner) => inner.execute()?,\n\n Self::Publish(inner) => inner.execute().await?,\n\n Self::Query(inner) => inner.execute().await?,\n\n Self::Call(inner) => inner.execute().await?,\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/mod.rs", "rank": 14, "score": 100837.15001399971 }, { "content": "#[structopt(about = \"The Zinc package manager\")]\n\npub enum Command {\n\n /// Creates a new project in the specified directory.\n\n New(NewCommand),\n\n /// Initializes a new project in the specified directory.\n\n Init(InitCommand),\n\n /// Builds the project at the given path.\n\n Build(BuildCommand),\n\n /// Removes the project build artifacts.\n\n Clean(CleanCommand),\n\n /// Runs the project and prints its output.\n\n Run(RunCommand),\n\n /// Runs the project unit tests.\n\n Test(TestCommand),\n\n /// Generates a pair of proving and verifying keys.\n\n Setup(SetupCommand),\n\n /// Generates the zero-knowledge proof for given input data.\n\n Prove(ProveCommand),\n\n /// Verifies the zero-knowledge proof.\n\n Verify(VerifyCommand),\n", "file_path": "zargo/src/arguments/command/mod.rs", "rank": 15, "score": 100837.15001399971 }, { "content": "//!\n\n//! The Zargo package manager `publish` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse zinc_zksync::SourceError;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\nuse crate::error::file::Error as FileError;\n\nuse crate::executable::compiler::Error as CompilerError;\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\nuse crate::transaction::error::Error as TransactionError;\n\n\n\n///\n\n/// The Zargo package manager `publish` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The invalid network error.\n", "file_path": "zargo/src/arguments/command/publish/error.rs", "rank": 16, "score": 98472.18862823468 }, { "content": "//!\n\n//! The Zargo package manager `new` subcommand.\n\n//!\n\n\n\nuse std::ffi::OsString;\n\nuse std::io;\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\nuse crate::error::file::Error as FileError;\n\n\n\n///\n\n/// The Zargo package manager `new` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The invalid project name error.\n\n #[fail(\n\n display = \"project name is missing and cannot be inferred from path {:?}\",\n", "file_path": "zargo/src/arguments/command/new/error.rs", "rank": 17, "score": 98471.58614836747 }, { "content": "//!\n\n//! The Zargo package manager `run` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\nuse crate::error::file::Error as FileError;\n\nuse crate::executable::compiler::Error as CompilerError;\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\n\n\n///\n\n/// The Zargo package manager `run` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The contract method to call is missing.\n", "file_path": "zargo/src/arguments/command/run/error.rs", "rank": 18, "score": 98471.58496660767 }, { "content": "//!\n\n//! 
The Zargo package manager `init` subcommand.\n\n//!\n\n\n\nuse std::ffi::OsString;\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\nuse crate::error::file::Error as FileError;\n\n\n\n///\n\n/// The Zargo package manager `init` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The invalid project name error.\n\n #[fail(\n\n display = \"project name is missing and cannot be inferred from path {:?}\",\n\n _0\n", "file_path": "zargo/src/arguments/command/init/error.rs", "rank": 19, "score": 98471.36647216919 }, { "content": "//!\n\n//! The Zargo package manager `test` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\nuse crate::executable::compiler::Error as CompilerError;\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\n\n\n///\n\n/// The Zargo package manager `test` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The project binary build directory error.\n\n #[fail(display = \"build directory {}\", _0)]\n\n BuildDirectory(DirectoryError),\n\n /// The compiler process error.\n\n #[fail(display = \"compiler {}\", _0)]\n\n Compiler(CompilerError),\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n}\n", "file_path": "zargo/src/arguments/command/test/error.rs", "rank": 20, "score": 98470.62242778098 }, { "content": "//!\n\n//! The Zargo package manager `call` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::file::Error as FileError;\n\nuse crate::transaction::error::Error as TransactionError;\n\n\n\n///\n\n/// The Zargo package manager `call` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The ETH address is invalid.\n\n #[fail(display = \"invalid ETH address: {}\", _0)]\n\n InvalidContractAddress(rustc_hex::FromHexError),\n\n /// The invalid network error.\n\n #[fail(display = \"invalid network name: {}\", _0)]\n\n NetworkInvalid(String),\n", "file_path": "zargo/src/arguments/command/call/error.rs", "rank": 21, "score": 98470.54330305452 }, { "content": "//!\n\n//! The Zargo package manager `prove` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::file::Error as FileError;\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\n\n\n///\n\n/// The Zargo package manager `prove` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The contract method to call is missing.\n\n #[fail(display = \"contract method to call must be specified\")]\n\n MethodMissing,\n\n /// The private key file generation error.\n\n #[fail(display = \"private key file {}\", _0)]\n\n PrivateKeyFile(FileError),\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n}\n", "file_path": "zargo/src/arguments/command/prove/error.rs", "rank": 22, "score": 98469.89676571016 }, { "content": "//!\n\n//! 
The Zargo package manager `query` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::file::Error as FileError;\n\n\n\n///\n\n/// The Zargo package manager `query` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The ETH address is invalid.\n\n #[fail(display = \"invalid ETH address: {}\", _0)]\n\n InvalidContractAddress(rustc_hex::FromHexError),\n\n /// The invalid network error.\n\n #[fail(display = \"invalid network name: {}\", _0)]\n\n NetworkInvalid(String),\n\n /// The unimplemented network error.\n", "file_path": "zargo/src/arguments/command/query/error.rs", "rank": 23, "score": 98469.69014021098 }, { "content": " /// The compiler process error.\n\n #[fail(display = \"compiler {}\", _0)]\n\n Compiler(CompilerError),\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n /// The contract bytecode binary file error.\n\n #[fail(display = \"bytecode binary file {}\", _0)]\n\n BinaryFile(FileError),\n\n /// The input file error.\n\n #[fail(display = \"input file {}\", _0)]\n\n InputFile(FileError<serde_json::Error>),\n\n /// The input file data is invalid.\n\n #[fail(display = \"invalid input file data\")]\n\n InvalidInputData,\n\n /// The constructor arguments not found.\n\n #[fail(display = \"constructor arguments not found\")]\n\n ConstructorArgumentsNotFound,\n\n /// The verifying key file error.\n\n #[fail(display = \"verifying key file {}\", _0)]\n", "file_path": "zargo/src/arguments/command/publish/error.rs", "rank": 24, "score": 98469.67152703715 }, { "content": "//!\n\n//! The Zargo package manager `clean` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::error::directory::Error as DirectoryError;\n\n\n\n///\n\n/// The Zargo package manager `clean` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The project binary build directory error.\n\n #[fail(display = \"build directory {}\", _0)]\n\n BuildDirectory(DirectoryError),\n\n /// The project template, keys, and other auxiliary data directory error.\n\n #[fail(display = \"data directory {}\", _0)]\n\n DataDirectory(DirectoryError),\n\n}\n", "file_path": "zargo/src/arguments/command/clean/error.rs", "rank": 25, "score": 98469.61996510289 }, { "content": "//!\n\n//! The Zargo package manager `setup` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\n\n\n///\n\n/// The Zargo package manager `setup` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The contract method to call is missing.\n\n #[fail(display = \"contract method to call must be specified\")]\n\n MethodMissing,\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n}\n", "file_path": "zargo/src/arguments/command/setup/error.rs", "rank": 26, "score": 98469.28122334422 }, { "content": "//!\n\n//! 
The Zargo package manager `verify` subcommand.\n\n//!\n\n\n\nuse failure::Fail;\n\n\n\nuse crate::executable::virtual_machine::Error as VirtualMachineError;\n\n\n\n///\n\n/// The Zargo package manager `verify` subcommand error.\n\n///\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The contract method to call is missing.\n\n #[fail(display = \"contract method to call must be specified\")]\n\n MethodMissing,\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n}\n", "file_path": "zargo/src/arguments/command/verify/error.rs", "rank": 27, "score": 98469.28122334422 }, { "content": " _0\n\n )]\n\n ProjectNameInvalid(OsString),\n\n /// The invalid project type error.\n\n #[fail(\n\n display = \"project type must be either `circuit` or `contract`, but found `{}`\",\n\n _0\n\n )]\n\n ProjectTypeInvalid(String),\n\n /// The project directory already exists. Use `init` instead.\n\n #[fail(\n\n display = \"directory {:?} already exists. To initialize it with a project, use `zargo init`\",\n\n _0\n\n )]\n\n DirectoryAlreadyExists(OsString),\n\n /// The project directory creating error.\n\n #[fail(display = \"root directory {:?} creating: {}\", _0, _1)]\n\n CreatingRootDirectory(OsString, io::Error),\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n", "file_path": "zargo/src/arguments/command/new/error.rs", "rank": 28, "score": 98468.18105086972 }, { "content": " )]\n\n ProjectNameInvalid(OsString),\n\n /// The invalid project type error.\n\n #[fail(\n\n display = \"project type must be either `circuit` or `contract`, found `{}`\",\n\n _0\n\n )]\n\n ProjectTypeInvalid(String),\n\n /// The project directory does not exist. Use `new` instead.\n\n #[fail(\n\n display = \"directory {:?} does not exist. 
To create a new directory, use `zargo new`\",\n\n _0\n\n )]\n\n DirectoryDoesNotExist(OsString),\n\n /// The project has been already initialized error.\n\n #[fail(display = \"project at path {:?} is already initialized\", _0)]\n\n CircuitAlreadyInitialized(OsString),\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n", "file_path": "zargo/src/arguments/command/init/error.rs", "rank": 29, "score": 98468.03551140611 }, { "content": " #[fail(display = \"initial deposit amount: {}\", _0)]\n\n InitialDepositAmount(zinc_math::BigIntError),\n\n /// The wallet initialization error.\n\n #[fail(display = \"wallet initialization: {}\", _0)]\n\n WalletInitialization(zksync::error::ClientError),\n\n /// The transaction signing error.\n\n #[fail(display = \"transaction: {}\", _0)]\n\n Transaction(TransactionError),\n\n}\n", "file_path": "zargo/src/arguments/command/publish/error.rs", "rank": 30, "score": 98465.04797463694 }, { "content": " #[fail(display = \"contract method to call must be specified\")]\n\n MethodMissing,\n\n /// The project binary build directory error.\n\n #[fail(display = \"build directory {}\", _0)]\n\n BuildDirectory(DirectoryError),\n\n /// The project template, keys, and other auxiliary data directory error.\n\n #[fail(display = \"data directory {}\", _0)]\n\n DataDirectory(DirectoryError),\n\n /// The private key file generation error.\n\n #[fail(display = \"private key file {}\", _0)]\n\n PrivateKeyFile(FileError),\n\n /// The compiler process error.\n\n #[fail(display = \"compiler {}\", _0)]\n\n Compiler(CompilerError),\n\n /// The virtual machine process error.\n\n #[fail(display = \"virtual machine {}\", _0)]\n\n VirtualMachine(VirtualMachineError),\n\n}\n", "file_path": "zargo/src/arguments/command/run/error.rs", "rank": 31, "score": 98464.97967414797 }, { "content": " Manifest(zinc_manifest::Error),\n\n /// The project source code directory error.\n\n #[fail(display = \"source directory {}\", _0)]\n\n SourceDirectory(DirectoryError),\n\n /// The circuit source code entry point file generation error.\n\n #[fail(display = \"main file {}\", _0)]\n\n CircuitFile(FileError),\n\n /// The contract source code entry point file generation error.\n\n #[fail(display = \"contract file {}\", _0)]\n\n ContractFile(FileError),\n\n}\n", "file_path": "zargo/src/arguments/command/new/error.rs", "rank": 32, "score": 98464.93471867236 }, { "content": " SenderPrivateKeyInvalid(rustc_hex::FromHexError),\n\n /// The sender address cannot be derived from the private key.\n\n #[fail(\n\n display = \"could not derive the ETH address from the private key: {}\",\n\n _0\n\n )]\n\n SenderAddressDeriving(anyhow::Error),\n\n /// The wallet initialization error.\n\n #[fail(display = \"wallet initialization: {}\", _0)]\n\n WalletInitialization(zksync::error::ClientError),\n\n /// The transaction signing error.\n\n #[fail(display = \"transaction: {}\", _0)]\n\n Transaction(TransactionError),\n\n /// The publish HTTP request error.\n\n #[fail(display = \"HTTP request: {}\", _0)]\n\n HttpRequest(reqwest::Error),\n\n /// The smart contract server failure.\n\n #[fail(display = \"action failed: {}\", _0)]\n\n ActionFailed(String),\n\n}\n", "file_path": "zargo/src/arguments/command/call/error.rs", "rank": 33, "score": 98464.91529968465 }, { "content": " #[fail(display = \"invalid network name: {}\", _0)]\n\n NetworkInvalid(String),\n\n /// The unimplemented network error.\n\n #[fail(display = \"unimplemented network: {}\", _0)]\n\n 
NetworkUnimplemented(zksync::Network),\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The project is not a contract.\n\n #[fail(display = \"not a contract\")]\n\n NotAContract,\n\n /// The source code error.\n\n #[fail(display = \"source code {}\", _0)]\n\n Source(SourceError),\n\n /// The project binary build directory error.\n\n #[fail(display = \"build directory {}\", _0)]\n\n BuildDirectory(DirectoryError),\n\n /// The project template, keys, and other auxiliary data directory error.\n\n #[fail(display = \"data directory {}\", _0)]\n\n DataDirectory(DirectoryError),\n", "file_path": "zargo/src/arguments/command/publish/error.rs", "rank": 34, "score": 98464.8448191648 }, { "content": " /// The project source code directory error.\n\n #[fail(display = \"source directory {}\", _0)]\n\n SourceDirectory(DirectoryError),\n\n /// The circuit source code entry point file generation error.\n\n #[fail(display = \"main file {}\", _0)]\n\n CircuitFile(FileError),\n\n /// The contract source code entry point file generation error.\n\n #[fail(display = \"contract file {}\", _0)]\n\n ContractFile(FileError),\n\n}\n", "file_path": "zargo/src/arguments/command/init/error.rs", "rank": 35, "score": 98464.83268227529 }, { "content": " /// The unimplemented network error.\n\n #[fail(display = \"unimplemented network: {}\", _0)]\n\n NetworkUnimplemented(zksync::Network),\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The project is not a contract.\n\n #[fail(display = \"not a contract\")]\n\n NotAContract,\n\n /// The input file error.\n\n #[fail(display = \"input file {}\", _0)]\n\n InputFile(FileError<serde_json::Error>),\n\n /// The input file data is invalid.\n\n #[fail(display = \"invalid input file data\")]\n\n InvalidInputData,\n\n /// The private key file error.\n\n #[fail(display = \"private key file {}\", _0)]\n\n PrivateKeyFile(FileError),\n\n /// The sender private key is invalid.\n\n #[fail(display = \"sender private key is invalid: {}\", _0)]\n", "file_path": "zargo/src/arguments/command/call/error.rs", "rank": 36, "score": 98464.68373829276 }, { "content": " #[fail(display = \"unimplemented network: {}\", _0)]\n\n NetworkUnimplemented(zksync::Network),\n\n /// The manifest file error.\n\n #[fail(display = \"manifest {}\", _0)]\n\n Manifest(zinc_manifest::Error),\n\n /// The project is not a contract.\n\n #[fail(display = \"not a contract\")]\n\n NotAContract,\n\n /// The input file error.\n\n #[fail(display = \"input file {}\", _0)]\n\n InputFile(FileError<serde_json::Error>),\n\n /// The input file data is invalid.\n\n #[fail(display = \"invalid input file data\")]\n\n InvalidInputData,\n\n /// The publish HTTP request error.\n\n #[fail(display = \"HTTP request: {}\", _0)]\n\n HttpRequest(reqwest::Error),\n\n /// The smart contract server failure.\n\n #[fail(display = \"action failed: {}\", _0)]\n\n ActionFailed(String),\n\n}\n", "file_path": "zargo/src/arguments/command/query/error.rs", "rank": 37, "score": 98464.50402182633 }, { "content": " VerifyingKeyFile(FileError),\n\n /// The publish HTTP request error.\n\n #[fail(display = \"HTTP request: {}\", _0)]\n\n HttpRequest(reqwest::Error),\n\n /// The smart contract server failure.\n\n #[fail(display = \"action failed: {}\", _0)]\n\n ActionFailed(String),\n\n /// The private key file error.\n\n #[fail(display = \"private key file {}\", _0)]\n\n PrivateKeyFile(FileError),\n\n /// The sender private 
key is invalid.\n\n #[fail(display = \"sender private key is invalid: {}\", _0)]\n\n SenderPrivateKeyInvalid(rustc_hex::FromHexError),\n\n /// The sender address cannot be derived from the private key.\n\n #[fail(\n\n display = \"could not derive the ETH address from the private key: {}\",\n\n _0\n\n )]\n\n SenderAddressDeriving(anyhow::Error),\n\n /// The initial deposit amount is invalid.\n", "file_path": "zargo/src/arguments/command/publish/error.rs", "rank": 38, "score": 98464.48795708964 }, { "content": "//!\n\n//! The Zargo package manager `clean` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\n\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::Directory as DataDirectory;\n\n\n\nuse self::error::Error;\n\n\n\n///\n\n/// The Zargo package manager `clean` subcommand.\n", "file_path": "zargo/src/arguments/command/clean/mod.rs", "rank": 39, "score": 97659.98171624349 }, { "content": "//!\n\n//! The Zargo package manager `setup` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::Directory as DataDirectory;\n\n\n\nuse self::error::Error;\n\n\n", "file_path": "zargo/src/arguments/command/setup/mod.rs", "rank": 40, "score": 97659.62066531951 }, { "content": "//!\n\n//! The Zargo package manager `verify` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::Directory as DataDirectory;\n\n\n\nuse self::error::Error;\n\n\n", "file_path": "zargo/src/arguments/command/verify/mod.rs", "rank": 41, "score": 97659.62066531951 }, { "content": "//!\n\n//! The Zargo package manager `init` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse colored::Colorize;\n\nuse structopt::StructOpt;\n\n\n\nuse crate::project::source::circuit::Circuit as CircuitFile;\n\nuse crate::project::source::contract::Contract as ContractFile;\n\nuse crate::project::source::Directory as SourceDirectory;\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse self::error::Error;\n\n\n", "file_path": "zargo/src/arguments/command/init/mod.rs", "rank": 42, "score": 97659.37394200858 }, { "content": "//!\n\n//! The Zargo package manager `new` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse colored::Colorize;\n\nuse structopt::StructOpt;\n\n\n\nuse crate::project::source::circuit::Circuit as CircuitFile;\n\nuse crate::project::source::contract::Contract as ContractFile;\n\nuse crate::project::source::Directory as SourceDirectory;\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse self::error::Error;\n", "file_path": "zargo/src/arguments/command/new/mod.rs", "rank": 43, "score": 97659.26893951982 }, { "content": "//!\n\n//! 
The Zargo package manager `test` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\n\n\nuse crate::executable::compiler::Compiler;\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::Directory as DataDirectory;\n\nuse crate::project::source::Directory as SourceDirectory;\n\n\n\nuse self::error::Error;\n", "file_path": "zargo/src/arguments/command/test/mod.rs", "rank": 44, "score": 97659.23892036187 }, { "content": "//!\n\n//! The Zargo package manager `prove` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::private_key::PrivateKey as PrivateKeyFile;\n\nuse crate::project::data::Directory as DataDirectory;\n\n\n\nuse self::error::Error;\n", "file_path": "zargo/src/arguments/command/prove/mod.rs", "rank": 45, "score": 97659.04220651419 }, { "content": "//!\n\n//! The Zinc virtual machine arguments.\n\n//!\n\n\n\npub mod command;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse self::command::Command;\n\n\n\n///\n\n/// The Zinc virtual machine arguments.\n\n///\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = zinc_const::app_name::VIRTUAL_MACHINE, about = \"The Zinc virtual machine\")]\n\npub struct Arguments {\n\n /// Prints more logs, if passed several times.\n\n #[structopt(short = \"v\", long = \"verbose\", parse(from_occurrences))]\n\n pub verbosity: usize,\n\n\n", "file_path": "zinc-vm/src/zvm/arguments/mod.rs", "rank": 46, "score": 97657.90334114361 }, { "content": "//!\n\n//! The Zargo package manager `query` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse colored::Colorize;\n\nuse reqwest::Client as HttpClient;\n\nuse reqwest::Method;\n\nuse reqwest::Url;\n\nuse serde_json::Value as JsonValue;\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\nuse zinc_zksync::QueryRequestBody;\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 47, "score": 97657.34022450664 }, { "content": "//!\n\n//! The Zargo package manager `publish` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse colored::Colorize;\n\nuse num::BigUint;\n\nuse reqwest::Client as HttpClient;\n\nuse reqwest::Method;\n\nuse reqwest::Url;\n\nuse structopt::StructOpt;\n\n\n\nuse zksync::web3::types::H256;\n\nuse zksync_eth_signer::PrivateKeySigner;\n\nuse zksync_types::tx::PackedEthSignature;\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 48, "score": 97657.27464062287 }, { "content": "//!\n\n//! 
The Zargo package manager `call` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\nuse colored::Colorize;\n\nuse reqwest::Client as HttpClient;\n\nuse reqwest::Method;\n\nuse reqwest::Url;\n\nuse serde_json::Value as JsonValue;\n\nuse structopt::StructOpt;\n\n\n\nuse zksync::web3::types::H256;\n\nuse zksync_eth_signer::PrivateKeySigner;\n\nuse zksync_types::tx::PackedEthSignature;\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 49, "score": 97657.14631886182 }, { "content": "\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\nuse zinc_zksync::CallRequestBody;\n\nuse zinc_zksync::CallRequestQuery;\n\nuse zinc_zksync::FeeRequestBody;\n\nuse zinc_zksync::FeeRequestQuery;\n\nuse zinc_zksync::FeeResponseBody;\n\nuse zinc_zksync::TransactionMsg;\n\n\n\nuse crate::network::Network;\n\nuse crate::project::data::input::Input as InputFile;\n\nuse crate::project::data::private_key::PrivateKey as PrivateKeyFile;\n\nuse crate::project::data::Directory as DataDirectory;\n\nuse crate::transaction::error::Error as TransactionError;\n\n\n\nuse self::error::Error;\n\n\n\n///\n\n/// The Zargo package manager `call` subcommand.\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 50, "score": 97656.49162650079 }, { "content": "//!\n\n//! The Zargo package manager `run` subcommand.\n\n//!\n\n\n\npub mod error;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\n\n\nuse crate::executable::compiler::Compiler;\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::private_key::PrivateKey as PrivateKeyFile;\n\nuse crate::project::data::Directory as DataDirectory;\n\nuse crate::project::source::Directory as SourceDirectory;\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 51, "score": 97656.4650197162 }, { "content": " private_key_path.push(zinc_const::file_name::PRIVATE_KEY.to_owned());\n\n\n\n let input = InputFile::try_from_path(&input_path).map_err(Error::InputFile)?;\n\n let arguments = input\n\n .inner\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(\"arguments\")\n\n .cloned()\n\n .ok_or(Error::InvalidInputData)?\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(self.method.as_str())\n\n .cloned()\n\n .ok_or(Error::InvalidInputData)?;\n\n\n\n let private_key =\n\n PrivateKeyFile::try_from(&private_key_path).map_err(Error::PrivateKeyFile)?;\n\n\n\n let signer_private_key: H256 = private_key\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 52, "score": 97654.27001458012 }, { "content": " self.verbosity,\n\n manifest.project.name.as_str(),\n\n manifest.project.version.as_str(),\n\n &manifest_path,\n\n &data_directory_path,\n\n &source_directory_path,\n\n &binary_path,\n\n false,\n\n )\n\n .map_err(Error::Compiler)?;\n\n\n\n let bytecode = BytecodeFile::try_from(&binary_path).map_err(Error::BinaryFile)?;\n\n\n\n let input = InputFile::try_from_path(&input_path).map_err(Error::InputFile)?;\n\n let arguments = input\n\n .inner\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(\"arguments\")\n\n .cloned()\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 53, "score": 97654.26326905153 }, { "content": "use zinc_zksync::QueryRequestQuery;\n\n\n\nuse 
crate::network::Network;\n\nuse crate::project::data::input::Input as InputFile;\n\nuse crate::project::data::Directory as DataDirectory;\n\n\n\nuse self::error::Error;\n\n\n\n///\n\n/// The Zargo package manager `query` subcommand.\n\n///\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(about = \"Queries a contract storage or calls an immutable method\")]\n\npub struct Command {\n\n /// Prints more logs, if passed several times.\n\n #[structopt(short = \"v\", long = \"verbose\", parse(from_occurrences))]\n\n pub verbosity: usize,\n\n\n\n /// The path to the Zinc project manifest file.\n\n #[structopt(\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 54, "score": 97654.19541086144 }, { "content": " .inner\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(\"arguments\")\n\n .cloned()\n\n .ok_or(Error::InvalidInputData)?\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(method)\n\n .cloned()\n\n .ok_or(Error::InvalidInputData)?;\n\n\n\n eprintln!(\n\n \" {} method `{}` of the contract `{} v{}` with address {} on network `{}`\",\n\n \"Querying\".bright_green(),\n\n method,\n\n manifest.project.name,\n\n manifest.project.version,\n\n self.address,\n\n network,\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 55, "score": 97653.43969630574 }, { "content": "use crate::project::source::Directory as SourceDirectory;\n\n\n\nuse self::error::Error;\n\n\n\n///\n\n/// The Zargo package manager `publish` subcommand.\n\n///\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(about = \"Uploads the smart contract to the specified network\")]\n\npub struct Command {\n\n /// Prints more logs, if passed several times.\n\n #[structopt(short = \"v\", long = \"verbose\", parse(from_occurrences))]\n\n pub verbosity: usize,\n\n\n\n /// The path to the Zinc project manifest file.\n\n #[structopt(\n\n long = \"manifest-path\",\n\n parse(from_os_str),\n\n default_value = \"./Zargo.toml\"\n\n )]\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 56, "score": 97652.95231174538 }, { "content": " _ => return Err(Error::NotAContract),\n\n }\n\n\n\n let mut manifest_path = self.manifest_path;\n\n if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n\n\n\n let arguments = match self.method {\n\n Some(ref method) => {\n\n let data_directory_path = DataDirectory::path(&manifest_path);\n\n let mut input_path = data_directory_path;\n\n input_path.push(format!(\n\n \"{}.{}\",\n\n zinc_const::file_name::INPUT,\n\n zinc_const::extension::JSON,\n\n ));\n\n\n\n let input = InputFile::try_from_path(&input_path).map_err(Error::InputFile)?;\n\n let arguments = input\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 57, "score": 97652.87081272034 }, { "content": " .ok_or(Error::InvalidInputData)?\n\n .as_object()\n\n .ok_or(Error::InvalidInputData)?\n\n .get(zinc_const::contract::CONSTRUCTOR_NAME)\n\n .cloned()\n\n .ok_or(Error::ConstructorArgumentsNotFound)?;\n\n\n\n if !verifying_key_path.exists() {\n\n VirtualMachine::setup_contract(\n\n self.verbosity,\n\n &binary_path,\n\n zinc_const::contract::CONSTRUCTOR_NAME,\n\n &proving_key_path,\n\n &verifying_key_path,\n\n )\n\n .map_err(Error::VirtualMachine)?;\n\n }\n\n\n\n let verifying_key =\n\n VerifyingKeyFile::try_from(&verifying_key_path).map_err(Error::VerifyingKeyFile)?;\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 58, "score": 97652.83540124651 }, { "content": " )\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n 
.json(&FeeRequestBody::new(arguments.clone(), transaction))\n\n .build()\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .await\n\n .map_err(Error::HttpRequest)?;\n\n\n\n if !http_response.status().is_success() {\n\n return Err(Error::ActionFailed(format!(\n\n \"HTTP error ({}) {}\",\n\n http_response.status(),\n\n http_response\n\n .text()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )));\n\n }\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 59, "score": 97652.66105145993 }, { "content": " .execute(\n\n http_client\n\n .request(\n\n Method::PUT,\n\n Url::parse_with_params(\n\n format!(\"{}{}\", url, zinc_const::zandbox::CONTRACT_QUERY_URL).as_str(),\n\n QueryRequestQuery::new(address, self.method, network.into()),\n\n )\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .json(&QueryRequestBody::new(arguments))\n\n .build()\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .await\n\n .map_err(Error::HttpRequest)?;\n\n\n\n if !http_response.status().is_success() {\n\n return Err(Error::ActionFailed(format!(\n\n \"HTTP error ({}) {}\",\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 60, "score": 97652.1050638762 }, { "content": " Url::parse_with_params(\n\n format!(\"{}{}\", url, zinc_const::zandbox::CONTRACT_CALL_URL).as_str(),\n\n CallRequestQuery::new(address, self.method, network.into()),\n\n )\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .json(&CallRequestBody::new(arguments, transaction))\n\n .build()\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .await\n\n .map_err(Error::HttpRequest)?;\n\n\n\n if !http_response.status().is_success() {\n\n return Err(Error::ActionFailed(format!(\n\n \"HTTP error ({}) {}\",\n\n http_response.status(),\n\n http_response\n\n .text()\n\n .await\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 61, "score": 97652.00290566591 }, { "content": "\n\nuse self::error::Error;\n\n\n\n///\n\n/// The Zargo package manager `run` subcommand.\n\n///\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(about = \"Runs the project and prints its output\")]\n\npub struct Command {\n\n /// Prints more logs, if passed several times.\n\n #[structopt(short = \"v\", long = \"verbose\", parse(from_occurrences))]\n\n pub verbosity: usize,\n\n\n\n /// The path to the Zinc project manifest file.\n\n #[structopt(\n\n long = \"manifest-path\",\n\n parse(from_os_str),\n\n default_value = \"./Zargo.toml\"\n\n )]\n\n pub manifest_path: PathBuf,\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 62, "score": 97651.7109952442 }, { "content": " PublishRequestQuery::new(\n\n manifest.project.name,\n\n manifest.project.version,\n\n self.instance,\n\n network.into(),\n\n ),\n\n )\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .json(&PublishRequestBody::new(\n\n source,\n\n bytecode.inner,\n\n arguments,\n\n verifying_key.inner,\n\n ))\n\n .build()\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .await\n\n .map_err(Error::HttpRequest)?;\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 63, "score": 97651.06594363195 }, { "content": "\n\nuse zinc_manifest::Manifest;\n\nuse zinc_manifest::ProjectType;\n\nuse zinc_zksync::InitializeRequestBody;\n\nuse zinc_zksync::InitializeRequestQuery;\n\nuse zinc_zksync::InitializeResponseBody;\n\nuse zinc_zksync::PublishRequestBody;\n\nuse zinc_zksync::PublishRequestQuery;\n\nuse zinc_zksync::PublishResponseBody;\n\nuse zinc_zksync::Source;\n\n\n\nuse 
crate::executable::compiler::Compiler;\n\nuse crate::executable::virtual_machine::VirtualMachine;\n\nuse crate::network::Network;\n\nuse crate::project::build::bytecode::Bytecode as BytecodeFile;\n\nuse crate::project::build::Directory as BuildDirectory;\n\nuse crate::project::data::input::Input as InputFile;\n\nuse crate::project::data::private_key::PrivateKey as PrivateKeyFile;\n\nuse crate::project::data::verifying_key::VerifyingKey as VerifyingKeyFile;\n\nuse crate::project::data::Directory as DataDirectory;\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 64, "score": 97651.04996332899 }, { "content": " ///\n\n /// Executes the command.\n\n ///\n\n pub async fn execute(self) -> Result<(), Error> {\n\n let network = zksync::Network::from_str(self.network.as_str())\n\n .map(Network::from)\n\n .map_err(Error::NetworkInvalid)?;\n\n\n\n let url = network\n\n .try_into_url()\n\n .map_err(Error::NetworkUnimplemented)?;\n\n\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract => {}\n\n _ => return Err(Error::NotAContract),\n\n }\n\n\n\n let mut manifest_path = self.manifest_path;\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 65, "score": 97650.11857951984 }, { "content": " if self.path.exists() {\n\n return Err(Error::DirectoryAlreadyExists(\n\n self.path.as_os_str().to_owned(),\n\n ));\n\n }\n\n fs::create_dir_all(&self.path).map_err(|error| {\n\n Error::CreatingRootDirectory(self.path.as_os_str().to_owned(), error)\n\n })?;\n\n\n\n Manifest::new(&project_name, project_type)\n\n .write_to(&self.path)\n\n .map_err(Error::Manifest)?;\n\n\n\n SourceDirectory::create(&self.path).map_err(Error::SourceDirectory)?;\n\n\n\n match project_type {\n\n ProjectType::Circuit => {\n\n if !CircuitFile::exists_at(&self.path) {\n\n CircuitFile::new(&project_name)\n\n .write_to(&self.path)\n", "file_path": "zargo/src/arguments/command/new/mod.rs", "rank": 66, "score": 97650.07311813346 }, { "content": " ///\n\n /// Executes the command.\n\n ///\n\n pub async fn execute(self) -> Result<(), Error> {\n\n let address = self.address[\"0x\".len()..]\n\n .parse()\n\n .map_err(Error::InvalidContractAddress)?;\n\n\n\n let network = zksync::Network::from_str(self.network.as_str())\n\n .map(Network::from)\n\n .map_err(Error::NetworkInvalid)?;\n\n\n\n let url = network\n\n .try_into_url()\n\n .map_err(Error::NetworkUnimplemented)?;\n\n\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract => {}\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 67, "score": 97650.07406138183 }, { "content": " ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let _manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n let mut manifest_path = self.manifest_path;\n\n if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n\n\n\n DataDirectory::remove(&manifest_path).map_err(Error::DataDirectory)?;\n\n BuildDirectory::remove(&manifest_path).map_err(Error::BuildDirectory)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/clean/mod.rs", "rank": 68, "score": 97649.94034585974 }, { "content": "\n\n if !self.path.exists() {\n\n return Err(Error::DirectoryDoesNotExist(\n\n self.path.as_os_str().to_owned(),\n\n ));\n\n }\n\n\n\n if Manifest::exists_at(&self.path) {\n\n return Err(Error::CircuitAlreadyInitialized(\n\n 
self.path.as_os_str().to_owned(),\n\n ));\n\n }\n\n Manifest::new(&project_name, project_type)\n\n .write_to(&self.path)\n\n .map_err(Error::Manifest)?;\n\n\n\n SourceDirectory::create(&self.path).map_err(Error::SourceDirectory)?;\n\n\n\n match project_type {\n\n ProjectType::Circuit => {\n", "file_path": "zargo/src/arguments/command/init/mod.rs", "rank": 69, "score": 97649.81104802909 }, { "content": " .map_err(Error::InvalidContractAddress)?;\n\n\n\n let network = zksync::Network::from_str(self.network.as_str())\n\n .map(Network::from)\n\n .map_err(Error::NetworkInvalid)?;\n\n\n\n let url = network\n\n .try_into_url()\n\n .map_err(Error::NetworkUnimplemented)?;\n\n\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n eprintln!(\n\n \" {} method `{}` of the contract `{} v{}` with address {} on network `{}`\",\n\n \"Calling\".bright_green(),\n\n self.method,\n\n manifest.project.name,\n\n manifest.project.version,\n\n self.address,\n\n network,\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 70, "score": 97649.73611411222 }, { "content": " .ok_or(Error::InvalidInputData)?\n\n .get(\"msg\")\n\n .cloned()\n\n .ok_or(Error::InvalidInputData)?;\n\n let msg = TransactionMsg::try_from(&msg)\n\n .map_err(TransactionError::Parsing)\n\n .map_err(Error::Transaction)?;\n\n let transaction = crate::transaction::try_into_zksync(msg.clone(), &wallet, None)\n\n .await\n\n .map_err(Error::Transaction)?;\n\n\n\n let http_client = HttpClient::new();\n\n let http_response = http_client\n\n .execute(\n\n http_client\n\n .request(\n\n Method::PUT,\n\n Url::parse_with_params(\n\n format!(\"{}{}\", url, zinc_const::zandbox::CONTRACT_FEE_URL).as_str(),\n\n FeeRequestQuery::new(address, self.method.clone(), network.into()),\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 71, "score": 97649.62365025912 }, { "content": " pub method: Option<String>,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract if self.method.is_none() => return Err(Error::MethodMissing),\n\n _ => {}\n\n }\n\n\n\n let mut manifest_path = self.manifest_path.clone();\n\n if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n\n\n", "file_path": "zargo/src/arguments/command/verify/mod.rs", "rank": 72, "score": 97649.4789362758 }, { "content": " pub method: Option<String>,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract if self.method.is_none() => return Err(Error::MethodMissing),\n\n _ => {}\n\n }\n\n\n\n let mut manifest_path = self.manifest_path.clone();\n\n if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n\n\n", "file_path": "zargo/src/arguments/command/setup/mod.rs", "rank": 73, "score": 97649.4789362758 }, { "content": " #[structopt(long = \"method\")]\n\n pub method: Option<String>,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract if self.method.is_none() => return 
Err(Error::MethodMissing),\n\n _ => {}\n\n }\n\n\n\n let mut manifest_path = self.manifest_path.clone();\n\n if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n", "file_path": "zargo/src/arguments/command/prove/mod.rs", "rank": 74, "score": 97649.39031789386 }, { "content": " &manifest_path,\n\n &data_directory_path,\n\n &source_directory_path,\n\n &binary_path,\n\n true,\n\n )\n\n .map_err(Error::Compiler)?;\n\n\n\n VirtualMachine::test(self.verbosity, &binary_path).map_err(Error::VirtualMachine)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/test/mod.rs", "rank": 75, "score": 97649.39404297392 }, { "content": "\n\n /// The contract method to call. Only for contracts.\n\n #[structopt(long = \"method\")]\n\n pub method: Option<String>,\n\n\n\n /// Runs the release build.\n\n #[structopt(long = \"release\")]\n\n pub is_release: bool,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let manifest = Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n match manifest.project.r#type {\n\n ProjectType::Contract if self.method.is_none() => return Err(Error::MethodMissing),\n\n _ => {}\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 76, "score": 97649.3614063231 }, { "content": " pub path: PathBuf,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(mut self) -> Result<(), Error> {\n\n let project_name = match self.name.take() {\n\n Some(name) => name,\n\n None => self\n\n .path\n\n .file_stem()\n\n .ok_or_else(|| Error::ProjectNameInvalid(self.path.as_os_str().to_owned()))?\n\n .to_string_lossy()\n\n .to_string(),\n\n };\n\n\n\n let project_type =\n\n ProjectType::from_str(self.r#type.as_str()).map_err(Error::ProjectTypeInvalid)?;\n", "file_path": "zargo/src/arguments/command/init/mod.rs", "rank": 77, "score": 97649.33280039989 }, { "content": " #[structopt(parse(from_os_str))]\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(mut self) -> Result<(), Error> {\n\n let project_name = self.name.take().unwrap_or(\n\n self.path\n\n .file_stem()\n\n .ok_or_else(|| Error::ProjectNameInvalid(self.path.as_os_str().to_owned()))?\n\n .to_string_lossy()\n\n .to_string(),\n\n );\n\n\n\n let project_type =\n\n ProjectType::from_str(self.r#type.as_str()).map_err(Error::ProjectTypeInvalid)?;\n\n\n", "file_path": "zargo/src/arguments/command/new/mod.rs", "rank": 78, "score": 97649.30449530281 }, { "content": " .map_err(Error::CircuitFile)?;\n\n }\n\n }\n\n ProjectType::Contract => {\n\n if !ContractFile::exists_at(&self.path) {\n\n ContractFile::new(&project_name)\n\n .write_to(&self.path)\n\n .map_err(Error::ContractFile)?;\n\n }\n\n }\n\n }\n\n\n\n eprintln!(\n\n \" {} {} `{}`\",\n\n \"Created\".bright_green(),\n\n project_type,\n\n project_name,\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/new/mod.rs", "rank": 79, "score": 97649.14411549042 }, { "content": " /// The subcommand variant.\n\n #[structopt(subcommand)]\n\n pub command: Command,\n\n}\n\n\n\nimpl Arguments {\n\n ///\n\n /// A shortcut constructor.\n\n ///\n\n pub fn new() -> Self {\n\n Self::from_args()\n\n }\n\n}\n", "file_path": "zinc-vm/src/zvm/arguments/mod.rs", "rank": 80, "score": 97648.9974483345 }, { "content": " )\n\n .map_err(Error::Compiler)?;\n\n }\n\n\n\n match self.method {\n\n Some(method) => VirtualMachine::run_contract(\n\n self.verbosity,\n\n 
&binary_path,\n\n &input_path,\n\n &output_path,\n\n method.as_str(),\n\n ),\n\n None => {\n\n VirtualMachine::run_circuit(self.verbosity, &binary_path, &input_path, &output_path)\n\n }\n\n }\n\n .map_err(Error::VirtualMachine)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 81, "score": 97648.95272507038 }, { "content": " .expect(zinc_const::panic::DATA_CONVERSION)\n\n .replace(\"\\\"\", \"\")\n\n );\n\n\n\n let private_key =\n\n PrivateKeyFile::try_from(&private_key_path).map_err(Error::PrivateKeyFile)?;\n\n\n\n let signer_private_key: H256 = private_key\n\n .inner\n\n .parse()\n\n .map_err(Error::SenderPrivateKeyInvalid)?;\n\n let signer_address = PackedEthSignature::address_from_private_key(&signer_private_key)\n\n .map_err(Error::SenderAddressDeriving)?;\n\n\n\n let wallet_credentials = zksync::WalletCredentials::from_eth_signer(\n\n signer_address,\n\n PrivateKeySigner::new(signer_private_key),\n\n network.into(),\n\n )\n\n .await\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 82, "score": 97648.8415028849 }, { "content": " .expect(zinc_const::panic::DATA_CONVERSION);\n\n let wallet = zksync::Wallet::new(zksync::Provider::new(network.into()), wallet_credentials)\n\n .await\n\n .map_err(Error::WalletInitialization)?;\n\n\n\n let initial_deposit_amount: BigUint =\n\n zinc_math::bigint_from_str(self.deposit_amount.as_str())\n\n .map_err(Error::InitialDepositAmount)?\n\n .to_biguint()\n\n .expect(zinc_const::panic::DATA_CONVERSION);\n\n let initial_transfer = crate::transaction::new_initial(\n\n &wallet,\n\n response.address,\n\n self.deposit_token,\n\n initial_deposit_amount,\n\n )\n\n .await\n\n .map_err(Error::Transaction)?;\n\n\n\n let http_response = http_client\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 83, "score": 97648.7949544212 }, { "content": " .inner\n\n .parse()\n\n .map_err(Error::SenderPrivateKeyInvalid)?;\n\n let signer_address = PackedEthSignature::address_from_private_key(&signer_private_key)\n\n .map_err(Error::SenderAddressDeriving)?;\n\n\n\n let wallet_credentials = zksync::WalletCredentials::from_eth_signer(\n\n signer_address,\n\n PrivateKeySigner::new(signer_private_key),\n\n network.into(),\n\n )\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION);\n\n let wallet = zksync::Wallet::new(zksync::Provider::new(network.into()), wallet_credentials)\n\n .await\n\n .map_err(Error::WalletInitialization)?;\n\n\n\n let msg = input\n\n .inner\n\n .as_object()\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 84, "score": 97648.7949544212 }, { "content": " if !CircuitFile::exists_at(&self.path) {\n\n CircuitFile::new(&project_name)\n\n .write_to(&self.path)\n\n .map_err(Error::CircuitFile)?;\n\n }\n\n }\n\n ProjectType::Contract => {\n\n if !ContractFile::exists_at(&self.path) {\n\n ContractFile::new(&project_name)\n\n .write_to(&self.path)\n\n .map_err(Error::ContractFile)?;\n\n }\n\n }\n\n }\n\n\n\n eprintln!(\n\n \" {} {} `{}`\",\n\n \"Created\".bright_green(),\n\n project_type,\n\n project_name,\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/init/mod.rs", "rank": 85, "score": 97648.74088508991 }, { "content": "///\n\n/// The unit test summary.\n\n///\n\n#[derive(Default)]\n\npub struct Summary {\n\n pub passed: u8,\n\n pub failed: u8,\n\n pub invalid: u8,\n\n pub ignored: u8,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub fn execute(self) -> Result<(), Error> {\n\n let manifest = 
Manifest::try_from(&self.manifest_path).map_err(Error::Manifest)?;\n\n\n\n let mut manifest_path = self.manifest_path.clone();\n\n if manifest_path.is_file() {\n", "file_path": "zargo/src/arguments/command/test/mod.rs", "rank": 86, "score": 97648.74088508991 }, { "content": " self.verbosity,\n\n &binary_path,\n\n &proving_key_path,\n\n &input_path,\n\n &output_path,\n\n ),\n\n }\n\n .map_err(Error::VirtualMachine)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/prove/mod.rs", "rank": 87, "score": 97648.71586535923 }, { "content": "\n\n if !http_response.status().is_success() {\n\n return Err(Error::ActionFailed(format!(\n\n \"HTTP error ({}) {}\",\n\n http_response.status(),\n\n http_response\n\n .text()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )));\n\n }\n\n\n\n let response = http_response\n\n .json::<PublishResponseBody>()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION);\n\n println!(\n\n \" {} {}\",\n\n \"Address\".bright_green(),\n\n serde_json::to_string(&response.address)\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 88, "score": 97648.64155294203 }, { "content": " &verifying_key_path,\n\n ),\n\n None => VirtualMachine::setup_circuit(\n\n self.verbosity,\n\n &binary_path,\n\n &proving_key_path,\n\n &verifying_key_path,\n\n ),\n\n }\n\n .map_err(Error::VirtualMachine)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/setup/mod.rs", "rank": 89, "score": 97648.28935120266 }, { "content": " );\n\n\n\n Some(arguments)\n\n }\n\n None => {\n\n eprintln!(\n\n \" {} the storage of the contract `{} v{}` with address {} on network `{}`\",\n\n \"Querying\".bright_green(),\n\n manifest.project.name,\n\n manifest.project.version,\n\n self.address,\n\n network,\n\n );\n\n\n\n None\n\n }\n\n };\n\n\n\n let http_client = HttpClient::new();\n\n let http_response = http_client\n", "file_path": "zargo/src/arguments/command/query/mod.rs", "rank": 90, "score": 97648.26960767714 }, { "content": " .execute(\n\n http_client\n\n .request(\n\n Method::PUT,\n\n Url::parse_with_params(\n\n format!(\"{}{}\", url, zinc_const::zandbox::CONTRACT_INITIALIZE_URL)\n\n .as_str(),\n\n InitializeRequestQuery::new(response.address, network.into()),\n\n )\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .json(&InitializeRequestBody::new(initial_transfer))\n\n .build()\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )\n\n .await\n\n .map_err(Error::HttpRequest)?;\n\n\n\n if !http_response.status().is_success() {\n\n return Err(Error::ActionFailed(format!(\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 91, "score": 97648.2547845423 }, { "content": " self.verbosity,\n\n &binary_path,\n\n &verifying_key_path,\n\n &output_path,\n\n method.as_str(),\n\n ),\n\n _ => VirtualMachine::verify_circuit(\n\n self.verbosity,\n\n &binary_path,\n\n &verifying_key_path,\n\n &output_path,\n\n ),\n\n }\n\n .map_err(Error::VirtualMachine)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/verify/mod.rs", "rank": 92, "score": 97648.02862622164 }, { "content": " zinc_const::file_name::OUTPUT,\n\n zinc_const::extension::JSON,\n\n ));\n\n if self.method.is_some() && !PrivateKeyFile::exists_at(&data_directory_path) {\n\n PrivateKeyFile::default()\n\n .write_to(&data_directory_path)\n\n .map_err(Error::PrivateKeyFile)?;\n\n }\n\n\n\n BuildDirectory::create(&manifest_path).map_err(Error::BuildDirectory)?;\n\n let build_directory_path = BuildDirectory::path(&manifest_path);\n\n let 
mut binary_path = build_directory_path;\n\n binary_path.push(format!(\n\n \"{}.{}\",\n\n zinc_const::file_name::BINARY,\n\n zinc_const::extension::BINARY\n\n ));\n\n\n\n if self.is_release {\n\n Compiler::build_release(\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 93, "score": 97647.95036197122 }, { "content": " private_key_path.push(zinc_const::file_name::PRIVATE_KEY.to_owned());\n\n\n\n BuildDirectory::create(&manifest_path).map_err(Error::BuildDirectory)?;\n\n let build_directory_path = BuildDirectory::path(&manifest_path);\n\n let mut binary_path = build_directory_path;\n\n binary_path.push(format!(\n\n \"{}.{}\",\n\n zinc_const::file_name::BINARY,\n\n zinc_const::extension::BINARY\n\n ));\n\n\n\n if let ProjectType::Contract = manifest.project.r#type {\n\n if !PrivateKeyFile::exists_at(&data_directory_path) {\n\n PrivateKeyFile::default()\n\n .write_to(&data_directory_path)\n\n .map_err(Error::PrivateKeyFile)?;\n\n }\n\n }\n\n\n\n Compiler::build_release(\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 94, "score": 97647.85823470034 }, { "content": " \"HTTP error ({}) {}\",\n\n http_response.status(),\n\n http_response\n\n .text()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION),\n\n )));\n\n }\n\n\n\n let response = http_response\n\n .json::<InitializeResponseBody>()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION);\n\n println!(\" {} {}\", \"Account ID\".bright_green(), response.account_id);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 95, "score": 97647.591079562 }, { "content": " self.verbosity,\n\n manifest.project.name.as_str(),\n\n manifest.project.version.as_str(),\n\n &manifest_path,\n\n &data_directory_path,\n\n &source_directory_path,\n\n &binary_path,\n\n false,\n\n )\n\n .map_err(Error::Compiler)?;\n\n } else {\n\n Compiler::build_debug(\n\n self.verbosity,\n\n manifest.project.name.as_str(),\n\n manifest.project.version.as_str(),\n\n &manifest_path,\n\n &data_directory_path,\n\n &source_directory_path,\n\n &binary_path,\n\n false,\n", "file_path": "zargo/src/arguments/command/run/mod.rs", "rank": 96, "score": 97647.27048068609 }, { "content": " if manifest_path.is_file() {\n\n manifest_path.pop();\n\n }\n\n\n\n let source_directory_path = SourceDirectory::path(&manifest_path);\n\n let source = Source::try_from_path(&source_directory_path, true).map_err(Error::Source)?;\n\n\n\n DataDirectory::create(&manifest_path).map_err(Error::DataDirectory)?;\n\n let data_directory_path = DataDirectory::path(&manifest_path);\n\n let mut input_path = data_directory_path.clone();\n\n input_path.push(format!(\n\n \"{}.{}\",\n\n zinc_const::file_name::INPUT,\n\n zinc_const::extension::JSON,\n\n ));\n\n let mut proving_key_path = data_directory_path.clone();\n\n proving_key_path.push(zinc_const::file_name::PROVING_KEY);\n\n let mut verifying_key_path = data_directory_path.clone();\n\n verifying_key_path.push(zinc_const::file_name::VERIFYING_KEY.to_owned());\n\n let mut private_key_path = data_directory_path.clone();\n", "file_path": "zargo/src/arguments/command/publish/mod.rs", "rank": 97, "score": 97647.26632538011 }, { "content": " /// Sets the ETH address of the contract.\n\n #[structopt(long = \"address\")]\n\n pub address: String,\n\n\n\n /// Sets the contract method to call.\n\n #[structopt(long = \"method\")]\n\n pub method: String,\n\n\n\n /// Sets the path to the sender private key.\n\n #[structopt(long = \"private-key\", default_value = \"./data/private_key\")]\n\n 
pub private_key_path: PathBuf,\n\n}\n\n\n\nimpl Command {\n\n ///\n\n /// Executes the command.\n\n ///\n\n pub async fn execute(self) -> Result<(), Error> {\n\n let address = self.address[\"0x\".len()..]\n\n .parse()\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 98, "score": 97647.20675578453 }, { "content": "\n\n let response = http_response\n\n .json::<FeeResponseBody>()\n\n .await\n\n .expect(zinc_const::panic::DATA_CONVERSION);\n\n let contract_fee = response.fee;\n\n let transaction = crate::transaction::try_into_zksync(\n\n msg,\n\n &wallet,\n\n Some(zinc_zksync::num_compat_forward(contract_fee)),\n\n )\n\n .await\n\n .map_err(Error::Transaction)?;\n\n\n\n let http_client = HttpClient::new();\n\n let http_response = http_client\n\n .execute(\n\n http_client\n\n .request(\n\n Method::POST,\n", "file_path": "zargo/src/arguments/command/call/mod.rs", "rank": 99, "score": 97647.14543053931 } ]
Rust
src/scene/save_scene.rs
atsisy/subterranean
8c844e95b47e441c43709bd7f6aefa2c15da880b
use ggez::graphics as ggraphics;

use torifune::core::Clock;
use torifune::graphics::drawable::*;
use torifune::graphics::object::*;

use crate::core::{FontID, SavableData, SoundID, SuzuContext, TextureID, TileBatchTextureID};
use crate::object::effect_object;
use crate::object::save_scene_object::*;
use crate::object::util_object::*;
use crate::scene::*;

use crate::flush_delay_event;

pub struct SaveScene {
    background: UniTexture,
    exit_button: SelectButton,
    event_list: DelayEventList<Self>,
    scene_transition_effect: Option<effect_object::ScreenTileEffect>,
    save_entry_table: SaveEntryTable,
    scene_transition: SceneID,
    scene_transition_type: SceneTransition,
    clock: Clock,
}

impl SaveScene {
    pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self {
        let save_data_list = (1..=4)
            .map(|slot_index| match SavableData::new_load(slot_index) {
                Ok(savable_data) => Some(savable_data),
                Err(_) => None,
            })
            .collect();

        let save_entry_table = SaveEntryTable::new(
            ctx,
            numeric::Rect::new(50.0, 50.0, 1248.0, 672.0),
            save_data_list,
            0,
        );

        let background = UniTexture::new(
            ctx.ref_texture(TextureID::JpHouseTexture),
            numeric::Point2f::new(0.0, 0.0),
            numeric::Vector2f::new(1.0, 1.0),
            0.0,
            0,
        );

        let scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Open,
            effect_object::TilingEffectType::WholeTile,
            -128,
            0,
        ));

        let texture = Box::new(TextButtonTexture::new(
            ctx,
            numeric::Point2f::new(0.0, 0.0),
            "戻る".to_string(),
            FontInformation::new(
                ctx.resource.get_font(FontID::Cinema),
                numeric::Vector2f::new(24.0, 24.0),
                ggraphics::Color::from_rgba_u32(0xf6e1d5ff),
            ),
            10.0,
            ggraphics::Color::from_rgba_u32(0x5a4f3fff),
            0,
        ));

        let exit_button = SelectButton::new(
            ctx,
            numeric::Rect::new(
                1050.0,
                (crate::core::WINDOW_SIZE_Y as f32) - 120.0,
                100.0,
                50.0,
            ),
            texture,
        );

        let mut event_list = DelayEventList::new();
        event_list.add_event(
            Box::new(move |slf: &mut Self, _, _| {
                slf.scene_transition_effect = None;
            }),
            31,
        );

        SaveScene {
            background: background,
            event_list: event_list,
            exit_button: exit_button,
            scene_transition_effect: scene_transition_effect,
            save_entry_table: save_entry_table,
            scene_transition: SceneID::Save,
            scene_transition_type: SceneTransition::Keep,
            clock: 0,
        }
    }

    fn exit_scene_poping<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) {
        self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Close,
            effect_object::TilingEffectType::WholeTile,
            -128,
            t,
        ));

        self.event_list.add_event(
            Box::new(move |slf: &mut Self, _, _| {
                slf.scene_transition = SceneID::Scenario;
                slf.scene_transition_type = SceneTransition::PoppingTransition;
            }),
            31,
        );

        if let Some(save_data) = ctx.savable_data.as_mut() {
            let _ = save_data.get_scenario_save_data();
        }
    }

    fn load_and_scene_swap<'a>(&mut self, ctx: &mut SuzuContext<'a>, slot: u8, t: Clock) {
        match SavableData::new_load(slot) {
            Ok(data) => {
                ctx.savable_data.replace(data);
            }
            Err(_) => return,
        }

        self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Close,
            effect_object::TilingEffectType::WholeTile,
            -128,
            t,
        ));

        self.event_list.add_event(
            Box::new(move |slf: &mut Self, ctx, _| {
                slf.scene_transition = SceneID::Scenario;
                slf.scene_transition_type = SceneTransition::SwapTransition;
                ctx.resource.stop_bgm(ctx.context, SoundID::Title);
            }),
            31,
        );
    }
}

impl SceneManager for SaveScene {
    fn mouse_button_up_event<'a>(
        &mut self,
        ctx: &mut SuzuContext<'a>,
        _button: ginput::mouse::MouseButton,
        point: numeric::Point2f,
    ) {
        let t = self.get_current_clock();

        match self.save_entry_table.click_handler(ctx, point) {
            SaveDataOperation::Loading(slot) => {
                self.load_and_scene_swap(ctx, slot, t);
            }
            _ => (),
        }

        if self.exit_button.contains(ctx.context, point) {
            self.exit_scene_poping(ctx, t);
        }
    }

    fn pre_process<'a>(&mut self, ctx: &mut SuzuContext<'a>) {
        let t = self.get_current_clock();

        if let Some(transition_effect) = self.scene_transition_effect.as_mut() {
            transition_effect.effect(ctx.context, t);
            ctx.process_utility.redraw();
        }

        if flush_delay_event!(self, self.event_list, ctx, self.get_current_clock()) > 0 {
            ctx.process_utility.redraw();
        }
    }

    fn drawing_process(&mut self, ctx: &mut ggez::Context) {
        self.background.draw(ctx).unwrap();
        self.save_entry_table.draw(ctx).unwrap();
        self.exit_button.draw(ctx).unwrap();

        if let Some(transition_effect) = self.scene_transition_effect.as_mut() {
            transition_effect.draw(ctx).unwrap();
        }
    }

    fn post_process<'a>(&mut self, _ctx: &mut SuzuContext<'a>) -> SceneTransition {
        self.update_current_clock();
        self.scene_transition_type
    }

    fn transition(&self) -> SceneID {
        self.scene_transition
    }

    fn get_current_clock(&self) -> Clock {
        self.clock
    }

    fn update_current_clock(&mut self) {
        self.clock += 1;
    }
}
use ggez::graphics as ggraphics;

use torifune::core::Clock;
use torifune::graphics::drawable::*;
use torifune::graphics::object::*;

use crate::core::{FontID, SavableData, SoundID, SuzuContext, TextureID, TileBatchTextureID};
use crate::object::effect_object;
use crate::object::save_scene_object::*;
use crate::object::util_object::*;
use crate::scene::*;

use crate::flush_delay_event;

pub struct SaveScene {
    background: UniTexture,
    exit_button: SelectButton,
    event_list: DelayEventList<Self>,
    scene_transition_effect: Option<effect_object::ScreenTileEffect>,
    save_entry_table: SaveEntryTable,
    scene_transition: SceneID,
    scene_transition_type: SceneTransition,
    clock: Clock,
}

impl SaveScene {
    pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self {
        let save_data_list = (1..=4)
            .map(|slot_index| match SavableData::new_load(slot_index) {
                Ok(savable_data) => Some(savable_data),
                Err(_) => None,
            })
            .collect();

        let save_entry_table = SaveEntryTable::new(
            ctx,
            numeric::Rect::new(50.0, 50.0, 1248.0, 672.0),
            save_data_list,
            0,
        );

        let background = UniTexture::new(
            ctx.ref_texture(TextureID::JpHouseTexture),
            numeric::Point2f::new(0.0, 0.0),
            numeric::Vector2f::new(1.0, 1.0),
            0.0,
            0,
        );

        let scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Open,
            effect_object::TilingEffectType::WholeTile,
            -128,
            0,
        ));

        let texture = Box::new(TextButtonTexture::new(
            ctx,
            numeric::Point2f::new(0.0, 0.0),
            "戻る".to_string(),
            FontInformation::new(
                ctx.resource.get_font(FontID::Cinema),
                numeric::Vector2f::new(24.0, 24.0),
                ggraphics::Color::from_rgba_u32(0xf6e1d5ff),
            ),
            10.0,
            ggraphics::Color::from_rgba_u32(0x5a4f3fff),
            0,
        ));

        let exit_button = SelectButton::new(
            ctx,
            numeric::Rect::new(
                1050.0,
                (crate::core::WINDOW_SIZE_Y as f32) - 120.0,
                100.0,
                50.0,
            ),
            texture,
        );

        let mut event_list = DelayEventList::new();
        event_list.add_event(
            Box::new(move |slf: &mut Self, _, _| {
                slf.scene_transition_effect = None;
            }),
            31,
        );

        SaveScene {
            background: background,
            event_list: event_list,
            exit_button: exit_button,
            scene_transition_effect: scene_transition_effect,
            save_entry_table: save_entry_table,
            scene_transition: SceneID::Save,
            scene_transition_type: SceneTransition::Keep,
            clock: 0,
        }
    }

    fn exit_scene_poping<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) {
        self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Close,
            effect_object::TilingEffectType::WholeTile,
            -128,
            t,
        ));

        self.event_list.add_event(
            Box::new(move |slf: &mut Self, _, _| {
                slf.scene_transition = SceneID::Scenario;
                slf.scene_transition_type = SceneTransition::PoppingTransition;
            }),
            31,
        );

        if let Some(save_data) = ctx.savable_data.as_mut() {
            let _ = save_data.get_scenario_save_data();
        }
    }

    fn load_and_scene_swap<'a>(&mut self, ctx: &mut SuzuContext<'a>, slot: u8, t: Clock) {
        match SavableData::new_load(slot) {
            Ok(data) => {
                ctx.savable_data.replace(data);
            }
            Err(_) => return,
        }

        self.scene_transition_effect = Some(effect_object::ScreenTileEffect::new(
            ctx,
            TileBatchTextureID::Shoji,
            numeric::Rect::new(
                0.0,
                0.0,
                crate::core::WINDOW_SIZE_X as f32,
                crate::core::WINDOW_SIZE_Y as f32,
            ),
            30,
            effect_object::SceneTransitionEffectType::Close,
            effect_object::TilingEffectType::WholeTile,
            -128,
            t,
        ));

        self.event_list.add_event(
            Box::new(move |slf: &mut Self, ctx, _| {
                slf.scene_transition = SceneID::Scenario;
                slf.scene_transition_type = SceneTransition::SwapTransition;
                ctx.resource.stop_bgm(ctx.context, SoundID::Title);
            }),
            31,
        );
    }
}

impl SceneManager for SaveScene {
    fn mouse_button_up_event<'a>(
        &mut self,
        ctx: &mut SuzuContext<'a>,
        _button: ginput::mouse::MouseButton,
        point: numeric::Point2f,
    ) {
        let t = self.get_current_clock();

        match self.save_entry_table.click_handler(ctx, point) {
            SaveDataOperation::Loading(slot) => {
                self.load_and_scene_swap(ctx, slot, t);
            }
            _ => (),
        }

        if self.exit_button.contains(ctx.context, point) {
            self.exit_scene_poping(ctx, t);
        }
    }

    fn pre_process<'a>(&mut self, ctx: &mut SuzuContext<'a>) {
        let t = self.get_current_clock();

        if let Some(transition_effect) = self.scene_transition_effect.as_mut() {
            transition_effect.effect(ctx.context, t);
            ctx.process_utility.redraw();
        }

        if flush_delay_event!(self, self.event_list, ctx, self.get_current_clock()) > 0 {
            ctx.process_utility.redraw();
        }
    }

    fn drawing_process(&mut self, ctx: &mut ggez::Context) {
        self.background.draw(ctx).unwrap();
        self.save_entry_table.draw(ctx).unwrap();
        self.exit_button.draw(ctx).unwrap();
    }

    fn post_process<'a>(&mut self, _ctx: &mut SuzuContext<'a>) -> SceneTransition {
        self.update_current_clock();
        self.scene_transition_type
    }

    fn transition(&self) -> SceneID {
        self.scene_transition
    }

    fn get_current_clock(&self) -> Clock {
        self.clock
    }

    fn update_current_clock(&mut self) {
        self.clock += 1;
    }
}
        if let Some(transition_effect) = self.scene_transition_effect.as_mut() {
            transition_effect.draw(ctx).unwrap();
        }
if_condition
[ { "content": "pub fn clock_needle_angle(hour: u8, minute: u8) -> (f32, f32) {\n\n let hour = hour % 12;\n\n\n\n let angle_per_hour = 2.0 * std::f32::consts::PI / (12.0 * 60.0);\n\n let angle_per_minute = 2.0 * std::f32::consts::PI / 60.0;\n\n\n\n (\n\n ((hour as f32 * 60.0) + minute as f32) * angle_per_hour,\n\n minute as f32 * angle_per_minute,\n\n )\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 0, "score": 227244.49612250197 }, { "content": "pub fn clock_needle_angle_inverse(hour: u8, minute: u8) -> (f32, f32) {\n\n let mut t = clock_needle_angle(hour, minute);\n\n\n\n t.0 += std::f32::consts::PI;\n\n t.1 += std::f32::consts::PI;\n\n\n\n t\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 1, "score": 222407.00001959462 }, { "content": "pub fn create_coins<'a>(ctx: &mut SuzuContext<'a>, mut price: u32, t: Clock) -> Vec<TaskItem> {\n\n let mut coins = Vec::new();\n\n\n\n while price >= 500 {\n\n coins.push(create_coin(ctx, 500, numeric::Point2f::new(0.0, 0.0), t));\n\n price -= 500;\n\n }\n\n\n\n while price >= 100 {\n\n coins.push(create_coin(ctx, 100, numeric::Point2f::new(0.0, 0.0), t));\n\n price -= 100;\n\n }\n\n\n\n while price >= 50 {\n\n coins.push(create_coin(ctx, 50, numeric::Point2f::new(0.0, 0.0), t));\n\n price -= 50;\n\n }\n\n\n\n coins\n\n}\n", "file_path": "src/object/task_object/factory.rs", "rank": 2, "score": 181246.79256992857 }, { "content": "pub fn constant_rotating(speed_rad: f32, start: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n if start <= t {\n\n let rotation_rad = obj.get_rotation();\n\n obj.set_rotation(rotation_rad + speed_rad);\n\n }\n\n EffectFnStatus::EffectContinue\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 3, "score": 171022.1602420271 }, { "content": "///\n\n/// # required_time\n\n/// アニメーションにかける時間\n\n///\n\n/// # start\n\n/// アニメーションが開始する時間, 未来を指定することもできる\n\n///\n\npub fn fade_out(required_time: Clock, start: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n if start <= t {\n\n let elapsed_time = t - start;\n\n if elapsed_time <= required_time {\n\n obj.set_alpha(1.0 - (elapsed_time as f32 / required_time as f32));\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n obj.set_alpha(0.0);\n\n EffectFnStatus::EffectFinish\n\n }\n\n } else {\n\n EffectFnStatus::EffectContinue\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 4, "score": 154714.2850988796 }, { "content": "///\n\n/// # required_time\n\n/// アニメーションにかける時間\n\n///\n\n/// # start\n\n/// アニメーションが開始する時間, 未来を指定することもできる\n\n///\n\npub fn fade_in(required_time: Clock, start: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n if start <= t {\n\n let elapsed_time = t - start;\n\n if elapsed_time < required_time {\n\n obj.set_alpha(elapsed_time as f32 / required_time as f32);\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n obj.set_alpha(1.0);\n\n EffectFnStatus::EffectFinish\n\n }\n\n } else {\n\n EffectFnStatus::EffectContinue\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 5, "score": 154714.2850988796 }, { "content": "pub fn appear_bale_up_from_bottom(required_time: Clock, called_clock: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n let elapsed_time = t - called_clock;\n\n if elapsed_time < required_time {\n\n let mut current_crop = 
obj.get_crop();\n\n current_crop.y = elapsed_time as f32 / required_time as f32;\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n EffectFnStatus::EffectFinish\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 6, "score": 153897.97675193835 }, { "content": "pub fn hide_bale_down_from_top(required_time: Clock, called_clock: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n if called_clock <= t {\n\n let elapsed_time = t - called_clock;\n\n let mut current_crop = obj.get_crop();\n\n if elapsed_time < required_time {\n\n current_crop.h = 1.0 - (elapsed_time as f32 / required_time as f32);\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n current_crop.h = 0.0;\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectFinish\n\n }\n\n } else {\n\n EffectFnStatus::EffectContinue\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 7, "score": 153897.97675193835 }, { "content": "pub fn hide_bale_up_from_bottom(required_time: Clock, called_clock: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n let elapsed_time = t - called_clock;\n\n if elapsed_time < required_time {\n\n let mut current_crop = obj.get_crop();\n\n current_crop.y = 1.0 - (elapsed_time as f32 / required_time as f32);\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n EffectFnStatus::EffectFinish\n\n }\n\n },\n\n )\n\n}\n", "file_path": "src/object/effect.rs", "rank": 8, "score": 153897.97675193835 }, { "content": "pub fn appear_bale_down_from_top(required_time: Clock, called_clock: Clock) -> GenericEffectFn {\n\n Box::new(\n\n move |obj: &mut dyn MovableObject, _: &ggez::Context, t: Clock| {\n\n if called_clock <= t {\n\n let elapsed_time = t - called_clock;\n\n let mut current_crop = obj.get_crop();\n\n if elapsed_time < required_time {\n\n current_crop.h = elapsed_time as f32 / required_time as f32;\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n current_crop.h = 1.0;\n\n obj.set_crop(current_crop);\n\n EffectFnStatus::EffectFinish\n\n }\n\n } else {\n\n EffectFnStatus::EffectContinue\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 9, "score": 153897.97675193835 }, { "content": "pub fn read_from_resources_as_string(ctx: &mut ggez::Context, path: &str) -> String {\n\n let mut file = ggez::filesystem::open(ctx, path).unwrap();\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer).unwrap();\n\n std::str::from_utf8(&buffer).unwrap().to_string()\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
parse_toml_file {\n\n ( $ctx:expr, $path:expr) => {{\n\n let content = crate::core::util::read_from_resources_as_string($ctx, $path);\n\n\n\n content\n\n .parse::<toml::Value>()\n\n .expect(\"Failed to parse toml file\")\n\n }};\n\n}\n", "file_path": "src/core/util.rs", "rank": 10, "score": 151063.1906583576 }, { "content": "pub fn devide_distance(dest: numeric::Point2f, divide_c: f32) -> Option<GenericMoveFn> {\n\n Some(Box::new(\n\n move |p: &dyn tg::object::MovableObject, _t: Clock| {\n\n let current_pos = p.get_position();\n\n\n\n if distance!(current_pos, dest) < 1.0 {\n\n return Some(dest);\n\n }\n\n\n\n if dest == current_pos {\n\n return None;\n\n }\n\n\n\n let offset = numeric::Vector2f::new(dest.x - current_pos.x, dest.y - current_pos.y);\n\n Some(numeric::Point2f::new(\n\n current_pos.x + (offset.x * divide_c),\n\n current_pos.y + (offset.y * divide_c),\n\n ))\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 11, "score": 128215.98534872815 }, { "content": "pub fn gravity_move(\n\n init_speed: f32,\n\n max_speed: f32,\n\n border_y: f32,\n\n a: f32,\n\n) -> Option<GenericMoveFn> {\n\n Some(Box::new(\n\n move |p: &dyn tg::object::MovableObject, t: Clock| {\n\n let p = p.get_position();\n\n let next_spped = ((t as f32) * a) + init_speed;\n\n\n\n let speed = if next_spped < max_speed {\n\n next_spped\n\n } else {\n\n max_speed\n\n };\n\n\n\n let mut next = numeric::Point2f::new(p.x, p.y + (speed));\n\n if next.y > border_y {\n\n next.y = border_y;\n\n }\n\n\n\n Some(next)\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 12, "score": 126464.11000205127 }, { "content": "pub fn main() {\n\n let resource_dir = if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n\n let mut path = path::PathBuf::from(manifest_dir);\n\n path.push(\"resources\");\n\n path\n\n } else {\n\n path::PathBuf::from(\"resources\")\n\n };\n\n\n\n let (mut ctx, event_loop) = ContextBuilder::new(\"suzu\", \"akichi\")\n\n .window_setup(\n\n conf::WindowSetup::default()\n\n .icon(\"/tile_textures/menu_art3.png\")\n\n .title(\"電氣貸本屋\")\n\n .samples(ggez::conf::NumSamples::Four),\n\n )\n\n .add_resource_path(resource_dir)\n\n .window_mode(WindowMode {\n\n width: 1366.0,\n\n height: 768.0,\n", "file_path": "src/main.rs", "rank": 13, "score": 124209.96728784326 }, { "content": "pub fn move_constant_dest(\n\n dest: numeric::Point2f,\n\n speed: numeric::Vector2f,\n\n) -> Option<GenericMoveFn> {\n\n Some(Box::new(\n\n move |p: &dyn tg::object::MovableObject, _t: Clock| {\n\n let current_pos = p.get_position();\n\n\n\n if current_pos == dest {\n\n return None;\n\n }\n\n\n\n let next_pos = numeric::Point2f::new(current_pos.x + speed.x, current_pos.y + speed.y);\n\n\n\n let current_distance = distance!(dest, current_pos);\n\n let next_distance = distance!(dest, next_pos);\n\n\n\n if next_distance < current_distance {\n\n Some(next_pos)\n\n } else {\n\n Some(dest)\n\n }\n\n },\n\n ))\n\n}\n", "file_path": "src/object/move_fn.rs", "rank": 14, "score": 123606.45783991036 }, { "content": "///\n\n/// # required_time\n\n/// アニメーションにかける時間\n\n///\n\n/// # start\n\n/// アニメーションが開始する時間, 未来を指定することもできる\n\n///\n\npub fn alpha_effect(\n\n required_time: Clock,\n\n start: Clock,\n\n init_alpha: u8,\n\n fin_alpha: u8,\n\n) -> GenericEffectFn {\n\n let init_ratio_alpha = init_alpha as f32 / 255.0;\n\n let alpha_offset = fin_alpha as i32 - init_alpha as i32;\n\n let diff_alpha_per_clock = alpha_offset as f32 / 255.0 / required_time as f32;\n\n\n\n Box::new(\n\n move |obj: &mut dyn 
MovableObject, _: &ggez::Context, t: Clock| {\n\n if start <= t {\n\n let elapsed_time = t - start;\n\n if elapsed_time < required_time {\n\n obj.set_alpha(init_ratio_alpha + (diff_alpha_per_clock * elapsed_time as f32));\n\n EffectFnStatus::EffectContinue\n\n } else {\n\n obj.set_alpha(fin_alpha as f32 * (1.0 / 255.0));\n\n EffectFnStatus::EffectFinish\n\n }\n\n } else {\n\n EffectFnStatus::EffectContinue\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/object/effect.rs", "rank": 15, "score": 116821.94091699964 }, { "content": "pub fn stop() -> Option<GenericMoveFn> {\n\n None\n\n}\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 16, "score": 115535.58333554512 }, { "content": "pub fn random_point_in_rect(rect: numeric::Rect) -> numeric::Point2f {\n\n let begin_x = rect.left() as usize;\n\n let begin_y = rect.top() as usize;\n\n\n\n numeric::Point2f::new(\n\n (begin_x + rand::random::<usize>() % rect.w as usize) as f32,\n\n (begin_y + rand::random::<usize>() % rect.h as usize) as f32,\n\n )\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 17, "score": 112034.17283482019 }, { "content": "pub fn find_proper_window_position(\n\n window_rect: numeric::Rect,\n\n outer_rect: numeric::Rect,\n\n) -> numeric::Point2f {\n\n let rect_pos = window_rect.point();\n\n let mut found_pos = numeric::Point2f::new(rect_pos.x, rect_pos.y - window_rect.h);\n\n let window_rect = numeric::Rect::new(\n\n window_rect.x,\n\n rect_pos.y - window_rect.h,\n\n window_rect.w,\n\n window_rect.h,\n\n );\n\n\n\n if window_rect.right() > outer_rect.right() {\n\n found_pos.x -= window_rect.right() - outer_rect.right();\n\n } else if window_rect.left() < outer_rect.left() {\n\n found_pos.x += outer_rect.left() - window_rect.right();\n\n }\n\n\n\n if window_rect.bottom() > outer_rect.bottom() {\n", "file_path": "src/core/util.rs", "rank": 18, "score": 110652.36696353008 }, { "content": "pub fn random_select<T>(mut i: std::slice::Iter<T>) -> Option<&T> {\n\n i.nth(rand::random::<usize>() % i.len())\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum DayOfWeek {\n\n Sunday,\n\n Monday,\n\n TuesDay,\n\n Wednesday,\n\n Thursday,\n\n Friday,\n\n Saturday,\n\n}\n\n\n\nimpl DayOfWeek {\n\n pub fn from_char(day_str: &str) -> Self {\n\n match day_str {\n\n \"Sun\" => Self::Sunday,\n\n \"Mon\" => Self::Monday,\n", "file_path": "src/core/util.rs", "rank": 19, "score": 109107.27582510636 }, { "content": "pub fn create_character<'a>(\n\n order: CharacterFactoryOrder,\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n match order {\n\n CharacterFactoryOrder::PlayableDoremy1 => {\n\n create_playable_doremy1(ctx, camera, map_position)\n\n }\n\n CharacterFactoryOrder::CustomerSample => create_customer_sample(ctx, camera, map_position),\n\n }\n\n}\n", "file_path": "src/object/character_factory.rs", "rank": 20, "score": 107919.52541868502 }, { "content": "pub fn font_information_from_toml_value<'a>(\n\n game_data: &'a GameResource,\n\n toml_value: &toml::Value,\n\n) -> FontInformation {\n\n let font_str = toml_value[\"FontID\"].as_str().unwrap();\n\n\n\n let scale_table = toml_value[\"scale\"].as_table().unwrap();\n\n\n\n let scale = numeric::Vector2f::new(\n\n scale_table[\"x\"].as_float().unwrap() as f32,\n\n scale_table[\"y\"].as_float().unwrap() as f32,\n\n );\n\n\n\n let color_hex_code = toml_value[\"color\"].as_integer().unwrap() as u32;\n\n\n\n FontInformation::new(\n\n game_data.get_font(FontID::from_str(font_str).unwrap()),\n\n scale,\n\n 
ggraphics::Color::from_rgba_u32(color_hex_code),\n\n )\n\n}\n", "file_path": "src/core.rs", "rank": 21, "score": 107919.52541868502 }, { "content": "pub fn create_coin<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n value: u32,\n\n pos: numeric::Point2f,\n\n t: Clock,\n\n) -> TaskItem {\n\n let (texture_id, scale) = match value {\n\n 500 => (TextureID::Coin500Yen, numeric::Vector2f::new(0.11, 0.11)),\n\n 100 => (TextureID::Coin100Yen, numeric::Vector2f::new(0.1, 0.1)),\n\n 50 => (TextureID::Coin50Yen, numeric::Vector2f::new(0.08, 0.08)),\n\n _ => panic!(\"failed to create coin texture\"),\n\n };\n\n\n\n let s_texture = UniTexture::new(ctx.ref_texture(texture_id), pos, scale, 0.0, 0);\n\n\n\n let l_texture = UniTexture::new(ctx.ref_texture(texture_id), pos, scale, 0.0, 0);\n\n\n\n TaskItem::Coin(TaskTexture::new(\n\n OnDeskTexture::new(ctx.context, s_texture, OnDeskType::Coin),\n\n OnDeskTexture::new(ctx.context, l_texture, OnDeskType::Coin),\n\n 1,\n\n true,\n\n true,\n\n DeskObjectType::Coin,\n\n t,\n\n ))\n\n}\n\n\n", "file_path": "src/object/task_object/factory.rs", "rank": 22, "score": 104967.39575020946 }, { "content": "pub fn get_unique_id() -> u64 {\n\n UNIQUE_ID.with(|id| {\n\n // インクリメント\n\n *id.borrow_mut() += 1;\n\n id.borrow().clone()\n\n })\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 23, "score": 104967.39575020946 }, { "content": "pub fn create_kuyou_kosuzu<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n let mut textures = vec![\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotFront2),\n\n ctx.ref_texture(TextureID::KosuzuDotFront3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotBack2),\n\n ctx.ref_texture(TextureID::KosuzuDotBack3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight2),\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight3),\n\n ],\n", "file_path": "src/object/character_factory.rs", "rank": 24, "score": 104967.39575020946 }, { "content": "pub fn create_customer_kuyou<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n let mut textures = vec![\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotFront2),\n\n ctx.ref_texture(TextureID::Mob1DotFront3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotBack2),\n\n ctx.ref_texture(TextureID::Mob1DotBack3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotRight1),\n\n ctx.ref_texture(TextureID::Mob1DotRight2),\n\n ctx.ref_texture(TextureID::Mob1DotRight1),\n\n ctx.ref_texture(TextureID::Mob1DotRight3),\n\n ],\n", "file_path": "src/object/character_factory.rs", "rank": 25, "score": 104967.39575020946 }, { "content": "pub fn create_endroll_sample<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n let mut textures = vec![\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotFront2),\n\n ctx.ref_texture(TextureID::KosuzuDotFront3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotBack2),\n\n ctx.ref_texture(TextureID::KosuzuDotBack3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight2),\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight3),\n\n ],\n", "file_path": "src/object/character_factory.rs", "rank": 26, "score": 104967.39575020946 }, { "content": "pub fn 
create_dobj_book<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n obj_type: DeskObjectType,\n\n pos: numeric::Point2f,\n\n book_info: BookInformation,\n\n t: Clock,\n\n) -> TaskItem {\n\n let (texture, scale) = match book_info.size.as_str() {\n\n \"大判本\" => (\n\n *util::random_select(LARGE_BOOK_TEXTURE.iter()).unwrap(),\n\n numeric::Vector2f::new(0.1, 0.1),\n\n ),\n\n \"中判本\" => (\n\n *util::random_select(MIDDLE_BOOK_TEXTURE.iter()).unwrap(),\n\n numeric::Vector2f::new(0.15, 0.15),\n\n ),\n\n _ => panic!(\"invalid book size info\"),\n\n };\n\n\n\n let uni_texture = UniTexture::new(\n", "file_path": "src/object/task_object/factory.rs", "rank": 27, "score": 102249.0629171737 }, { "content": "pub fn halt(pos: numeric::Point2f) -> Option<GenericMoveFn> {\n\n Some(Box::new(\n\n move |_: &dyn tg::object::MovableObject, _: Clock| Some(pos),\n\n ))\n\n}\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 28, "score": 100818.9501860817 }, { "content": "struct SeqTexture {\n\n textures: Vec<ggraphics::Image>,\n\n index: usize,\n\n}\n\n\n\nimpl SeqTexture {\n\n pub fn new(textures: Vec<ggraphics::Image>) -> Self {\n\n SeqTexture {\n\n textures: textures,\n\n index: 0,\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.index = 0;\n\n }\n\n\n\n pub fn current_frame(&self) -> ggraphics::Image {\n\n self.textures[self.index % self.textures.len()].clone()\n\n }\n", "file_path": "src/object/util_object.rs", "rank": 29, "score": 100557.59674352454 }, { "content": "pub fn move_constant(speed: numeric::Vector2f) -> Option<GenericMoveFn> {\n\n Some(Box::new(\n\n move |p: &dyn tg::object::MovableObject, _t: Clock| {\n\n let current_pos = p.get_position();\n\n Some(numeric::Point2f::new(\n\n current_pos.x + speed.x,\n\n current_pos.y + speed.y,\n\n ))\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 30, "score": 98943.24191244708 }, { "content": "pub fn year_to_season(year: i64) -> i64 {\n\n year\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 31, "score": 94512.22173034263 }, { "content": "pub fn min<T>(a: T, b: T) -> T\n\nwhere\n\n T: PartialOrd,\n\n{\n\n if a < b {\n\n a\n\n } else {\n\n b\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
perf_measure {\n\n ( $x:expr) => {{\n\n let start = std::time::Instant::now();\n\n let _ = $x;\n\n let end = start.elapsed();\n\n end.subsec_nanos()\n\n }};\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 32, "score": 87590.79743217719 }, { "content": "pub fn max<T>(a: T, b: T) -> T\n\nwhere\n\n T: PartialOrd,\n\n{\n\n if a > b {\n\n a\n\n } else {\n\n b\n\n }\n\n}\n\n\n", "file_path": "src/core/util.rs", "rank": 33, "score": 87590.79743217719 }, { "content": "pub trait StackableWindow: TextureObject {\n\n fn stacked_handler<'a>(&mut self, _ctx: &mut SuzuContext<'a>) {}\n\n\n\n fn close_check(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/object/scenario_object.rs", "rank": 34, "score": 84104.06036191247 }, { "content": "pub trait OnDesk: TextureObject + Clickable {\n\n fn ondesk_whose(&self) -> i32;\n\n\n\n fn click_hold_data(&self, ctx: &mut ggez::Context, point: numeric::Point2f) -> HoldData;\n\n\n\n fn get_hold_data(&self) -> HoldData {\n\n HoldData::None\n\n }\n\n\n\n fn get_type(&self) -> OnDeskType;\n\n\n\n fn start_dragging<'a>(&mut self, _: &mut SuzuContext<'a>) {}\n\n\n\n fn finish_dragging<'a>(&mut self, _: &mut SuzuContext<'a>) {}\n\n}\n\n\n\npub struct OnDeskTexture {\n\n texture: UniTexture,\n\n shadow: ShadowShape,\n\n on_desk_type: OnDeskType,\n", "file_path": "src/object/task_object/tt_sub_component.rs", "rank": 35, "score": 74938.06534279812 }, { "content": "pub fn map_to_display(map_pos: &numeric::Point2f, camera: &numeric::Rect) -> numeric::Point2f {\n\n numeric::Point2f::new(map_pos.x - camera.x, map_pos.y - camera.y)\n\n}\n", "file_path": "src/core/map_parser.rs", "rank": 36, "score": 70271.28205896822 }, { "content": "struct SceneStack {\n\n stack: VecDeque<TopScene>,\n\n}\n\n\n\nimpl SceneStack {\n\n pub fn new() -> SceneStack {\n\n SceneStack {\n\n stack: VecDeque::new(),\n\n }\n\n }\n\n\n\n pub fn push(&mut self, scene: TopScene) {\n\n self.stack.push_back(scene);\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<TopScene> {\n\n self.stack.pop_back()\n\n }\n\n}\n\n\n", "file_path": "src/core.rs", "rank": 37, "score": 66756.5621314149 }, { "content": "struct SceneController {\n\n current_scene: TopScene,\n\n scene_stack: SceneStack,\n\n key_map: tdev::ProgramableGenericKey,\n\n global_clock: u64,\n\n root_screen: SubScreen,\n\n game_status: Option<SavableData>,\n\n game_config: GameConfig,\n\n redraw_request: scene::DrawRequest,\n\n permanent_save_data: PermanentSaveData,\n\n}\n\n\n\nimpl SceneController {\n\n pub fn new<'a>(ctx: &mut ggez::Context, game_data: &'a mut GameResource) -> SceneController {\n\n let window_size = ggraphics::drawable_size(ctx);\n\n\n\n let mut root_screen = SubScreen::new(\n\n ctx,\n\n numeric::Rect::new(0.0, 0.0, 1366.0, 768.0),\n\n 0,\n", "file_path": "src/core.rs", "rank": 38, "score": 66756.5621314149 }, { "content": "#[test]\n\nfn day_diff_works() {\n\n assert_eq!(GensoDate::new(112, 7, 23).diff_day(&GensoDate::new(112, 7, 23)), 0);\n\n assert_eq!(GensoDate::new(112, 7, 23).diff_day(&GensoDate::new(112, 8, 1)), 9);\n\n assert_eq!(GensoDate::new(112, 8, 1).diff_day(&GensoDate::new(112, 7, 23)), -9);\n\n}\n", "file_path": "tests/scenario.rs", "rank": 39, "score": 64159.45868120773 }, { "content": "///\n\n/// ## マップ上のデータをまとめる構造体\n\n///\n\n/// ### tile_map\n\n/// tilesetで構成された描画可能なマップ\n\n///\n\n/// ### event_map\n\n/// マップ上のイベントをまとめておく構造体\n\n///\n\n/// ### scenario_box\n\n/// マップ上に表示されるテキストボックス\n\n///\n\nstruct MapData {\n\n pub tile_map: mp::StageObjectMap,\n\n pub event_map: MapEventList,\n\n pub scenario_event: 
Option<ScenarioEvent>,\n\n}\n\n\n\nimpl MapData {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n map_id: u32,\n\n camera: Rc<RefCell<numeric::Rect>>,\n\n ) -> Self {\n\n let map_constract_data = ctx.resource.get_map_data(map_id).unwrap();\n\n\n\n MapData {\n\n tile_map: mp::StageObjectMap::new(\n\n ctx.context,\n\n &map_constract_data.map_file_path,\n\n camera.clone(),\n\n numeric::Rect::new(0.0, 0.0, 1366.0, 768.0),\n", "file_path": "src/scene/shop_scene.rs", "rank": 40, "score": 63056.08615068093 }, { "content": "struct CharacterGroup {\n\n group: Vec<CustomerCharacter>,\n\n drwob_essential: DrawableObjectEssential,\n\n}\n\n\n\nimpl CharacterGroup {\n\n pub fn new() -> Self {\n\n CharacterGroup {\n\n group: Vec::new(),\n\n drwob_essential: DrawableObjectEssential::new(true, 0),\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn add(&mut self, character: CustomerCharacter) {\n\n self.group.push(character);\n\n }\n\n\n\n #[inline(always)]\n\n pub fn drain_remove_if<F>(&mut self, f: F) -> Vec<CustomerCharacter>\n", "file_path": "src/scene/shop_scene.rs", "rank": 41, "score": 63056.08615068093 }, { "content": "pub trait Clickable {\n\n fn button_down<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _: Clock,\n\n _button: ggez::input::mouse::MouseButton,\n\n _point: numeric::Point2f,\n\n ) {\n\n }\n\n\n\n fn button_up(\n\n &mut self,\n\n _ctx: &mut SuzuContext,\n\n _: Clock,\n\n _button: ggez::input::mouse::MouseButton,\n\n _point: numeric::Point2f,\n\n ) {\n\n }\n\n\n\n fn on_click<'a>(\n", "file_path": "src/object.rs", "rank": 42, "score": 62026.9696776101 }, { "content": "struct TemporaryConfigData {\n\n bgm_volume: f32,\n\n se_volume: f32,\n\n pause_when_inactive: bool,\n\n fullscreen_mode: bool,\n\n}\n\n\n\nimpl TemporaryConfigData {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self {\n\n TemporaryConfigData {\n\n bgm_volume: ctx.config.get_bgm_volume(),\n\n se_volume: ctx.config.get_se_volume(),\n\n pause_when_inactive: ctx.config.is_pause_when_inactive(),\n\n\t fullscreen_mode: ctx.config.is_fullscreen_mode_configed(),\n\n }\n\n }\n\n}\n\n\n\npub struct ConfigPanel {\n\n canvas: sub_screen::SubScreen,\n", "file_path": "src/object/title_object.rs", "rank": 43, "score": 61444.54949686541 }, { "content": "struct ShopTutorialList {\n\n hello: bool,\n\n go_ret_box: bool,\n\n first_ret_box: bool,\n\n go_shelving: bool,\n\n shelving_is_done: bool,\n\n customer_is_comming: bool,\n\n checking_customer_count: i64,\n\n}\n\n\n\nimpl ShopTutorialList {\n\n pub fn new() -> Self {\n\n ShopTutorialList {\n\n hello: false,\n\n go_ret_box: false,\n\n first_ret_box: false,\n\n go_shelving: false,\n\n shelving_is_done: false,\n\n customer_is_comming: false,\n\n checking_customer_count: 0,\n", "file_path": "src/scene/shop_scene.rs", "rank": 44, "score": 61444.54949686541 }, { "content": "pub trait SceneManager {\n\n fn key_down_event<'a>(&mut self, _: &mut SuzuContext<'a>, _vkey: tdev::VirtualKey) {}\n\n\n\n fn key_up_event<'a>(&mut self, _ctx: &mut SuzuContext<'a>, _vkey: tdev::VirtualKey) {}\n\n\n\n fn mouse_motion_event<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _point: numeric::Point2f,\n\n _offset: numeric::Vector2f,\n\n ) {\n\n }\n\n\n\n fn mouse_button_down_event<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _button: ginput::mouse::MouseButton,\n\n _point: numeric::Point2f,\n\n ) {\n\n }\n", "file_path": "src/scene.rs", "rank": 45, "score": 60093.38181075579 }, { "content": "#[derive(Clone)]\n\nstruct DragDistanceCalculator {\n\n distance: f32,\n\n last: 
Option<numeric::Point2f>,\n\n}\n\n\n\nimpl DragDistanceCalculator {\n\n pub fn new() -> Self {\n\n DragDistanceCalculator {\n\n distance: 0.0,\n\n last: None,\n\n }\n\n }\n\n\n\n pub fn add_point(&mut self, point: numeric::Point2f) {\n\n if self.last.is_some() {\n\n self.distance += distance!(self.last.unwrap(), point);\n\n }\n\n\n\n self.last = Some(point);\n\n }\n", "file_path": "src/object/copy_scene_object.rs", "rank": 46, "score": 59965.90223955216 }, { "content": "struct DrawableEvaluationFlow {\n\n eval_frame: TableFrame,\n\n desc_text: Vec<VerticalText>,\n\n yet_effect_text: VecDeque<EffectableWrap<MovableWrap<VerticalText>>>,\n\n effect_time_list: VecDeque<Clock>,\n\n now_effect_text: VecDeque<EffectableWrap<MovableWrap<VerticalText>>>,\n\n drwob_essential: DrawableObjectEssential,\n\n}\n\n\n\nimpl DrawableEvaluationFlow {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n pos: numeric::Point2f,\n\n result_report: ResultReport,\n\n effect_clock_offset: Clock,\n\n depth: i8,\n\n t: Clock,\n\n ) -> Self {\n\n let eval_frame = TableFrame::new(\n\n ctx.resource,\n", "file_path": "src/object/task_result_object.rs", "rank": 47, "score": 59965.90223955216 }, { "content": "struct Counter<T> {\n\n count: T,\n\n}\n\n\n\nimpl<T: Clone + Copy + std::ops::AddAssign> Counter<T> {\n\n pub fn new(init: T) -> Self {\n\n Counter { count: init }\n\n }\n\n\n\n pub fn add(&mut self, value: T) {\n\n self.count += value;\n\n }\n\n\n\n pub fn set_value(&mut self, value: T) {\n\n self.count = value;\n\n }\n\n\n\n pub fn get_value(&self) -> T {\n\n self.count\n\n }\n", "file_path": "src/object/simulation_ui.rs", "rank": 48, "score": 59051.82433748381 }, { "content": "struct TaskSilhouette {\n\n character: Option<SimpleObject>,\n\n name: Option<String>,\n\n canvas: SubScreen,\n\n}\n\n\n\nimpl TaskSilhouette {\n\n pub fn new_empty(ctx: &mut ggez::Context, pos_rect: numeric::Rect) -> Self {\n\n TaskSilhouette {\n\n character: None,\n\n name: None,\n\n canvas: SubScreen::new(ctx, pos_rect, 0, ggraphics::Color::from_rgba_u32(0)),\n\n }\n\n }\n\n\n\n pub fn is_some(&self) -> bool {\n\n self.character.is_some()\n\n }\n\n\n\n pub fn change_character(&mut self, character: SimpleObject) -> &mut Self {\n", "file_path": "src/object/task_object/tt_main_component.rs", "rank": 49, "score": 58604.35791751412 }, { "content": "struct MapObjectDrawer<'a> {\n\n ref_list: Vec<Box<&'a mut dyn OnMap>>,\n\n}\n\n\n\nimpl<'a> MapObjectDrawer<'a> {\n\n pub fn new() -> MapObjectDrawer<'a> {\n\n MapObjectDrawer {\n\n ref_list: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn add(&mut self, onmap: &'a mut dyn OnMap) {\n\n self.ref_list.push(Box::new(onmap));\n\n }\n\n\n\n pub fn sort(&mut self, ctx: &mut ggez::Context) {\n\n self.ref_list.sort_by(|a, b| {\n\n a.get_map_position_bottom_right(ctx)\n\n .y\n\n .partial_cmp(&b.get_map_position_bottom_right(ctx).y)\n", "file_path": "src/scene/shop_scene.rs", "rank": 50, "score": 57440.28768366829 }, { "content": "struct FloatingMemoryObject {\n\n header_text: UniText,\n\n text: Vec<MovableWrap<UniText>>,\n\n appearance_frame: TileBatchFrame,\n\n canvas: SubScreen,\n\n padding: f32,\n\n font_info: FontInformation,\n\n}\n\n\n\nimpl FloatingMemoryObject {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n init_rect: numeric::Rect,\n\n title: String,\n\n padding: f32,\n\n appear_frame_id: TileBatchTextureID,\n\n depth: i8,\n\n ) -> Self {\n\n let font_info = FontInformation::new(\n\n ctx.resource.get_font(FontID::Cinema),\n", "file_path": "src/object/task_object/tt_main_component.rs", "rank": 51, 
"score": 57346.535277019284 }, { "content": "///\n\n/// メニューに表示するやつ\n\n///\n\nstruct TaskInfoContents {\n\n book_info_memory: FloatingMemoryObject,\n\n general_table_frame: TableFrame,\n\n header_text: UniText,\n\n desc_text: Vec<VerticalText>,\n\n request_info_text: HashMap<String, VerticalText>,\n\n drwob_essential: DrawableObjectEssential,\n\n}\n\n\n\nimpl TaskInfoContents {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>, customer_request: Option<CustomerRequest>) -> Self {\n\n let normal_scale_font = FontInformation::new(\n\n ctx.resource.get_font(FontID::Cinema),\n\n numeric::Vector2f::new(24.0, 24.0),\n\n ggraphics::Color::from_rgba_u32(0x000000ff),\n\n );\n\n\n\n let large_scale_font = FontInformation::new(\n\n ctx.resource.get_font(FontID::Cinema),\n\n numeric::Vector2f::new(40.0, 40.0),\n", "file_path": "src/object/task_object/tt_main_component.rs", "rank": 52, "score": 57346.535277019284 }, { "content": "fn create_playable_doremy1<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n let textures = vec![\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotFront2),\n\n ctx.ref_texture(TextureID::KosuzuDotFront3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotBack2),\n\n ctx.ref_texture(TextureID::KosuzuDotBack3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight2),\n\n ctx.ref_texture(TextureID::KosuzuDotRight1),\n\n ctx.ref_texture(TextureID::KosuzuDotRight3),\n\n ],\n", "file_path": "src/object/character_factory.rs", "rank": 53, "score": 56879.38175451134 }, { "content": "fn create_customer_sample<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n camera: &numeric::Rect,\n\n map_position: numeric::Point2f,\n\n) -> MapObject {\n\n let textures = vec![\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotFront2),\n\n ctx.ref_texture(TextureID::Mob1DotFront3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotBack2),\n\n ctx.ref_texture(TextureID::Mob1DotBack3),\n\n ],\n\n vec![\n\n ctx.ref_texture(TextureID::Mob1DotRight1),\n\n ctx.ref_texture(TextureID::Mob1DotRight2),\n\n ctx.ref_texture(TextureID::Mob1DotRight1),\n\n ctx.ref_texture(TextureID::Mob1DotRight3),\n\n ],\n", "file_path": "src/object/character_factory.rs", "rank": 54, "score": 56879.38175451134 }, { "content": "pub trait MapEvent {\n\n fn get_trigger_method(&self) -> EventTrigger;\n\n}\n\n\n\npub struct MapTextEvent {\n\n trigger: EventTrigger,\n\n text: String,\n\n}\n\n\n\nimpl MapTextEvent {\n\n pub fn from_toml_object(toml_script: &toml::value::Value) -> Self {\n\n MapTextEvent {\n\n trigger: EventTrigger::from_str(toml_script.get(\"trigger\").unwrap().as_str().unwrap())\n\n .unwrap(),\n\n text: toml_script\n\n .get(\"text\")\n\n .unwrap()\n\n .as_str()\n\n .unwrap()\n\n .to_string(),\n", "file_path": "src/object/map_object.rs", "rank": 55, "score": 56725.114877494285 }, { "content": "pub trait NotificationContents: DrawableComponent {\n\n fn required_size(&self) -> numeric::Vector2f;\n\n fn get_notification_type(&self) -> NotificationType;\n\n}\n\n\n\npub struct NotificationContentsData {\n\n pub header_text: String,\n\n pub main_text: String,\n\n pub notification_type: NotificationType,\n\n}\n\n\n\nimpl NotificationContentsData {\n\n pub fn new(\n\n header_text: String,\n\n main_text: String,\n\n notification_type: NotificationType,\n\n ) -> Self {\n\n NotificationContentsData {\n\n header_text: header_text,\n\n main_text: main_text,\n", "file_path": "src/object/notify.rs", "rank": 56, 
"score": 52416.44445184015 }, { "content": "pub trait Scrollable: DrawableComponent {\n\n fn scroll<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n point: numeric::Point2f,\n\n offset: numeric::Vector2f,\n\n );\n\n}\n\n\n\npub enum ScrollDirection {\n\n Vertical = 0,\n\n Horizon,\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum ObjectDirection {\n\n MoveUp,\n\n MoveDown,\n\n MoveRight,\n\n MoveLeft,\n", "file_path": "src/object/util_object.rs", "rank": 57, "score": 52416.44445184015 }, { "content": "///\n\n/// マップ上に描画されるオブジェクトが実装すべきトレイト\n\n///\n\npub trait OnMap: DrawableComponent {\n\n // マップ上のテクスチャ描画開始地点を返す\n\n fn get_map_position(&self) -> numeric::Point2f;\n\n\n\n // マップ上のテクスチャ描画領域の右下の位置を返す\n\n fn get_map_position_bottom_right(&self, ctx: &mut ggez::Context) -> numeric::Point2f;\n\n\n\n // マップ上のテクスチャ描画開始地点を設定する\n\n fn set_map_position(&mut self, position: numeric::Point2f);\n\n}\n\n\n\n///\n\n/// マップ上に描画するオブジェクト\n\n/// 基本的に、マップ上に描画するオブジェクトはこの構造体を移譲して使う\n\n///\n\npub struct MapObject {\n\n last_position: numeric::Point2f,\n\n object: TextureAnimation,\n\n speed_info: TextureSpeedInfo,\n\n map_position: TwoStepPoint,\n", "file_path": "src/object/map_object.rs", "rank": 58, "score": 52416.44445184015 }, { "content": "pub trait StackMessagePassingWindow<Msg>: StackableWindow {\n\n fn check_message(&self) -> Option<Msg> {\n\n None\n\n }\n\n\n\n fn apply_message<'a>(&mut self, _ctx: &mut SuzuContext<'a>, _msg: Msg) {}\n\n\n\n fn mouse_down_handler<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _point: numeric::Point2f,\n\n _button: MouseButton,\n\n ) -> Option<Box<dyn StackMessagePassingWindow<Msg>>> {\n\n None\n\n }\n\n\n\n fn mouse_click_handler<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _point: numeric::Point2f,\n", "file_path": "src/object/scenario_object.rs", "rank": 59, "score": 45400.40512850619 }, { "content": "use torifune::core::Clock;\n\nuse torifune::distance;\n\nuse torifune::graphics as tg;\n\nuse torifune::graphics::object::GenericMoveFn;\n\nuse torifune::numeric;\n\n\n", "file_path": "src/object/move_fn.rs", "rank": 60, "score": 41125.07879591176 }, { "content": " pub black_return: Clock,\n\n}\n\n\n\nimpl BlackOutParam {\n\n pub fn new(black_out: Clock, black_keep: Clock, black_return: Clock) -> Self {\n\n BlackOutParam {\n\n black_out: black_out,\n\n black_keep: black_keep,\n\n black_return: black_return,\n\n }\n\n }\n\n}\n\n\n\npub struct BlackOutTexture {\n\n texture: EffectableWrap<MovableWrap<UniTexture>>,\n\n}\n\n\n\nimpl BlackOutTexture {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n", "file_path": "src/object.rs", "rank": 61, "score": 35.38877870595571 }, { "content": "impl std::fmt::Display for ShopClock {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"({}:{})\", self.hour, self.minute)\n\n }\n\n}\n\n\n\npub struct DrawableShopClock {\n\n background: UniTexture,\n\n long_needle: UniTexture,\n\n short_needle: UniTexture,\n\n time: ShopClock,\n\n center_position: numeric::Vector2f,\n\n drwob_essential: DrawableObjectEssential,\n\n}\n\n\n\nimpl DrawableShopClock {\n\n pub fn from_toml<'a>(ctx: &mut SuzuContext<'a>, path: &str, time: ShopClock) -> Self {\n\n let root = parse_toml_file!(ctx.context, path);\n\n\n\n let background_texture_id = root[\"background-texture\"].as_str().unwrap();\n", "file_path": "src/object/shop_object.rs", "rank": 63, "score": 32.276102727743975 }, { "content": "}\n\n\n\nimpl ScenarioBox {\n\n pub fn new<'a>(ctx: &mut SuzuContext, rect: numeric::Rect, 
t: Clock) -> Self {\n\n let background = tobj::SimpleObject::new(\n\n tobj::MovableUniTexture::new(\n\n Box::new(UniTexture::new(\n\n ctx.ref_texture(TextureID::TextBackground),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(1.0, 1.0),\n\n 0.0,\n\n 0,\n\n )),\n\n None,\n\n 0,\n\n ),\n\n Vec::new(),\n\n );\n\n ScenarioBox {\n\n text_box: TextBox::new(\n", "file_path": "src/object/scenario.rs", "rank": 64, "score": 32.02128378709669 }, { "content": "}\n\n\n\npub const WINDOW_SIZE_X: i16 = 1366;\n\npub const WINDOW_SIZE_Y: i16 = 768;\n\n\n\npub struct InitialDisplay {\n\n texture: Vec<ggraphics::Image>,\n\n index: usize,\n\n}\n\n\n\nimpl InitialDisplay {\n\n pub fn new(ctx: &mut ggez::Context) -> Self {\n\n InitialDisplay {\n\n texture: vec![ggraphics::Image::new(ctx, \"/textures/sumire_logo.png\").unwrap()],\n\n index: 0,\n\n }\n\n }\n\n\n\n pub fn draw(&self, ctx: &mut ggez::Context) {\n\n ggraphics::clear(ctx, [0.0, 0.0, 0.0, 0.0].into());\n", "file_path": "src/core.rs", "rank": 66, "score": 30.642490670076615 }, { "content": "\n\n pub fn make_this_none_status<'a>(&mut self, ctx: &mut SuzuContext<'a>) {\n\n self.button_status = ButtonStatus::None;\n\n self.texture\n\n .set_inner_color_filter(ggraphics::Color::from_rgba_u32(0xffffffff));\n\n self.texture\n\n .set_outer_color_filter(ggraphics::Color::from_rgba_u32(0xf0f0f0ff));\n\n ctx.process_utility.redraw();\n\n }\n\n\n\n pub fn contains(&self, p: numeric::Point2f) -> bool {\n\n self.texture.contains(p)\n\n }\n\n\n\n pub fn mouse_motion_handler<'a>(&mut self, ctx: &mut SuzuContext<'a>, p: numeric::Point2f) {\n\n if self.contains(p) {\n\n match self.button_status {\n\n ButtonStatus::None => {\n\n\t\t self.make_this_hovered_status(ctx);\n\n\t\t ctx.process_utility.redraw();\n", "file_path": "src/object/util_object.rs", "rank": 67, "score": 29.987496981577067 }, { "content": "}\n\n\n\nimpl RecordRoom {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n pos_rect: numeric::Rect,\n\n depth: i8,\n\n t: Clock,\n\n ) -> Self {\n\n ctx.permanent_save_data.sort_records();\n\n\n\n let font_info = FontInformation::new(\n\n ctx.resource.get_font(FontID::Cinema),\n\n numeric::Vector2f::new(28.0, 28.0),\n\n ggraphics::Color::from_rgba_u32(0xccccccff),\n\n );\n\n\n\n let mut background = DarkEffectPanel::new(\n\n ctx.context,\n\n numeric::Rect::new(0.0, 0.0, WINDOW_SIZE_X as f32, WINDOW_SIZE_X as f32),\n", "file_path": "src/object/title_object.rs", "rank": 68, "score": 29.6404938138391 }, { "content": "\n\npub struct EndScene {\n\n mouse_info: MouseInformation,\n\n background: UniTexture,\n\n event_list: DelayEventList<Self>,\n\n end_flow: EndSceneFlow,\n\n scene_transition_effect: Option<effect_object::ScreenTileEffect>,\n\n scene_transition: SceneID,\n\n scene_transition_type: SceneTransition,\n\n kosuzu_speed: numeric::Vector2f,\n\n walking_kosuzu: MapObject,\n\n clock: Clock,\n\n}\n\n\n\nimpl EndScene {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self {\n\n let background = UniTexture::new(\n\n ctx.ref_texture(TextureID::TextBackground),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(2.0, 2.0),\n", "file_path": "src/scene/end_scene.rs", "rank": 69, "score": 29.341514599030734 }, { "content": "\n\n entry.update_entry_contents(ctx.context, ctx.resource, &savable_data);\n\n entry\n\n }\n\n\n\n fn new_none<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n background: UniTexture,\n\n appr_frame: TileBatchFrame,\n\n pos_rect: numeric::Rect,\n\n table_frame: TableFrame,\n\n save_button: FramedButton,\n\n load_button: 
FramedButton,\n\n delete_button: FramedButton,\n\n slot_id: u8,\n\n ) -> Self {\n\n let mut entry = DrawableSaveEntry {\n\n background: background,\n\n date_text: None,\n\n money_text: None,\n", "file_path": "src/object/save_scene_object.rs", "rank": 70, "score": 29.337399330768896 }, { "content": " canvas: SubScreen,\n\n graph_area: numeric::Rect,\n\n data: Vec<numeric::Vector2f>,\n\n shapes: ggraphics::Mesh,\n\n}\n\n\n\nimpl GraphDrawer {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n rect: numeric::Rect,\n\n graph_area: numeric::Rect,\n\n data: Vec<numeric::Vector2f>,\n\n point_radius: f32,\n\n point_color: ggraphics::Color,\n\n line_width: f32,\n\n line_color: ggraphics::Color,\n\n depth: i8,\n\n ) -> Self {\n\n let mut builder = ggraphics::MeshBuilder::new();\n\n\n", "file_path": "src/object/util_object.rs", "rank": 71, "score": 29.2224232566418 }, { "content": " pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n texture_id: TextureID,\n\n savable_data: Option<SavableData>,\n\n pos_rect: numeric::Rect,\n\n slot_id: u8,\n\n ) -> Self {\n\n let mut background = UniTexture::new(\n\n ctx.ref_texture(texture_id),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(1.0, 1.0),\n\n 0.0,\n\n 0,\n\n );\n\n\n\n\tlet save_button = FramedButton::create_design_small(\n\n\t ctx,\n\n\t numeric::Point2f::new(30.0, pos_rect.h - 70.0),\n\n\t \"保存\",\n\n\t numeric::Vector2f::new(18.0, 18.0)\n", "file_path": "src/object/save_scene_object.rs", "rank": 72, "score": 29.211524419786194 }, { "content": "\n\n///\n\n/// # 遅延イベントを起こすための情報を保持する\n\n///\n\n/// ## run_time\n\n/// 処理が走る時間\n\n///\n\n/// ## func\n\n/// run_time時に実行される処理\n\n///\n\npub struct DelayEvent<T> {\n\n pub run_time: Clock,\n\n pub func: Box<dyn FnOnce(&mut T, &mut SuzuContext, Clock) -> ()>,\n\n}\n\n\n\nimpl<T> DelayEvent<T> {\n\n pub fn new(f: Box<dyn FnOnce(&mut T, &mut SuzuContext, Clock) -> ()>, t: Clock) -> Self {\n\n DelayEvent::<T> {\n\n run_time: t,\n\n func: f,\n", "file_path": "src/scene.rs", "rank": 73, "score": 29.063441713303405 }, { "content": " self.drwob_essential.drawing_depth\n\n }\n\n}\n\n\n\npub struct ShopMenu {\n\n canvas: MovableWrap<SubScreen>,\n\n menu_contents: ShopMenuContents,\n\n background: UniTexture,\n\n menu_canvas_size: numeric::Vector2f,\n\n now_appear: bool,\n\n}\n\n\n\nimpl ShopMenu {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>, size: numeric::Vector2f, t: Clock) -> Self {\n\n let mut canvas = SubScreen::new(\n\n ctx.context,\n\n numeric::Rect::new(-size.x, 0.0, size.x, size.y),\n\n 0,\n\n ggraphics::Color::from_rgba_u32(0xffffffff),\n\n );\n", "file_path": "src/object/shop_object.rs", "rank": 74, "score": 28.891319376948893 }, { "content": " Box::new(SubScreen::new(\n\n ctx,\n\n rect,\n\n 0,\n\n ggraphics::Color::from_rgba_u32(0),\n\n )),\n\n None,\n\n now,\n\n ),\n\n vec![],\n\n ),\n\n }\n\n }\n\n\n\n pub fn new_effect(\n\n &mut self,\n\n required_time: Clock,\n\n now: Clock,\n\n init_dark_alpha: u8,\n\n fin_dark_alpha: u8,\n", "file_path": "src/object.rs", "rank": 75, "score": 28.412676928042337 }, { "content": " ) -> Self {\n\n let mut background_object = MovableUniTexture::new(\n\n Box::new(UniTexture::new(\n\n ctx.ref_texture(TextureID::Paper1),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(1.0, 1.0),\n\n 0.0,\n\n 0,\n\n )),\n\n None,\n\n 0,\n\n );\n\n\n\n background_object.fit_scale(\n\n ctx.context,\n\n numeric::Vector2f::new(\n\n crate::core::WINDOW_SIZE_X as f32,\n\n crate::core::WINDOW_SIZE_Y as f32,\n\n ),\n\n );\n", "file_path": 
"src/scene/suzuna_scene/suzuna_sub_scene/task_result_scene.rs", "rank": 76, "score": 28.394462136945627 }, { "content": " if let Some(grid_position) = maybe_grid_position {\n\n match grid_position.y {\n\n 0 => Some(SignFrameEntry::BorrowingSign),\n\n 1 => Some(SignFrameEntry::ReturningSign),\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn sign_borrowing_frame(&mut self, ctx: &mut SuzuContext) {\n\n let mut sign_texture = UniTexture::new(\n\n ctx.ref_texture(TextureID::Hanko),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(0.5, 0.5),\n\n 0.0,\n\n 0,\n\n );\n\n\n", "file_path": "src/object/task_object/tt_sub_component.rs", "rank": 77, "score": 28.392781870488342 }, { "content": "}\n\n\n\nimpl TextureObject for SimpleBookListViewer {\n\n impl_texture_object_for_wrapped! {canvas}\n\n}\n\n\n\nimpl Clickable for SimpleBookListViewer {\n\n fn on_click<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n _clock: Clock,\n\n _button: ggez::input::mouse::MouseButton,\n\n _point: numeric::Point2f,\n\n ) {\n\n }\n\n\n\n fn clickable_status(\n\n &mut self,\n\n _ctx: &mut ggez::Context,\n\n _point: numeric::Point2f,\n", "file_path": "src/object/shop_object.rs", "rank": 78, "score": 27.916173658668864 }, { "content": " minute: u8,\n\n}\n\n\n\nimpl ShopClock {\n\n pub fn new(hour: u8, minute: u8) -> Self {\n\n ShopClock {\n\n hour: hour,\n\n minute: minute,\n\n }\n\n }\n\n\n\n pub fn add_minute(&mut self, minute: u8) {\n\n self.minute += minute;\n\n\n\n self.add_hour(self.minute / 60);\n\n\n\n self.minute = self.minute % 60;\n\n }\n\n\n\n pub fn add_hour(&mut self, hour: u8) {\n", "file_path": "src/object/shop_object.rs", "rank": 79, "score": 27.589771527018215 }, { "content": " slf.current_page -= 1;\n\n slf.redraw_request = DrawRequest::Draw;\n\n slf.check_move_page_icon_visibility();\n\n }),\n\n t + (i * 2) as Clock,\n\n );\n\n } else {\n\n break;\n\n }\n\n }\n\n ctx.play_sound_as_se(SoundID::SeTurnThePage, None);\n\n }\n\n\n\n false\n\n }\n\n\n\n pub fn sign_with_mouse_click<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n point: numeric::Point2f,\n", "file_path": "src/object/task_object/tt_sub_component.rs", "rank": 80, "score": 27.566300452242984 }, { "content": "}\n\n\n\nimpl ScenarioElement {\n\n pub fn get_scenario_id(&self) -> ScenarioElementID {\n\n match self {\n\n Self::Text(text) => text.get_scenario_id(),\n\n Self::ChoiceSwitch(choice) => choice.get_scenario_id(),\n\n Self::SceneTransition(transition_data) => transition_data.2,\n\n Self::FinishAndWait(data) => data.get_scenario_id(),\n\n Self::BuiltinCommand(command) => command.get_scenario_id(),\n\n\t Self::Switch(switch) => switch.get_self_scenario_id(),\n\n }\n\n }\n\n\n\n pub fn get_background_texture(&self) -> Option<TextureID> {\n\n match self {\n\n Self::Text(text) => text.get_background_texture_id(),\n\n Self::ChoiceSwitch(choice) => choice.get_background_texture_id(),\n\n Self::SceneTransition(_) => None,\n\n Self::FinishAndWait(data) => data.get_background_texture_id(),\n", "file_path": "src/object/scenario.rs", "rank": 81, "score": 27.446379775451263 }, { "content": " // font_info: FontInformation,\n\n // t: Clock,\n\n // ) -> Self {\n\n // let background = tobj::SimpleObject::new(\n\n // tobj::MovableUniTexture::new(\n\n // Box::new(UniTexture::new(\n\n // ctx.ref_texture(TextureID::TextBackground),\n\n // numeric::Point2f::new(20.0, 20.0),\n\n // numeric::Vector2f::new(0.8, 0.8),\n\n // 0.0,\n\n // 0,\n\n // )),\n\n // None,\n\n // 0,\n\n // ),\n\n // Vec::new(),\n\n // );\n\n\n\n 
// \tlet mut choice_box = ChoiceBox::new(\n\n // ctx,\n", "file_path": "src/object/scenario.rs", "rank": 82, "score": 27.43621428454534 }, { "content": " // numeric::Point2f::new(area.x + (area.w / 2.0), area.y + (area.h / 2.0)),\n\n // );\n\n\n\n // let mut button = FramedButton {\n\n // button_status: ButtonStatus::None,\n\n // texture: texture,\n\n // text: text,\n\n // drwob_essential: DrawableObjectEssential::new(true, depth),\n\n // };\n\n\n\n // button.make_this_none_status(ctx);\n\n // button\n\n // }\n\n\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n rect: numeric::Rect,\n\n r: f32,\n\n frame_width: f32,\n\n inner_color: ggraphics::Color,\n", "file_path": "src/object/util_object.rs", "rank": 83, "score": 27.4139343267505 }, { "content": " match ctx.permanent_save_data.save() {\n\n Ok(_) => (),\n\n Err(_) => (),\n\n }\n\n }\n\n\n\n pub fn update<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) {\n\n for vtext in self.result_vtext_list.iter_mut() {\n\n vtext.effect(ctx.context, t);\n\n vtext.move_with_func(t);\n\n }\n\n\n\n for vtext in self.credit_vtext_list.iter_mut() {\n\n vtext.effect(ctx.context, t);\n\n vtext.move_with_func(t);\n\n }\n\n\n\n for texture in self.book_collection.iter_mut() {\n\n texture.effect(ctx.context, t);\n\n texture.move_with_func(t);\n", "file_path": "src/object/end_object.rs", "rank": 84, "score": 27.325458101423695 }, { "content": " impl_drawable_object_for_wrapped! {canvas}\n\n}\n\n\n\nimpl TextureObject for SelectShelvingBookUI {\n\n impl_texture_object_for_wrapped! {canvas}\n\n}\n\n\n\nimpl Clickable for SelectShelvingBookUI {\n\n fn on_click<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n clock: Clock,\n\n button: ggez::input::mouse::MouseButton,\n\n point: numeric::Point2f,\n\n ) {\n\n // それぞれのオブジェクトに処理を渡すだけ\n\n\n\n let rpoint = self.canvas.relative_point(point);\n\n\n\n if self.box_info_window.contains(ctx.context, rpoint) {\n", "file_path": "src/object/shop_object.rs", "rank": 85, "score": 27.01419706802789 }, { "content": " }\n\n}\n\n\n\nimpl DrawableObject for BlackOutTexture {\n\n impl_drawable_object_for_wrapped! {texture}\n\n}\n\n\n\nimpl TextureObject for BlackOutTexture {\n\n impl_texture_object_for_wrapped! 
{texture}\n\n}\n\n\n\npub struct DarkEffectPanel {\n\n canvas: EffectableWrap<MovableWrap<SubScreen>>,\n\n}\n\n\n\nimpl DarkEffectPanel {\n\n pub fn new(ctx: &mut ggez::Context, rect: numeric::Rect, now: Clock) -> Self {\n\n DarkEffectPanel {\n\n canvas: EffectableWrap::new(\n\n MovableWrap::new(\n", "file_path": "src/object.rs", "rank": 87, "score": 26.9009766584696 }, { "content": " logo: UniTexture,\n\n event_list: DelayEventList<Self>,\n\n scene_transition_effect: Option<effect_object::ScreenTileEffect>,\n\n scene_transition: SceneID,\n\n scene_transition_type: SceneTransition,\n\n current_title_contents: Option<TitleContents>,\n\n title_contents_set: TitleContentsSet,\n\n scene_transition_lock: bool,\n\n clock: Clock,\n\n}\n\n\n\nimpl TitleScene {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>) -> Self {\n\n let background = UniTexture::new(\n\n ctx.ref_texture(TextureID::JpHouseTexture),\n\n numeric::Point2f::new(0.0, 0.0),\n\n numeric::Vector2f::new(1.0, 1.0),\n\n 0.0,\n\n 0,\n\n );\n", "file_path": "src/scene/title_scene.rs", "rank": 88, "score": 26.78802725434855 }, { "content": " text: VecDeque<SimpleText>,\n\n line_arrow: UniTexture,\n\n text_box_status: TextBoxStatus,\n\n appearance_frame: TileBatchFrame,\n\n complete_and_wait_current_line: bool,\n\n background: SimpleObject,\n\n canvas: SubScreen,\n\n const_canvas: SubScreen,\n\n}\n\n\n\nimpl TextBox {\n\n pub fn new<'a>(\n\n ctx: &mut SuzuContext<'a>,\n\n rect: numeric::Rect,\n\n mut background: SimpleObject,\n\n tile_batch_texture_id: TileBatchTextureID,\n\n box_lines: usize,\n\n _t: Clock,\n\n ) -> Self {\n\n background.fit_scale(ctx.context, numeric::Vector2f::new(rect.w, rect.h));\n", "file_path": "src/object/scenario.rs", "rank": 89, "score": 26.710994292367108 }, { "content": " }\n\n}\n\n\n\nimpl DrawableObject for TaskTable {\n\n impl_drawable_object_for_wrapped! {canvas}\n\n}\n\n\n\nimpl TextureObject for TaskTable {\n\n impl_texture_object_for_wrapped! 
{canvas}\n\n}\n\n\n\nimpl Clickable for TaskTable {\n\n fn button_down<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n _: Clock,\n\n _: ggez::input::mouse::MouseButton,\n\n point: numeric::Point2f,\n\n ) {\n\n self.select_dragging_object(ctx, point);\n", "file_path": "src/object/task_object.rs", "rank": 91, "score": 26.588114941764637 }, { "content": " pub dropping: Vec<TaskItem>,\n\n pub dropping_to_desk: Vec<TaskItem>,\n\n pub silhouette: SuzuMiniSightSilhouette,\n\n appearance_frame: TileBatchFrame,\n\n draw_request: DrawRequest,\n\n}\n\n\n\nimpl SuzuMiniSight {\n\n pub fn new<'a>(ctx: &mut SuzuContext<'a>, rect: ggraphics::Rect, t: Clock) -> Self {\n\n let appr_frame = TileBatchFrame::new(\n\n ctx.resource,\n\n TileBatchTextureID::BlackFrame,\n\n numeric::Rect::new(0.0, 0.0, rect.w, rect.h),\n\n numeric::Vector2f::new(1.0, 1.0),\n\n 0,\n\n );\n\n\n\n let silhouette_paper_texture = UniTexture::new(\n\n ctx.ref_texture(TextureID::Library),\n\n numeric::Point2f::new(0.0, 0.0),\n", "file_path": "src/object/task_object/tt_main_component.rs", "rank": 92, "score": 26.54718070235841 }, { "content": " \"update-panel\".to_string()\n\n }\n\n\n\n pub fn mouse_button_down<'a>(\n\n &mut self,\n\n _ctx: &mut SuzuContext<'a>,\n\n button: MouseButton,\n\n _point: numeric::Point2f,\n\n _t: Clock,\n\n ) {\n\n match button {\n\n MouseButton::Left => (),\n\n _ => (),\n\n }\n\n }\n\n\n\n pub fn mouse_button_up<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n point: numeric::Point2f,\n", "file_path": "src/object/title_object.rs", "rank": 93, "score": 26.517672567942213 }, { "content": " balloon.add_effect(vec![effect::fade_out(10, t)]);\n\n self.event_list.add_event(\n\n Box::new(|slf: &mut Self, _, _| slf.text_balloon = None),\n\n t + 11,\n\n );\n\n }\n\n }\n\n\n\n ///\n\n /// # 再描画要求有り\n\n ///\n\n pub fn update<'a>(&mut self, ctx: &mut SuzuContext<'a>, t: Clock) {\n\n flush_delay_event_and_redraw_check!(self, self.event_list, ctx, t, {});\n\n\n\n if let Some(balloon) = self.text_balloon.as_mut() {\n\n if !balloon.is_stop() || !balloon.is_empty_effect() {\n\n ctx.process_utility.redraw();\n\n }\n\n\n\n balloon.effect(ctx.context, t);\n", "file_path": "src/object/task_object/tt_main_component.rs", "rank": 94, "score": 26.483688682608722 }, { "content": " pub fn scroll_handler<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n point: numeric::Point2f,\n\n x: f32,\n\n y: f32,\n\n ) {\n\n let rpoint = self.canvas.relative_point(point);\n\n if self.contents.contains(ctx.context, rpoint) {\n\n self.contents.scroll(ctx, rpoint, x, y);\n\n }\n\n }\n\n\n\n pub fn remaining_books_capacity(&self) -> usize {\n\n self.contents.ref_object().remaining_books_capacity()\n\n }\n\n}\n\n\n\nimpl DrawableComponent for SelectBookWindow {\n\n fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult<()> {\n", "file_path": "src/object/shop_object.rs", "rank": 95, "score": 26.339750928145726 }, { "content": " }\n\n }\n\n\n\n pub fn run_black_out(&mut self, param: BlackOutParam, now: Clock) {\n\n self.texture.clear_effect();\n\n self.texture.add_effect(vec![\n\n effect::fade_in(param.black_out, now),\n\n effect::fade_out(param.black_return, now + param.black_out + param.black_keep),\n\n ]);\n\n }\n\n}\n\n\n\nimpl DrawableComponent for BlackOutTexture {\n\n fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult<()> {\n\n self.texture.draw(ctx)\n\n }\n\n\n\n #[inline(always)]\n\n fn hide(&mut self) {\n\n self.texture.hide()\n", "file_path": "src/object.rs", "rank": 96, "score": 26.327219302887674 }, { 
"content": " }\n\n\n\n fn get_padding(&self) -> f32 {\n\n self.text.get_position().x - self.button_pos.x\n\n }\n\n}\n\n\n\nimpl DrawableComponent for TextButtonTexture {\n\n fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult<()> {\n\n if self.is_visible() {\n\n let draw_param = ggraphics::DrawParam::default()\n\n .dest(mintp_new!(self.button_pos.x, self.button_pos.y));\n\n\n\n ggraphics::draw(ctx, &self.background, draw_param)?;\n\n self.text.draw(ctx)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/object/util_object.rs", "rank": 97, "score": 26.22071017291345 }, { "content": " point: numeric::Point2f,\n\n t: Clock,\n\n ) -> bool {\n\n if let Some(ui) = self.new_books_viewer.as_mut() {\n\n if ui.click_and_maybe_hide(ctx, t, button, point) {\n\n self.hide_new_books_viewer(t);\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n pub fn mouse_wheel_scroll_action<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n point: numeric::Point2f,\n\n x: f32,\n\n y: f32,\n\n ) {\n", "file_path": "src/object/shop_object.rs", "rank": 98, "score": 26.177769874619013 }, { "content": "impl TextureObject for SelectStoreBookUI {\n\n impl_texture_object_for_wrapped! {canvas}\n\n}\n\n\n\nimpl Clickable for SelectStoreBookUI {\n\n fn on_click<'a>(\n\n &mut self,\n\n ctx: &mut SuzuContext<'a>,\n\n clock: Clock,\n\n button: ggez::input::mouse::MouseButton,\n\n point: numeric::Point2f,\n\n ) {\n\n let rpoint = self.canvas.relative_point(point);\n\n\n\n if self.select_book_window.contains(ctx.context, rpoint) {\n\n self.select_book_window.on_click(ctx, clock, button, rpoint);\n\n self.redraw_request = DrawRequest::Draw;\n\n }\n\n\n\n if self.reset_select_button.contains(rpoint) {\n", "file_path": "src/object/shop_object.rs", "rank": 99, "score": 26.146917432301514 } ]
Rust
src/lib.rs
tosus/ranpaman-core
d65943b4cb8f9d644927277c6c33d2752b559b02
use num_bigint::BigUint; use num_traits::cast::{FromPrimitive, ToPrimitive}; use orion::aead; #[allow(dead_code)] use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::io::prelude::*; use blake2::{Blake2b, Digest}; use zeroize::Zeroize; const ENCRYPTION_SALT: [u8; 64] = [ 0xe3, 0x1a, 0x0c, 0x9b, 0x6b, 0x01, 0xbe, 0x19, 0xc5, 0x44, 0x7f, 0xb9, 0x2f, 0x79, 0x94, 0x91, 0xcf, 0xae, 0xb6, 0xda, 0x09, 0x0c, 0x24, 0xf3, 0x0f, 0xab, 0x2b, 0xf2, 0x4a, 0x1c, 0x39, 0xf7, 0xc1, 0xfc, 0xdc, 0x61, 0xc3, 0xf3, 0x15, 0xcf, 0x64, 0x76, 0x96, 0x25, 0xf9, 0xe6, 0xb1, 0x18, 0x62, 0xbd, 0x03, 0x6a, 0x67, 0x2d, 0xbb, 0x42, 0x1c, 0xbb, 0xb3, 0x24, 0x83, 0x5f, 0x7e, 0x53, ]; const MASTER_PASS_SALT: [u8; 64] = [ 0xa1, 0x48, 0x48, 0x5a, 0x76, 0x31, 0xe5, 0x45, 0x65, 0xf4, 0xde, 0xb0, 0xbb, 0x3a, 0x8f, 0xcc, 0xaa, 0x35, 0xff, 0x87, 0x7c, 0xd5, 0xcd, 0x4c, 0x4a, 0xbb, 0xbe, 0x21, 0x56, 0x5b, 0xe2, 0x7e, 0x60, 0x70, 0xd6, 0x5c, 0x0e, 0x3a, 0xa6, 0x02, 0xf9, 0xa1, 0xc9, 0x37, 0x88, 0x2a, 0xe0, 0xdc, 0x06, 0xcc, 0x25, 0xa6, 0x05, 0x8d, 0x75, 0x91, 0xc5, 0xdb, 0x0d, 0x90, 0xdb, 0xf3, 0x05, 0x8f, ]; type Result<T> = std::result::Result<T, Box<std::error::Error>>; #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Ranpaman { master_password: Vec<u8>, encryption_key: Vec<u8>, file_path: Option<String>, data: HashMap<(String, String), Settings>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Settings { include_special_characters: bool, revision: u32, password_length: u32, } impl Default for Settings { fn default() -> Settings { Settings { include_special_characters: true, revision: 0, password_length: 30, } } } impl Drop for Ranpaman { fn drop(&mut self) { self.master_password.zeroize(); self.encryption_key.zeroize(); } } impl Ranpaman { pub fn new(mut master_password: String, file_path: Option<String>) -> Ranpaman { let config = argon2::Config::default(); let pw = argon2::hash_raw(&master_password.as_bytes(), &MASTER_PASS_SALT, &config).unwrap(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); Ranpaman { master_password: pw, encryption_key: key, file_path, data: HashMap::new(), } } pub fn add_account( &mut self, login: String, service_name: String, settings: Settings, ) -> Result<()> { if service_name.is_empty() || login.is_empty() || settings.password_length < 4 { } let key = (service_name, login); if self.data.contains_key(&key) { } else { self.data.insert(key, settings); } Ok(()) } pub fn get_password(&self, login: String, service_name: String) -> Result<String> { match self .data .get(&(service_name.to_string(), login.to_string())) { Some(settings) => { let salt: &[u8] = &[ login.as_bytes(), service_name.as_bytes(), &settings.revision.to_le_bytes(), ] .concat(); let argon_config = argon2::Config::default(); let hash = argon2::hash_raw(&self.master_password, salt, &argon_config).unwrap(); let char_sets = generate_character_sets(settings); return encode_password(&hash, char_sets, settings.password_length as usize); } None => { Ok(String::from("")) } } } pub fn change_file_path(&mut self, new_path: Option<String>) -> Result<()> { match new_path { None => { if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; self.file_path = None; } } Some(new_path) => { let mut new_file = std::fs::File::create(&new_path)?; if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; } self.file_path = Some(new_path); let encoded_self = bincode::serialize(&self).unwrap(); let 
encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); new_file.write(&encrypted_self)?; } } Ok(()) } pub fn write_to_file(&self) -> Result<()> { let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); std::fs::write( self.file_path.as_ref().ok_or("No file path specified")?, encrypted_self, )?; Ok(()) } pub fn read_from_file(mut master_password: String, path: &str) -> Result<Ranpaman> { let read = std::fs::read(path)?; let config = argon2::Config::default(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); let decrypted = aead::open(&aead::SecretKey::from_slice(&key).unwrap(), &read).unwrap(); Ok(bincode::deserialize(&decrypted)?) } pub fn get_file_path(&self) -> Option<&String> { self.file_path.as_ref() } } fn generate_character_sets(settings: &Settings) -> Vec<Vec<char>> { let mut char_sets = Vec::new(); char_sets.push((b'A'..=b'Z').map(char::from).collect()); char_sets.push((b'a'..=b'z').map(char::from).collect()); if settings.include_special_characters { char_sets.push(vec!['1', '2', '3', '4', '5', '6', '7', '8', '9']); char_sets.push(vec!['%', '&', '#', '$', '+', '-', '@']); } char_sets } fn encode_password( raw_password: &[u8], char_sets: Vec<Vec<char>>, length: usize, ) -> Result<String> { if char_sets.iter().any(|set| set.is_empty()) { } let mut entropy = BigUint::from_bytes_le(raw_password); let mut char_set_use_flags: Vec<bool> = char_sets.iter().map(|_| false).collect(); let set_length = char_sets.iter().map(|set| set.len()).sum(); let mut encoded_password = String::new(); while encoded_password.len() < length { if entropy < BigUint::from_usize(set_length).unwrap() { } let new_char: usize = (entropy.clone() % set_length).to_usize().unwrap(); entropy /= set_length; let mut collective_length = 0; for (index, set) in char_sets.iter().enumerate() { if new_char < set.len() + collective_length { encoded_password.push(set[new_char - collective_length]); char_set_use_flags[index] = true; break; } collective_length += set.len(); } } if char_set_use_flags.into_iter().all(|flag| flag){ return Ok(encoded_password); }else{ let mut hasher = Blake2b::new(); hasher.input(raw_password); return encode_password(&hasher.result(), char_sets, length); } } #[cfg(test)] mod tests { use super::*; #[test] fn service_password_generation() { let mut ranpaman = Ranpaman::new("masterpass".to_string(), None); let site = String::from("somesite.com"); let mail = String::from("[email protected]"); let settings = Settings::default(); ranpaman .add_account(site.clone(), mail.clone(), settings) .unwrap(); let password = ranpaman.get_password(site, mail).unwrap(); assert_eq!("#DnLScQHt4zu%QDLqP$7VD535UjExb", password); } #[test] fn key_generation() { let ranpaman = Ranpaman::new("masterpass".to_string(), None); assert_eq!( ranpaman.master_password, [ 223, 108, 222, 141, 127, 89, 120, 143, 166, 127, 41, 255, 155, 5, 5, 195, 198, 186, 182, 18, 209, 221, 182, 64, 164, 34, 27, 230, 196, 48, 187, 237 ] ); assert_eq!( ranpaman.encryption_key, [ 110, 249, 117, 224, 82, 86, 66, 21, 42, 235, 243, 204, 137, 226, 46, 12, 116, 161, 243, 48, 201, 170, 187, 179, 80, 147, 37, 111, 124, 108, 191, 182 ] ); } #[test] fn read_write() { let path = "read_write_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); 
let decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn change_file_path() { let path = "change_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); let new_path = "change_file_path_other_test_file"; decoded .change_file_path(Some(new_path.to_string())) .unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), new_path).unwrap(); decoded.change_file_path(Some(path.to_string())).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn get_file_path() { let path = "get_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); assert_eq!(ranpaman.get_file_path(), Some(&path.to_string())); } }
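The listing above (src/lib.rs of tosus/ranpaman-core) exposes a small deterministic-password API: `Ranpaman::new` derives the master and encryption keys with argon2, `add_account` stores per-site `Settings`, `get_password` re-derives the password, and `write_to_file` persists the AEAD-sealed, bincode-encoded state. What follows is a minimal usage sketch and is not part of the dataset record; the crate name `ranpaman_core`, the backing-file name `vault.bin`, and the example credentials are assumptions for illustration, and the call pattern mirrors the `service_password_generation` test in the listing.

// Hypothetical usage sketch of the Ranpaman API shown in the listing above.
// Assumptions: the library is importable as `ranpaman_core`, and dependency
// versions match the listing (argon2, orion, bincode, serde, blake2, zeroize).
use ranpaman_core::{Ranpaman, Settings};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Derive the master-password hash and the file-encryption key (argon2 with
    // the two fixed salts from the listing); optionally attach a backing file.
    let mut vault = Ranpaman::new(
        "correct horse battery staple".to_string(),
        Some("vault.bin".to_string()), // hypothetical path
    );

    // Register an account with default Settings (length 30, special
    // characters enabled, revision 0), mirroring the test's argument order.
    vault.add_account(
        "somesite.com".to_string(),
        "[email protected]".to_string(),
        Settings::default(),
    )?;

    // Deterministically re-derive the password for that stored entry.
    let password = vault.get_password(
        "somesite.com".to_string(),
        "[email protected]".to_string(),
    )?;
    println!("derived password: {}", password);

    // bincode-encode the state, seal it with orion AEAD, write it to vault.bin.
    vault.write_to_file()?;
    Ok(())
}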
use num_bigint::BigUint; use num_traits::cast::{FromPrimitive, ToPrimitive}; use orion::aead; #[allow(dead_code)] use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::io::prelude::*; use blake2::{Blake2b, Digest}; use zeroize::Zeroize; const ENCRYPTION_SALT: [u8; 64] = [ 0xe3, 0x1a, 0x0c, 0x9b, 0x6b, 0x01, 0xbe, 0x19, 0xc5, 0x44, 0x7f, 0xb9, 0x2f, 0x79, 0x94, 0x91, 0xcf, 0xae, 0xb6, 0xda, 0x09, 0x0c, 0x24, 0xf3, 0x0f, 0xab, 0x2b, 0xf2, 0x4a, 0x1c, 0x39, 0xf7, 0xc1, 0xfc, 0xdc, 0x61, 0xc3, 0xf3, 0x15, 0xcf, 0x64, 0x76, 0x96, 0x25, 0xf9, 0xe6, 0xb1, 0x18, 0x62, 0xbd, 0x03, 0x6a, 0x67, 0x2d, 0xbb, 0x42, 0x1c, 0xbb, 0xb3, 0x24, 0x83, 0x5f, 0x7e, 0x53, ]; const MASTER_PASS_SALT: [u8; 64] = [ 0xa1, 0x48, 0x48, 0x5a, 0x76, 0x31, 0xe5, 0x45, 0x65, 0xf4, 0xde, 0xb0, 0xbb, 0x3a, 0x8f, 0xcc, 0xaa, 0x35, 0xff, 0x87, 0x7c, 0xd5, 0xcd, 0x4c, 0x4a, 0xbb, 0xbe, 0x21, 0x56, 0x5b, 0xe2, 0x7e, 0x60, 0x70, 0xd6, 0x5c, 0x0e, 0x3a, 0xa6, 0x02, 0xf9, 0xa1, 0xc9, 0x37, 0x88, 0x2a, 0xe0, 0xdc, 0x06, 0xcc, 0x25, 0xa6, 0x05, 0x8d, 0x75, 0x91, 0xc5, 0xdb, 0x0d, 0x90, 0xdb, 0xf3, 0x05, 0x8f, ]; type Result<T> = std::result::Result<T, Box<std::error::Error>>; #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Ranpaman { master_password: Vec<u8>, encryption_key: Vec<u8>, file_path: Option<String>, data: HashMap<(String, String), Settings>, } #[derive(Serialize, Deserialize, Debug, PartialEq)] pub struct Settings { include_special_characters: bool, revision: u32, password_length: u32, } impl Default for Settings { fn default() -> Settings { Settings { include_special_characters: true, revision: 0, password_length: 30, } } } impl Drop for Ranpaman { fn drop(&mut self) { self.master_password.zeroize(); self.encryption_key.zeroize(); } } impl Ranpaman { pub fn new(mut master_password: String, file_path: Option<String>) -> Ranpaman { let config = argon2::Config::default(); let pw = argon2::hash_raw(&master_password.as_bytes(), &MASTER_PASS_SALT, &config).unwrap(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); Ranpaman { master_password: pw, encryption_key: key, file_path, data: HashMap::new(), } }
pub fn get_password(&self, login: String, service_name: String) -> Result<String> { match self .data .get(&(service_name.to_string(), login.to_string())) { Some(settings) => { let salt: &[u8] = &[ login.as_bytes(), service_name.as_bytes(), &settings.revision.to_le_bytes(), ] .concat(); let argon_config = argon2::Config::default(); let hash = argon2::hash_raw(&self.master_password, salt, &argon_config).unwrap(); let char_sets = generate_character_sets(settings); return encode_password(&hash, char_sets, settings.password_length as usize); } None => { Ok(String::from("")) } } } pub fn change_file_path(&mut self, new_path: Option<String>) -> Result<()> { match new_path { None => { if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; self.file_path = None; } } Some(new_path) => { let mut new_file = std::fs::File::create(&new_path)?; if let Some(old_path) = &self.file_path { std::fs::remove_file(old_path)?; } self.file_path = Some(new_path); let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); new_file.write(&encrypted_self)?; } } Ok(()) } pub fn write_to_file(&self) -> Result<()> { let encoded_self = bincode::serialize(&self).unwrap(); let encrypted_self = aead::seal( &aead::SecretKey::from_slice(&self.encryption_key).unwrap(), &encoded_self, ) .unwrap(); std::fs::write( self.file_path.as_ref().ok_or("No file path specified")?, encrypted_self, )?; Ok(()) } pub fn read_from_file(mut master_password: String, path: &str) -> Result<Ranpaman> { let read = std::fs::read(path)?; let config = argon2::Config::default(); let key = argon2::hash_raw(&master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap(); master_password.zeroize(); let decrypted = aead::open(&aead::SecretKey::from_slice(&key).unwrap(), &read).unwrap(); Ok(bincode::deserialize(&decrypted)?) 
} pub fn get_file_path(&self) -> Option<&String> { self.file_path.as_ref() } } fn generate_character_sets(settings: &Settings) -> Vec<Vec<char>> { let mut char_sets = Vec::new(); char_sets.push((b'A'..=b'Z').map(char::from).collect()); char_sets.push((b'a'..=b'z').map(char::from).collect()); if settings.include_special_characters { char_sets.push(vec!['1', '2', '3', '4', '5', '6', '7', '8', '9']); char_sets.push(vec!['%', '&', '#', '$', '+', '-', '@']); } char_sets } fn encode_password( raw_password: &[u8], char_sets: Vec<Vec<char>>, length: usize, ) -> Result<String> { if char_sets.iter().any(|set| set.is_empty()) { } let mut entropy = BigUint::from_bytes_le(raw_password); let mut char_set_use_flags: Vec<bool> = char_sets.iter().map(|_| false).collect(); let set_length = char_sets.iter().map(|set| set.len()).sum(); let mut encoded_password = String::new(); while encoded_password.len() < length { if entropy < BigUint::from_usize(set_length).unwrap() { } let new_char: usize = (entropy.clone() % set_length).to_usize().unwrap(); entropy /= set_length; let mut collective_length = 0; for (index, set) in char_sets.iter().enumerate() { if new_char < set.len() + collective_length { encoded_password.push(set[new_char - collective_length]); char_set_use_flags[index] = true; break; } collective_length += set.len(); } } if char_set_use_flags.into_iter().all(|flag| flag){ return Ok(encoded_password); }else{ let mut hasher = Blake2b::new(); hasher.input(raw_password); return encode_password(&hasher.result(), char_sets, length); } } #[cfg(test)] mod tests { use super::*; #[test] fn service_password_generation() { let mut ranpaman = Ranpaman::new("masterpass".to_string(), None); let site = String::from("somesite.com"); let mail = String::from("[email protected]"); let settings = Settings::default(); ranpaman .add_account(site.clone(), mail.clone(), settings) .unwrap(); let password = ranpaman.get_password(site, mail).unwrap(); assert_eq!("#DnLScQHt4zu%QDLqP$7VD535UjExb", password); } #[test] fn key_generation() { let ranpaman = Ranpaman::new("masterpass".to_string(), None); assert_eq!( ranpaman.master_password, [ 223, 108, 222, 141, 127, 89, 120, 143, 166, 127, 41, 255, 155, 5, 5, 195, 198, 186, 182, 18, 209, 221, 182, 64, 164, 34, 27, 230, 196, 48, 187, 237 ] ); assert_eq!( ranpaman.encryption_key, [ 110, 249, 117, 224, 82, 86, 66, 21, 42, 235, 243, 204, 137, 226, 46, 12, 116, 161, 243, 48, 201, 170, 187, 179, 80, 147, 37, 111, 124, 108, 191, 182 ] ); } #[test] fn read_write() { let path = "read_write_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn change_file_path() { let path = "change_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string())); ranpaman.write_to_file().unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap(); let new_path = "change_file_path_other_test_file"; decoded .change_file_path(Some(new_path.to_string())) .unwrap(); let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), new_path).unwrap(); decoded.change_file_path(Some(path.to_string())).unwrap(); std::fs::remove_file(path).unwrap(); assert_eq!(ranpaman, decoded); } #[test] fn get_file_path() { let path = "get_file_path_test_file"; let ranpaman = Ranpaman::new("masterpass".to_string(), 
Some(path.to_string())); assert_eq!(ranpaman.get_file_path(), Some(&path.to_string())); } }
pub fn add_account(
    &mut self,
    login: String,
    service_name: String,
    settings: Settings,
) -> Result<()> {
    // Guard against unusable input: blank identifiers, or a password too short
    // to cover every character set at least once.
    if service_name.is_empty() || login.is_empty() || settings.password_length < 4 {
        return Err("invalid account: empty login/service name or password length below 4".into());
    }
    let key = (service_name, login);
    // Only insert if the account is not already known, so existing settings
    // (and therefore previously generated passwords) are never silently overwritten.
    if !self.data.contains_key(&key) {
        self.data.insert(key, settings);
    }
    Ok(())
}
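For the key handling, the listing derives two independent secrets from one master password with `argon2::hash_raw` (one used for password generation, one as the `orion` AEAD key) and then seals the serialized state with `aead::seal`/`aead::open`. A minimal round-trip sketch using the same crate calls that appear above; the salts and plaintext here are placeholders, and it assumes the default `argon2::Config`'s 32-byte output, which is what the listing itself relies on when building the `SecretKey`:

use orion::aead;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config = argon2::Config::default();
    // Two different salts yield two unrelated 32-byte secrets from one password,
    // just as the listing does for `master_password` and `encryption_key`.
    let pw_salt = [0x11u8; 16];  // placeholder salt
    let key_salt = [0x22u8; 16]; // placeholder salt
    let _pw_secret = argon2::hash_raw(b"masterpass", &pw_salt, &config)?;
    let enc_key = argon2::hash_raw(b"masterpass", &key_salt, &config)?;

    // Seal and reopen some bytes with the derived key (placeholder plaintext).
    let secret_key = aead::SecretKey::from_slice(&enc_key)?;
    let sealed = aead::seal(&secret_key, b"vault contents")?;
    let opened = aead::open(&secret_key, &sealed)?;
    assert_eq!(opened, b"vault contents".to_vec());
    Ok(())
}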
function_block-full_function
[ { "content": "# ranpaman-core\n\nA library for creating MasterPassword-style password managers. Written in Rust and uses Argon2.\n\n\n\nCurrently a proof of concept and obviously insecure.\n", "file_path": "README.md", "rank": 3, "score": 7357.13440566655 } ]
Rust
server/src/server/mod.rs
JAD3N/mc-server
ce1ad57f2417f722d6c00ae21fdc3cd0efdadd7a
mod settings; mod status; mod executor; pub use settings::*; pub use status::*; pub use executor::*; use crate::core::Registries; use crate::chat::component::TextComponent; use crate::world::level::Level; use crate::network::{Listener, Connection}; use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; use std::path::Path; use tokio::sync::Mutex; use tokio::runtime; use futures::future; use flume::{Sender, Receiver}; pub static VERSION_NAME: &str = "1.15.2"; pub static VERSION_STABLE: bool = true; pub static WORLD_VERSION: u32 = 2230; pub static PROTOCOL_VERSION: u32 = 578; pub static PACK_VERSION: u32 = 5; pub static RELEASE_TARGET: &str = "1.15.2"; pub enum ServerRequest { Connected(Connection), } pub struct ServerShared { pub registries: Arc<Registries>, pub settings: Arc<ServerSettings>, pub status: Arc<Mutex<ServerStatus>>, } pub struct Server { pub shared: Arc<ServerShared>, pub connections: Vec<Arc<Mutex<Connection>>>, pub levels: HashMap<String, Arc<Mutex<Level>>>, pub tx: Sender<ServerRequest>, pub rx: Receiver<ServerRequest>, } impl Server { pub fn get_level(&self, dimension: &String) -> Option<&Arc<Mutex<Level>>> { self.levels.get(dimension) } pub async fn tick(&mut self) -> anyhow::Result<()> { for request in self.rx.try_iter() { match request { ServerRequest::Connected(connection) => { if !connection.is_connected() { continue; } let connection = Arc::new(Mutex::new(connection)); self.connections.push(connection); }, } } let mut disconnected = vec![]; for (i, connection) in self.connections.iter().enumerate() { let mut connection = connection.lock().await; connection.tick(); if !connection.is_connected() { disconnected.push(i); } } for &i in disconnected.iter().rev() { self.connections.remove(i); } Ok(()) } } pub struct ServerContainer { pub server: Arc<Mutex<Server>>, pub shared: Arc<ServerShared>, } impl ServerContainer { pub fn new(registries: Registries, settings: ServerSettings) -> Self { let mut status = ServerStatus { description: TextComponent::new(settings.motd()).into(), version: ServerStatusVersion { name: String::from(VERSION_NAME), protocol: PROTOCOL_VERSION, }, players: ServerStatusPlayers { max_players: settings.max_players(), num_players: 0, sample: vec![], }, favicon: None, }; let favicon_path = Path::new("server-icon.png"); if !favicon_path.is_file() { } if favicon_path.is_file() { if let Err(e) = status.load_favicon(favicon_path) { error!("Couldn't load server icon: {}", e); } } let shared = Arc::new(ServerShared { registries: Arc::new(registries), settings: Arc::new(settings), status: Arc::new(Mutex::new(status)), }); let (tx, rx) = flume::unbounded(); let server = Arc::new(Mutex::new(Server { shared: shared.clone(), connections: vec![], levels: HashMap::new(), tx, rx, })); Self { server, shared } } async fn load_levels(&self) -> anyhow::Result<()> { info!("loading levels"); let mut server = self.server.lock().await; server.levels.insert( String::from("level_1"), Arc::new(Mutex::new(Level { name: String::from("Level 1"), server: self.server.clone(), })) ); info!("loaded levels"); Ok(()) } async fn bind(&self, addr: SocketAddr) -> anyhow::Result<Listener> { let shared = self.shared.clone(); let server = self.server.clone(); let server_tx = server.lock().await .tx.clone(); let listener = Listener::bind( server_tx, shared, addr, ).await?; Ok(listener) } async fn execute(&self) -> anyhow::Result<()> { let mut executor = ServerExecutor::new(self.server.clone()); loop { executor.execute().await?; executor.wait().await; } } pub fn start(&self) 
-> anyhow::Result<()> { let addr = self.shared.settings.addr().parse::<SocketAddr>()?; let mut network_rt = runtime::Builder::new() .thread_name("network") .core_threads(2) .threaded_scheduler() .enable_all() .build() .unwrap(); let mut server_rt = runtime::Builder::new() .thread_name("server") .threaded_scheduler() .enable_all() .build() .unwrap(); match network_rt.block_on(self.bind(addr)) { Err(e) => error!("Network error: {}", e), Ok(listener) => { let (load_levels, stop_handle_1) = future::abortable(self.load_levels()); let (execute, stop_handle_2) = future::abortable(self.execute()); ctrlc::set_handler(move || { stop_handle_1.abort(); stop_handle_2.abort(); }).ok(); if let Err(e) = server_rt.block_on(load_levels) { error!("Fatal error loading levels: {}", e); } else { network_rt.spawn(listener.listen()); server_rt.block_on(execute).ok(); drop(network_rt); } } }; info!("Server shutdown."); Ok(()) } }
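`Server::tick` in this listing drains `ServerRequest`s pushed by the network side through a flume unbounded channel (`self.rx.try_iter()`), so the game loop never blocks on socket threads. A stripped-down sketch of that producer/consumer handoff using the same flume calls; the `Request` enum and its payload are stand-ins, not the project's types:

// Stand-in for ServerRequest: whatever the network side wants the game loop to process.
enum Request {
    Connected(u32),
}

fn main() {
    let (tx, rx) = flume::unbounded::<Request>();

    // Network side: push requests without waiting for the game loop.
    tx.send(Request::Connected(1)).unwrap();
    tx.send(Request::Connected(2)).unwrap();

    // Game-loop side: drain whatever is queued this tick, never blocking.
    for request in rx.try_iter() {
        match request {
            Request::Connected(id) => println!("client {} connected", id),
        }
    }
}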
mod settings; mod status; mod executor; pub use settings::*; pub use status::*; pub use executor::*; use crate::core::Registries; use crate::chat::component::TextComponent; use crate::world::level::Level; use crate::network::{Listener, Connection}; use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; use std::path::Path; use tokio::sync::Mutex; use tokio::runtime; use futures::future; use flume::{Sender, Receiver}; pub static VERSION_NAME: &str = "1.15.2"; pub static VERSION_STABLE: bool = true; pub static WORLD_VERSION: u32 = 2230; pub static PROTOCOL_VERSION: u32 = 578; pub static PACK_VERSION: u32 = 5; pub static RELEASE_TARGET: &str = "1.15.2"; pub enum ServerRequest { Connected(Connection), } pub struct ServerShared { pub registries: Arc<Registries>, pub settings: Arc<ServerSettings>, pub status: Arc<Mutex<ServerStatus>>, } pub struct Server { pub shared: Arc<ServerShared>, pub connections: Vec<Arc<Mutex<Connection>>>, pub levels: HashMap<String, Arc<Mutex<Level>>>, pub tx: Sender<ServerRequest>, pub rx: Receiver<ServerRequest>, } impl Server { pub fn get_level(&self, dimension: &String) -> Option<&Arc<Mutex<Level>>> { self.levels.get(dimension) } pub async fn tick(&mut self) -> anyhow::Result<()> { for request in self.rx.try_iter() { match request { ServerRequest::Connected(connection) => { if !connection.is_connected() { continue; } let connection = Arc::new(Mutex::new(connection)); self.connections.push(connection); }, } } let mut disconnected = vec![]; for (i, connection) in self.connections.iter().enumerate() { let mut connection = connection.lock().await; connection.tick(); if !connection.is_connected() { disconnected.push(i); } } for &i in disconnected.iter().rev() { self.connections.remove(i); } Ok(()) } } pub struct ServerContainer { pub server: Arc<Mutex<Server>>, pub shared: Arc<ServerShared>, } impl ServerContainer { pub fn new(registries: Registries, settings: ServerSettings) -> Self { let mut status = ServerStatus { description: TextComponent::new(settings.motd()).into(), version: ServerStatusVersion { name: String::from(VERSION_NAME), protocol: PROTOCOL_VERSION, }, players: ServerStatusPlayers { max_players: settings.max_players(), num_players: 0, sample: vec![], }, favicon: None, }; let favicon_path = Path::new("server-icon.png"); if !favicon_path.is_file() { } if favicon_path.is_file() { if let Err(e) = status.load_favicon(favicon_path) { error!("Couldn't load server icon: {}", e); } } let shared = Arc::new(ServerShared { registries: Arc::new(registries), settings: Arc::new(settings), status: Arc::new(Mutex::new(status)), }); let (tx, rx) = flume::unbounded(); let server = Arc::new(Mutex::new(Server { shared: shared.clone(), connections: vec![], levels: HashMap::new(), tx, rx, })); Self { server, shared } } async fn load_levels(&self) -> anyhow::Result<()> { info!("loading levels"); let mut server = self.server.lock().await; server.levels.insert( String::from("level_1"), Arc::new(Mutex::new(Level { name: String::from("Level 1"), server: self.server.clon
async fn bind(&self, addr: SocketAddr) -> anyhow::Result<Listener> { let shared = self.shared.clone(); let server = self.server.clone(); let server_tx = server.lock().await .tx.clone(); let listener = Listener::bind( server_tx, shared, addr, ).await?; Ok(listener) } async fn execute(&self) -> anyhow::Result<()> { let mut executor = ServerExecutor::new(self.server.clone()); loop { executor.execute().await?; executor.wait().await; } } pub fn start(&self) -> anyhow::Result<()> { let addr = self.shared.settings.addr().parse::<SocketAddr>()?; let mut network_rt = runtime::Builder::new() .thread_name("network") .core_threads(2) .threaded_scheduler() .enable_all() .build() .unwrap(); let mut server_rt = runtime::Builder::new() .thread_name("server") .threaded_scheduler() .enable_all() .build() .unwrap(); match network_rt.block_on(self.bind(addr)) { Err(e) => error!("Network error: {}", e), Ok(listener) => { let (load_levels, stop_handle_1) = future::abortable(self.load_levels()); let (execute, stop_handle_2) = future::abortable(self.execute()); ctrlc::set_handler(move || { stop_handle_1.abort(); stop_handle_2.abort(); }).ok(); if let Err(e) = server_rt.block_on(load_levels) { error!("Fatal error loading levels: {}", e); } else { network_rt.spawn(listener.listen()); server_rt.block_on(execute).ok(); drop(network_rt); } } }; info!("Server shutdown."); Ok(()) } }
e(), })) ); info!("loaded levels"); Ok(()) }
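The `ServerExecutor` snippets further down (executor.rs among the context items) schedule ticks by advancing `next_tick_time` one tick per iteration and, when the loop falls more than two seconds behind, skipping the missed ticks with a "Can't keep up!" warning. The same catch-up arithmetic in a minimal, synchronous, std-only sketch; the tick rate, iteration count, and workload are placeholders, and this is not the crate's tokio-based implementation:

use std::time::{Duration, Instant};

fn main() {
    let tick = Duration::from_millis(50); // 20 ticks per second, mirroring TICK_RATE
    let mut next_tick_time = Instant::now();

    for _ in 0..100 {
        let now = Instant::now();
        if now > next_tick_time {
            let behind = now - next_tick_time;
            if behind.as_millis() > 2000 {
                // Same idea as the "Can't keep up!" branch: jump over the missed ticks.
                let missed = behind.as_millis() as u32 / tick.as_millis() as u32;
                eprintln!("running {}ms ({} ticks) behind", behind.as_millis(), missed);
                next_tick_time += tick * missed;
            }
        }
        next_tick_time += tick;

        // ... one tick of work would go here ...

        // Sleep until the next scheduled tick, if we are ahead of schedule.
        if let Some(sleep) = next_tick_time.checked_duration_since(Instant::now()) {
            std::thread::sleep(sleep);
        }
    }
}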
function_block-function_prefixed
[ { "content": "fn init_resource_registry<T: 'static>(name: &str) -> Arc<ResourceRegistry<T>> {\n\n let mut event = RegisterEvent(ResourceRegistry::new());\n\n\n\n // send event to all subscribers to add to registry\n\n dispatch_event!(\"main\", &mut event);\n\n\n\n // log completion\n\n info!(\"registries -> Loaded {}: {}\", name, event.0.len());\n\n\n\n Arc::new(event.0)\n\n}\n\n\n", "file_path": "server/src/core/registries.rs", "rank": 0, "score": 199629.0323553287 }, { "content": "fn register_protocols(event: &mut RegisterEvent<MappedRegistry<i32, Protocol>>) {\n\n let protocols = &mut event.0;\n\n\n\n protocols.register(-1, protocol! {\n\n id: -1,\n\n handler: handshake::HandshakeProtocolHandler,\n\n server: [handshake::IntentionPacket],\n\n });\n\n\n\n protocols.register(1, protocol! {\n\n id: 1,\n\n handler: status::StatusProtocolHandler,\n\n server: [\n\n status::StatusRequestPacket,\n\n status::PingRequestPacket,\n\n ],\n\n client: [\n\n status::StatusResponsePacket,\n\n status::PongResponsePacket,\n\n ],\n\n });\n\n}\n\n\n", "file_path": "src/minecraft/protocol/mod.rs", "rank": 1, "score": 172840.63450616726 }, { "content": "pub fn init() {\n\n subscribe_event!(\"main\", register_protocols);\n\n}", "file_path": "src/minecraft/protocol/mod.rs", "rank": 2, "score": 149456.94572875835 }, { "content": "fn init_protocols() -> Arc<MappedRegistry<i32, Protocol>> {\n\n let mut event = RegisterEvent(MappedRegistry::new());\n\n\n\n // send event to protocols\n\n dispatch_event!(\"main\", &mut event);\n\n\n\n // log completion\n\n info!(\"registries -> Loaded protocols: {}\", event.0.len());\n\n\n\n Arc::new(event.0)\n\n}\n\n\n\nimpl Registries {\n\n pub fn new() -> Self {\n\n // load registries\n\n info!(\"registries -> Loading registries...\");\n\n\n\n let protocols = init_protocols();\n\n let blocks = init_resource_registry(\"blocks\");\n\n let sounds = init_resource_registry(\"sounds\");\n", "file_path": "server/src/core/registries.rs", "rank": 3, "score": 139321.21043953966 }, { "content": "type PacketWriteInit = fn(&Box<dyn Packet>, &mut BytesMut) -> anyhow::Result<()>;\n\n\n\npub struct PacketSet {\n\n id_read_map: HashMap<usize, PacketReadInit>,\n\n id_write_map: HashMap<usize, PacketWriteInit>,\n\n type_map: HashMap<TypeId, usize>,\n\n}\n\n\n\nimpl PacketSet {\n\n pub fn new() -> Self {\n\n Self {\n\n id_read_map: HashMap::new(),\n\n id_write_map: HashMap::new(),\n\n type_map: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn read_packet(&self, id: usize, cursor: &mut Cursor<&[u8]>) -> Option<Box<dyn Packet>> {\n\n match self.id_read_map.get(&id) {\n\n Some(packet_init) => {\n", "file_path": "server/src/network/protocol/packet/set.rs", "rank": 4, "score": 131423.05936150913 }, { "content": "type PacketReadInit = fn(&mut Cursor<&[u8]>) -> Result<Box<dyn Packet>, ProtocolError>;\n", "file_path": "server/src/network/protocol/packet/set.rs", "rank": 5, "score": 127588.09791681691 }, { "content": "pub fn init() {\n\n blocks::init();\n\n items::init();\n\n sounds::init();\n\n protocol::init();\n\n\n\n info!(\"init -> Minecraft module initialized.\");\n\n}", "file_path": "src/minecraft/mod.rs", "rank": 6, "score": 123694.27707038527 }, { "content": "pub fn init() {\n\n let stdout = ConsoleAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(LOG_PATTERN)))\n\n .build();\n\n\n\n let file = RollingFileAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(LOG_PATTERN)))\n\n .build(\"logs/server.log\", Box::new(\n\n CompoundPolicy::new(\n\n Box::new(SizeTrigger::new(10_000_000)),\n\n 
Box::new(FixedWindowRoller::builder().build(\"logs/server-{}.log.gz\", 10).unwrap()),\n\n ),\n\n ))\n\n .unwrap();\n\n\n\n let config = Config::builder()\n\n .appender(Appender::builder().build(\"stdout\", Box::new(stdout)))\n\n .appender(Appender::builder().build(\"file\", Box::new(file)))\n\n .build(\n\n Root::builder()\n", "file_path": "server/src/logger.rs", "rank": 7, "score": 111569.28137143576 }, { "content": "pub fn init() {\n\n *MAIN.lock().unwrap() = Some(EventBus::new(\"main\"));\n\n\n\n // log that events have been set up\n\n info!(\"init -> Events initialized.\");\n\n}", "file_path": "server/src/events.rs", "rank": 8, "score": 111569.28137143576 }, { "content": "pub fn init() {\n\n // set up\n\n logger::init();\n\n events::init();\n\n}", "file_path": "server/src/lib.rs", "rank": 9, "score": 111569.28137143576 }, { "content": "fn register_sounds(event: &mut RegisterEvent<ResourceRegistry<Sound>>) {\n\n let registry = &mut event.0;\n\n\n\n registry.register_locatable(Sound::new(\"minecraft:test\"));\n\n}\n\n\n", "file_path": "src/minecraft/sounds.rs", "rank": 10, "score": 103815.72889106284 }, { "content": "pub trait Block: mopa::Any + Send + Sync {}\n\n\n\nmopafy!(Block);", "file_path": "server/src/world/level/block/mod.rs", "rank": 11, "score": 103602.18466236282 }, { "content": "pub fn get_millis() -> u64 {\n\n chrono::Local::now().timestamp_millis() as u64\n\n}\n\n\n", "file_path": "server/src/util/time.rs", "rank": 12, "score": 102069.05570556881 }, { "content": "pub fn get_nanos() -> u64 {\n\n chrono::Local::now().timestamp_nanos() as u64\n\n}\n\n\n", "file_path": "server/src/util/time.rs", "rank": 13, "score": 102069.05570556881 }, { "content": "#[async_trait]\n\npub trait Packet: mopa::Any + ProtocolRead + ProtocolWrite + Send + Sync + fmt::Debug {\n\n async fn handle(&mut self, _handler: &mut Box<dyn ProtocolHandler>) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n fn into_box(self) -> Box<dyn Packet> where Self: Sized {\n\n Box::new(self)\n\n }\n\n}\n\n\n\npub type PacketPayload = (usize, Box<dyn Packet>);\n\n\n\nmopafy!(Packet);\n\n\n\n#[macro_export]\n\nmacro_rules! 
packet {\n\n ($handler:tt, $name:tt, $lfname:tt) => {\n\n #[async_trait::async_trait]\n\n impl $crate::network::protocol::Packet for $name {\n\n async fn handle(&mut self, handler: &mut Box<dyn $crate::network::protocol::ProtocolHandler>) -> Result<(), anyhow::Error> {\n\n let handler: &mut $handler = handler.downcast_mut::<$handler>().unwrap();\n\n handler.$lfname(self).await\n\n }\n\n }\n\n };\n\n ($name:ident) => {\n\n impl $crate::network::protocol::Packet for $name {}\n\n };\n\n}", "file_path": "server/src/network/protocol/packet/mod.rs", "rank": 14, "score": 100761.09800550147 }, { "content": "pub fn sleep(millis: u64) {\n\n thread::sleep(Duration::from_millis(millis));\n\n}", "file_path": "server/src/util/time.rs", "rank": 15, "score": 100179.5263531562 }, { "content": "#[derive(Clone, Copy, PartialEq)]\n\nenum DimensionTrait {\n\n HasSkyLight,\n\n}\n\n\n\ntraitable!(DimensionTrait, Dimension {\n\n id: i32,\n\n name: &'static str,\n\n suffix: &'static str,\n\n folder: &'static str,\n\n});\n\n\n\nimpl Clone for Dimension {\n\n fn clone(&self) -> Self {\n\n Dimension {\n\n id: self.id,\n\n name: self.name,\n\n suffix: self.suffix,\n\n folder: self.folder,\n\n traits: self.traits.clone(),\n\n }\n", "file_path": "server/src/world/level/dimension.rs", "rank": 16, "score": 98145.32314765124 }, { "content": "fn register_blocks(event: &mut RegisterEvent<ResourceRegistry<Box<dyn Block>>>) {\n\n let registry = &mut event.0;\n\n\n\n registry.register(\"minecraft:air\", Box::new(AirBlock::new()));\n\n}\n\n\n", "file_path": "src/minecraft/blocks.rs", "rank": 17, "score": 97031.80788576062 }, { "content": "pub trait ProtocolLength {\n\n fn len(&self) -> usize;\n\n}\n\n\n", "file_path": "server/src/network/protocol/io.rs", "rank": 18, "score": 95119.7522370971 }, { "content": "pub trait ProtocolWrite: ProtocolLength {\n\n fn write<U: BufMut>(&self, _dst: &mut U) -> Result<(), ProtocolError> where Self: Sized {\n\n unimplemented!(\"protocol write not implemented\");\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! protocol_struct {\n\n ($name:ident { $($fname:ident: $fty:ty),* $(,)? 
}) => {\n\n #[derive(Debug)]\n\n pub struct $name {\n\n $(pub $fname: $fty),*\n\n }\n\n\n\n impl $crate::network::protocol::ProtocolLength for $name {\n\n fn len(&self) -> usize {\n\n 0 $(+ self.$fname.len())*\n\n }\n\n }\n\n\n", "file_path": "server/src/network/protocol/io.rs", "rank": 19, "score": 94340.29703184485 }, { "content": "pub trait ProtocolRead: ProtocolLength {\n\n fn read<U: Buf>(_src: &mut U) -> Result<Self, ProtocolError> where Self: Sized {\n\n unimplemented!(\"protocol read not implemented\");\n\n }\n\n}\n\n\n", "file_path": "server/src/network/protocol/io.rs", "rank": 20, "score": 94340.29703184485 }, { "content": "pub fn init() {\n\n subscribe_event!(\"main\", register_blocks, 1000);\n\n}", "file_path": "src/minecraft/blocks.rs", "rank": 21, "score": 93394.39400685951 }, { "content": "pub fn init() {\n\n \n\n}", "file_path": "src/minecraft/items.rs", "rank": 22, "score": 93394.39400685951 }, { "content": "pub fn init() {\n\n subscribe_event!(\"main\", register_sounds, 1000);\n\n}", "file_path": "src/minecraft/sounds.rs", "rank": 23, "score": 93394.39400685951 }, { "content": "#[async_trait]\n\npub trait ProtocolHandler: mopa::Any + Send + Sync {\n\n fn new(state: ProtocolHandlerState) -> Self where Self: Sized;\n\n\n\n fn new_box(state: ProtocolHandlerState) -> Box<dyn ProtocolHandler> where Self: Sized {\n\n Box::new(Self::new(state))\n\n }\n\n\n\n async fn tick(&mut self) -> anyhow::Result<()> { Ok(()) }\n\n async fn handle_disconnect(&mut self) -> anyhow::Result<()> { Ok(()) }\n\n}\n\n\n\nmopafy!(ProtocolHandler);\n", "file_path": "server/src/network/protocol/handler.rs", "rank": 24, "score": 85897.44271889498 }, { "content": "pub trait Component: ToJsonValue {\n\n fn append_extra_json(&self, json: &mut serde_json::Value) {\n\n let extra: Vec<serde_json::Value> = self.siblings()\n\n .iter()\n\n .map(|sibling| sibling.to_json().unwrap())\n\n .collect();\n\n\n\n if !extra.is_empty() {\n\n json[\"extra\"] = json!(extra);\n\n }\n\n }\n\n\n\n fn append_style_json(&self, json: &mut serde_json::Value) {\n\n let style_json = self.style().to_json();\n\n\n\n if let Some(style_json) = style_json {\n\n json[\"style\"] = style_json;\n\n }\n\n }\n\n\n", "file_path": "server/src/chat/component/mod.rs", "rank": 25, "score": 84243.99994156808 }, { "content": "mod kind;\n\nmod dimension;\n\nmod block;\n\npub mod chunk;\n\n\n\npub use kind::*;\n\npub use dimension::*;\n\npub use block::*;\n\n\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\nuse crate::server::Server;\n\n\n\npub struct Level {\n\n pub name: String,\n\n pub server: Arc<Mutex<Server>>,\n\n // pub dimension: String,\n\n // pub chunks: ChunkStore,\n\n}\n\n\n\nimpl Level {\n\n pub async fn tick(&mut self) -> anyhow::Result<()> {\n\n // let _server = self.server.lock().await;\n\n info!(\"did level tick {}!\", self.name);\n\n Ok(())\n\n }\n\n}", "file_path": "server/src/world/level/mod.rs", "rank": 26, "score": 79416.26153232554 }, { "content": " pub fn new<T: ProtocolHandler>(id: i32) -> Self {\n\n Self {\n\n id,\n\n handler: T::new_box,\n\n server: PacketSet::new(),\n\n client: PacketSet::new(),\n\n }\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! protocol {\n\n {\n\n id: $id:expr,\n\n handler: $handler:ty\n\n $(, server: [$($sp:ty),* $(,)?] $(,)?)?\n\n $(, client: [$($cp:ty),* $(,)?] 
$(,)?)?\n\n } => {\n\n {\n\n use $crate::network::protocol::{ProtocolRead, ProtocolWrite, PacketSet};\n", "file_path": "server/src/network/protocol/mod.rs", "rank": 27, "score": 78219.79169024964 }, { "content": "\n\n #[error(\"value is too large\")]\n\n TooLarge,\n\n\n\n #[error(\"unknown protocol error\")]\n\n Unknown,\n\n}\n\n\n\npub type ProtocolHandlerInit = fn(ProtocolHandlerState) -> Box<dyn ProtocolHandler>;\n\n\n\npub struct Protocol {\n\n pub id: i32,\n\n pub handler: ProtocolHandlerInit,\n\n pub server: PacketSet,\n\n pub client: PacketSet,\n\n}\n\n\n\nimpl Protocol {\n\n pub const DEFAULT: i32 = -1;\n\n\n", "file_path": "server/src/network/protocol/mod.rs", "rank": 28, "score": 78218.33034884068 }, { "content": "\n\n let mut protocol = Protocol::new::<$handler>($id);\n\n\n\n $({\n\n $(protocol.server.add::<$sp>(\n\n |src| PacketSet::wrap(<$sp>::read(src)),\n\n |packet, dst| {\n\n packet.downcast_ref::<$sp>()\n\n .unwrap()\n\n .write(dst)\n\n .map_err(|err| err.into())\n\n },\n\n );)*\n\n })?\n\n\n\n $({\n\n $(protocol.client.add::<$cp>(\n\n |src| PacketSet::wrap(<$cp>::read(src)),\n\n |packet, dst| {\n\n packet.downcast_ref::<$cp>()\n", "file_path": "server/src/network/protocol/mod.rs", "rank": 29, "score": 78217.30763495118 }, { "content": "#[macro_use]\n\nmod io;\n\n#[macro_use]\n\nmod packet;\n\n#[macro_use]\n\nmod handler;\n\n\n\npub use io::*;\n\npub use packet::*;\n\npub use handler::*;\n\n\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ProtocolError {\n\n #[error(\"not enough bytes remaining\")]\n\n NotEnoughBytes,\n\n\n\n #[error(\"invalid value\")]\n\n Invalid,\n", "file_path": "server/src/network/protocol/mod.rs", "rank": 30, "score": 78216.71487885588 }, { "content": " .unwrap()\n\n .write(dst)\n\n .map_err(|err| err.into())\n\n },\n\n );)*\n\n })?\n\n\n\n protocol\n\n }\n\n };\n\n}", "file_path": "server/src/network/protocol/mod.rs", "rank": 31, "score": 78211.50030637627 }, { "content": "mod section;\n\nmod store;\n\n\n\npub use section::*;\n\npub use store::*;\n\n\n\npub struct Chunk {\n\n _sections: [ChunkSection; 16],\n\n}", "file_path": "server/src/world/level/chunk/mod.rs", "rank": 32, "score": 76813.02418743042 }, { "content": "mod air;\n\n\n\npub use air::*;\n\n\n", "file_path": "server/src/world/level/block/mod.rs", "rank": 33, "score": 76812.12105350452 }, { "content": " match packet_init(cursor) {\n\n Ok(packet) => Some(packet),\n\n Err(err) => {\n\n error!(\"Error reading packet: {}\", err);\n\n None\n\n }\n\n }\n\n },\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn write_packet(&self, payload: &PacketPayload, dst: &mut BytesMut) -> anyhow::Result<()> {\n\n match self.id_write_map.get(&payload.0) {\n\n Some(packet_init) => packet_init(&payload.1, dst),\n\n None => Err(anyhow::anyhow!(\"cannot write unknown packet\")),\n\n }\n\n }\n\n\n\n pub fn id_of<T: 'static + Packet>(&self) -> Option<usize> {\n", "file_path": "server/src/network/protocol/packet/set.rs", "rank": 34, "score": 76414.51988512557 }, { "content": " let type_id = TypeId::of::<T>();\n\n\n\n match self.type_map.get(&type_id) {\n\n Some(id) => Some(*id),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.type_map.len()\n\n }\n\n\n\n pub fn add<T: Packet + 'static>(&mut self, packet_read_init: PacketReadInit, packet_write_init: PacketWriteInit) {\n\n let id = self.len();\n\n let type_id = TypeId::of::<T>();\n\n\n\n self.id_read_map.insert(id, packet_read_init);\n\n self.id_write_map.insert(id, packet_write_init);\n\n self.type_map.insert(type_id, 
id);\n\n }\n\n\n\n pub fn wrap<T: Packet + 'static>(packet: Result<T, ProtocolError>) -> Result<Box<dyn Packet>, ProtocolError> {\n\n Ok(Box::new(packet?))\n\n }\n\n}", "file_path": "server/src/network/protocol/packet/set.rs", "rank": 35, "score": 76409.7566003464 }, { "content": "use crate::network::protocol::ProtocolError;\n\nuse super::{Packet, PacketPayload};\n\nuse std::collections::HashMap;\n\nuse std::any::{TypeId};\n\nuse std::io::Cursor;\n\nuse bytes::BytesMut;\n\n\n", "file_path": "server/src/network/protocol/packet/set.rs", "rank": 36, "score": 76406.1045361698 }, { "content": "mod codec;\n\nmod direction;\n\nmod set;\n\n\n\npub use codec::*;\n\npub use direction::*;\n\npub use set::*;\n\n\n\nuse super::{ProtocolHandler, ProtocolRead, ProtocolWrite};\n\nuse async_trait::async_trait;\n\nuse std::fmt;\n\n\n\n#[async_trait]\n", "file_path": "server/src/network/protocol/packet/mod.rs", "rank": 37, "score": 75678.05463091076 }, { "content": "pub mod status;\n\npub mod handshake;\n\n\n\nuse server::core::{RegisterEvent, MappedRegistry};\n\nuse server::network::protocol::Protocol;\n\n\n", "file_path": "src/minecraft/protocol/mod.rs", "rank": 38, "score": 62140.36246504251 }, { "content": "pub trait ResourceLocatable {\n\n fn resource_location(&self) -> &ResourceLocation;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ResourceLocation {\n\n namespace: String,\n\n path: String,\n\n}\n\n\n\nimpl PartialEq for ResourceLocation {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.namespace == other.namespace && self.path == other.path\n\n }\n\n}\n\n\n\nimpl ResourceLocation {\n\n pub fn new(namespace: &str, path: &str) -> ResourceLocation {\n\n ResourceLocation {\n\n namespace: String::from(namespace),\n", "file_path": "server/src/core/resource_location.rs", "rank": 39, "score": 59915.50792483112 }, { "content": "pub trait ToJsonValue {\n\n fn to_json(&self) -> Option<serde_json::Value>;\n\n}", "file_path": "server/src/util/json_value.rs", "rank": 40, "score": 59915.50792483112 }, { "content": "use tokio::time::{Duration, Instant};\n\nuse std::sync::Arc;\n\nuse std::sync::atomic::{AtomicU32, Ordering};\n\nuse tokio::sync::Mutex;\n\nuse futures::future;\n\nuse super::Server;\n\nuse crate::world::level::Level;\n\n\n\nlazy_static! 
{\n\n static ref TICK_RATE: AtomicU32 = AtomicU32::new(20);\n\n static ref TICK_WARNING_THRESHOLD: AtomicU32 = AtomicU32::new(15_000);\n\n}\n\n\n\npub struct ServerExecutor {\n\n server: Arc<Mutex<Server>>,\n\n next_tick_time: Instant,\n\n last_warning_time: Option<Instant>,\n\n}\n\n\n\nimpl ServerExecutor {\n", "file_path": "server/src/server/executor.rs", "rank": 41, "score": 55827.72299629299 }, { "content": " ).await;\n\n }\n\n\n\n async fn tick(&self) -> anyhow::Result<()> {\n\n let levels;\n\n\n\n {\n\n let mut server = self.server.lock().await;\n\n\n\n levels = server.levels.values().cloned()\n\n .collect::<Vec<Arc<Mutex<Level>>>>();\n\n\n\n server.tick().await?;\n\n }\n\n\n\n let mut handles = vec![];\n\n\n\n for level in levels.into_iter() {\n\n handles.push(tokio::spawn(async move {\n\n let mut level = level.lock().await;\n", "file_path": "server/src/server/executor.rs", "rank": 42, "score": 55823.56044790399 }, { "content": " pub fn new(server: Arc<Mutex<Server>>) -> Self {\n\n Self {\n\n server,\n\n next_tick_time: Instant::now(),\n\n last_warning_time: None,\n\n }\n\n }\n\n\n\n pub async fn execute(&mut self) -> anyhow::Result<()> {\n\n let now = Instant::now();\n\n let delta = now - self.next_tick_time;\n\n let delta_millis = delta.as_millis() as u32;\n\n\n\n let single_tick = 1000 / TICK_RATE.load(Ordering::Relaxed);\n\n let single_tick_dur = Duration::from_millis(single_tick as u64);\n\n\n\n if delta_millis > 2000 && {\n\n if let Some(last_warning_time) = self.last_warning_time {\n\n (self.next_tick_time - last_warning_time).as_millis() as u32 >= TICK_WARNING_THRESHOLD.load(Ordering::Relaxed)\n\n } else {\n", "file_path": "server/src/server/executor.rs", "rank": 43, "score": 55822.81088737257 }, { "content": "\n\n if let Err(e) = level.tick().await {\n\n error!(\"Failed to tick level '{}': {}\", level.name, e);\n\n }\n\n }));\n\n }\n\n\n\n // wait until all handles are done\n\n future::join_all(handles).await;\n\n\n\n Ok(())\n\n }\n\n}", "file_path": "server/src/server/executor.rs", "rank": 44, "score": 55818.95346352295 }, { "content": " true\n\n }\n\n } {\n\n let missed_ticks = delta_millis / single_tick;\n\n\n\n warn!(\"Can't keep up! Is the server overloaded? Running {}ms or {} ticks behind\", delta_millis, missed_ticks);\n\n\n\n self.next_tick_time += single_tick_dur * missed_ticks;\n\n self.last_warning_time = Some(self.next_tick_time.clone());\n\n }\n\n\n\n self.next_tick_time += single_tick_dur;\n\n self.tick().await?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn wait(&self) {\n\n tokio::time::delay_until(\n\n self.next_tick_time\n", "file_path": "server/src/server/executor.rs", "rank": 45, "score": 55815.43414301085 }, { "content": " resource_pack_sha1: String,\n\n server_ip: String,\n\n server_port: u32,\n\n spawn_animals: bool,\n\n spawn_monsters: bool,\n\n spawn_npcs: bool,\n\n spawn_protection: i32,\n\n use_native_transport: bool,\n\n view_distance: u32,\n\n white_list: bool,\n\n}\n\n\n\nimpl ServerSettings {\n\n pub fn load() -> ServerSettings {\n\n let mut path = env::current_dir().unwrap();\n\n path.push(\"server.properties\");\n\n Self::load_file(path)\n\n }\n\n\n\n pub fn load_file(path: PathBuf) -> ServerSettings {\n", "file_path": "server/src/server/settings.rs", "rank": 46, "score": 55560.39777218159 }, { "content": " self.max_players\n\n }\n\n}\n\n\n\nmacro_rules! 
add_custom_fn {\n\n ($fn: ident, $type: ty) => {\n\n impl ServerSettings {\n\n fn $fn(properties: &mut Properties, key: &str, default: $type) -> $type {\n\n let value = properties.get(key);\n\n\n\n if value.is_some() {\n\n let value = value.unwrap();\n\n let mut custom_value = <$type>::from_name(&value);\n\n\n\n // check if name failed to match\n\n if custom_value.is_none() {\n\n // try convert string to integer\n\n if let Ok(id) = value.parse::<i32>() {\n\n // check if id matched\n\n custom_value = <$type>::from_id(id);\n", "file_path": "server/src/server/settings.rs", "rank": 47, "score": 55557.40735598164 }, { "content": " generator_settings: String,\n\n hardcore: bool,\n\n level_name: String,\n\n level_seed: String,\n\n level_type: LevelKind,\n\n max_build_height: i32,\n\n max_players: u32,\n\n max_tick_time: i32,\n\n max_world_size: u32,\n\n motd: String,\n\n network_compression_threshold: i32,\n\n online_mode: bool,\n\n op_permission_level: u32,\n\n player_idle_timeout: u32,\n\n prevent_proxy_connections: bool,\n\n pvp: bool,\n\n query_port: u32,\n\n rcon_password: String,\n\n rcon_port: u32,\n\n resource_pack: String,\n", "file_path": "server/src/server/settings.rs", "rank": 48, "score": 55555.05254415181 }, { "content": "\n\n pub fn addr(&self) -> String {\n\n // clean up ip\n\n let mut ip = self.server_ip\n\n .trim()\n\n .to_string();\n\n\n\n // check ip is valid\n\n if ip.len() == 0 {\n\n ip.push_str(\"0.0.0.0\");\n\n }\n\n\n\n format!(\"{}:{}\", ip, self.server_port)\n\n }\n\n\n\n pub fn motd(&self) -> &str {\n\n &self.motd\n\n }\n\n\n\n pub fn max_players(&self) -> u32 {\n", "file_path": "server/src/server/settings.rs", "rank": 49, "score": 55552.67492555954 }, { "content": " let mut properties = Properties::load(path);\n\n\n\n ServerSettings {\n\n allow_flight: properties.get_bool_default(\"allow-flight\", false),\n\n allow_nether: properties.get_bool_default(\"allow-nether\", true),\n\n broadcast_console_to_ops: properties.get_bool_default(\"broadcast-console-to-ops\", true),\n\n broadcast_rcon_to_ops: properties.get_bool_default(\"broadcast-rcon-to-ops\", true),\n\n difficulty: Self::get_difficulty(&mut properties, \"difficulty\", Difficulty::Easy),\n\n enable_command_block: properties.get_bool_default(\"enable-command-block\", false),\n\n enable_query: properties.get_bool_default(\"enable-query\", false),\n\n enable_rcon: properties.get_bool_default(\"enable-rcon\", false),\n\n enforce_whitlist: properties.get_bool_default(\"enforce-whitelist\", false),\n\n force_gamemode: properties.get_bool_default(\"force-gamemode\", false),\n\n function_permission_level: properties.get_u32_default(\"function-permission-level\", 2),\n\n game_mode: Self::get_game_mode(&mut properties, \"gamemode\", GameMode::Survival),\n\n generate_structures: properties.get_bool_default(\"generate-structures\", true),\n\n generator_settings: properties.get_default(\"generator-settings\", \"\"),\n\n hardcore: properties.get_bool_default(\"hardcore\", false),\n\n level_name: properties.get_default(\"level-name\", \"world\"),\n\n level_seed: properties.get_default(\"level-seed\", \"\"),\n", "file_path": "server/src/server/settings.rs", "rank": 50, "score": 55551.23131730386 }, { "content": " level_type: Self::get_level_kind(&mut properties, \"level-type\", LevelKind::Default),\n\n max_build_height: properties.get_i32_default(\"max-build-height\", 256),\n\n max_players: properties.get_u32_default(\"max-players\", 20),\n\n max_tick_time: properties.get_i32_default(\"max-tick-time\", 60 * 1000),\n\n 
max_world_size: properties.get_u32_default(\"max-world-size\", 29999984),\n\n motd: properties.get_default(\"motd\", \"A Rusty Minecraft Server\"),\n\n network_compression_threshold: properties.get_i32_default(\"network-compression-threshold\", 256),\n\n online_mode: properties.get_bool_default(\"online-mode\", true),\n\n op_permission_level: properties.get_u32_default(\"op-permission-level\", 4),\n\n player_idle_timeout: properties.get_u32_default(\"player-idle-timeout\", 0),\n\n prevent_proxy_connections: properties.get_bool_default(\"prevent-proxy-connections\", false),\n\n pvp: properties.get_bool_default(\"pvp\", true),\n\n query_port: properties.get_u32_default(\"query.port\", 25565),\n\n rcon_password: properties.get_default(\"rcon.password\", \"\"),\n\n rcon_port: properties.get_u32_default(\"rcon.port\", 25565),\n\n resource_pack: properties.get_default(\"resource-pack\", \"\"),\n\n resource_pack_sha1: properties.get_default(\"resource-pack-sha1\", \"\"),\n\n server_ip: properties.get_default(\"server-ip\", \"\"),\n\n server_port: properties.get_u32_default(\"server-port\", 25565),\n\n spawn_animals: properties.get_bool_default(\"spawn-animals\", true),\n", "file_path": "server/src/server/settings.rs", "rank": 51, "score": 55550.6068715028 }, { "content": "use crate::util::Properties;\n\nuse crate::world::{level::LevelKind, Difficulty, GameMode};\n\nuse std::path::PathBuf;\n\nuse std::env;\n\n\n\n#[derive(Debug)]\n\npub struct ServerSettings {\n\n allow_flight: bool,\n\n allow_nether: bool,\n\n broadcast_console_to_ops: bool,\n\n broadcast_rcon_to_ops: bool,\n\n difficulty: Difficulty,\n\n enable_command_block: bool,\n\n enable_query: bool,\n\n enable_rcon: bool,\n\n enforce_whitlist: bool,\n\n force_gamemode: bool,\n\n function_permission_level: u32,\n\n game_mode: GameMode,\n\n generate_structures: bool,\n", "file_path": "server/src/server/settings.rs", "rank": 52, "score": 55549.447464322904 }, { "content": " spawn_monsters: properties.get_bool_default(\"spawn-monsters\", true),\n\n spawn_npcs: properties.get_bool_default(\"spawn-npcs\", true),\n\n spawn_protection: properties.get_i32_default(\"spawn-protection\", 16),\n\n use_native_transport: properties.get_bool_default(\"use-native-transport\", true),\n\n view_distance: properties.get_u32_default(\"view-distance\", 10),\n\n white_list: properties.get_bool_default(\"white-list\", true),\n\n }\n\n }\n\n\n\n pub fn difficulty(&self) -> Difficulty {\n\n self.difficulty\n\n }\n\n\n\n pub fn game_mode(&self) -> GameMode {\n\n self.game_mode\n\n }\n\n\n\n pub fn max_tick_time(&self) -> i32 {\n\n self.max_tick_time\n\n }\n", "file_path": "server/src/server/settings.rs", "rank": 53, "score": 55545.87348838513 }, { "content": " }\n\n }\n\n\n\n // only return if found\n\n if custom_value.is_some() {\n\n return custom_value.unwrap();\n\n }\n\n }\n\n\n\n // apply default and override existing\n\n properties.set(key, default.name());\n\n default\n\n }\n\n }\n\n };\n\n}\n\n\n\nadd_custom_fn!(get_difficulty, Difficulty);\n\nadd_custom_fn!(get_game_mode, GameMode);\n\nadd_custom_fn!(get_level_kind, LevelKind);\n", "file_path": "server/src/server/settings.rs", "rank": 54, "score": 55544.044492530855 }, { "content": "pub trait Traitable<T: Copy + PartialEq> {\n\n fn traits(&self) -> &Vec<T>;\n\n fn mut_traits(&mut self) -> &mut Vec<T>;\n\n\n\n fn has(&self, t: T) -> bool {\n\n self.traits().contains(&t)\n\n }\n\n\n\n fn add(&mut self, t: T) -> &mut Self {\n\n if !self.has(t) {\n\n self.mut_traits().push(t);\n\n }\n\n\n\n self\n\n }\n\n\n\n fn 
remove(&mut self, t: T) -> &mut Self {\n\n if self.has(t) {\n\n let traits = self.mut_traits();\n\n\n", "file_path": "server/src/util/traitable.rs", "rank": 68, "score": 54567.459960288805 }, { "content": "use crate::chat::component::ComponentContainer;\n\nuse super::{WorkerRequest};\n\nuse flume::Sender;\n\n\n\npub struct Connection {\n\n is_disconnected: bool,\n\n worker_tx: Option<Sender<WorkerRequest>>,\n\n}\n\n\n\nimpl Connection {\n\n pub fn new() -> Self {\n\n Self {\n\n is_disconnected: false,\n\n worker_tx: None,\n\n }\n\n }\n\n\n\n pub fn is_connected(&self) -> bool {\n\n !self.is_disconnected && self.worker_tx.is_some()\n\n }\n", "file_path": "server/src/network/connection.rs", "rank": 69, "score": 51318.0674697807 }, { "content": "\n\n pub fn is_connecting(&self) -> bool {\n\n !self.worker_tx.is_some()\n\n }\n\n\n\n pub fn attach_worker(&mut self, worker_tx: Sender<WorkerRequest>) {\n\n self.is_disconnected = false;\n\n self.worker_tx = Some(worker_tx);\n\n }\n\n\n\n pub fn send(&mut self, request: WorkerRequest) -> bool {\n\n if !self.is_connected() {\n\n return false;\n\n }\n\n\n\n // returns whether the request sent\n\n if let Err(_) = self.worker_tx.as_ref()\n\n .unwrap()\n\n .send(request) {\n\n self.is_disconnected = true;\n", "file_path": "server/src/network/connection.rs", "rank": 70, "score": 51315.113094304164 }, { "content": "\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n\n\n pub fn tick(&mut self) {\n\n self.send(WorkerRequest::Tick);\n\n }\n\n\n\n pub fn disconnect(&mut self, reason: ComponentContainer) {\n\n self.is_disconnected = true;\n\n self.send(WorkerRequest::Disconnect(reason));\n\n }\n\n}", "file_path": "server/src/network/connection.rs", "rank": 71, "score": 51305.45284499123 }, { "content": "use super::{ResourceRegistry, Sound};\n\nuse crate::network::protocol::Protocol;\n\nuse crate::world::level::Block;\n\nuse event_bus::Event;\n\nuse std::sync::Arc;\n\nuse std::collections::HashMap;\n\nuse std::hash::Hash;\n\n\n\npub struct RegisterEvent<T>(pub T);\n\n\n\nimpl<T: 'static> Event for RegisterEvent<T> {}\n\n\n\npub struct Registries {\n\n pub protocols: Arc<MappedRegistry<i32, Protocol>>,\n\n pub blocks: Arc<ResourceRegistry<Box<dyn Block>>>,\n\n pub sounds: Arc<ResourceRegistry<Sound>>,\n\n}\n\n\n", "file_path": "server/src/core/registries.rs", "rank": 72, "score": 51156.10728064909 }, { "content": "\n\n info!(\"registries -> Finished loading registries.\");\n\n\n\n Self { protocols, blocks, sounds }\n\n }\n\n}\n\n\n\npub struct MappedRegistry<K, V> {\n\n map: HashMap<K, Arc<V>>,\n\n}\n\n\n\nimpl<K: Eq + Hash, V> MappedRegistry<K, V> {\n\n pub fn new() -> Self {\n\n Self { map: HashMap::new() }\n\n }\n\n\n\n pub fn get(&self, key: &K) -> Option<&Arc<V>> {\n\n self.map.get(key)\n\n }\n\n\n", "file_path": "server/src/core/registries.rs", "rank": 73, "score": 51153.22794643912 }, { "content": " pub fn register(&mut self, key: K, value: V) {\n\n self.map.insert(key, Arc::new(value));\n\n }\n\n\n\n pub fn values(&self) -> Vec<&Arc<V>> {\n\n self.map.values().collect()\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.map.len()\n\n }\n\n}", "file_path": "server/src/core/registries.rs", "rank": 74, "score": 51145.825749599724 }, { "content": "fn main() {\n\n server::init();\n\n minecraft::init();\n\n\n\n let registries = Registries::new();\n\n let settings = ServerSettings::load();\n\n let server = ServerContainer::new(registries, settings);\n\n\n\n if let Err(e) = server.start() {\n\n error!(\"Server error: {}\", e);\n\n }\n\n}", "file_path": 
"src/main.rs", "rank": 75, "score": 50678.1880988098 }, { "content": "mod connection;\n\nmod listener;\n\nmod worker;\n\n\n\n#[macro_use]\n\npub mod protocol;\n\n\n\npub use connection::*;\n\npub use listener::*;\n\npub use worker::*;", "file_path": "server/src/network/mod.rs", "rank": 76, "score": 50181.4684545685 }, { "content": "#[macro_use]\n\nmod registries;\n\npub use registries::*;\n\n\n\n#[macro_use]\n\nmod sound;\n\npub use sound::*;\n\n\n\nmod resource_location;\n\npub use resource_location::*;", "file_path": "server/src/core/mod.rs", "rank": 77, "score": 50178.44498352487 }, { "content": "mod difficulty;\n\nmod game_mode;\n\n\n\npub mod level;\n\n\n\npub use difficulty::*;\n\npub use game_mode::*;", "file_path": "server/src/world/mod.rs", "rank": 78, "score": 50177.621744850556 }, { "content": "mod properties;\n\nmod time;\n\nmod json_value;\n\n\n\n#[macro_use]\n\npub mod traitable;\n\n\n\npub use properties::*;\n\npub use time::*;\n\npub use traitable::*;\n\npub use json_value::*;", "file_path": "server/src/util/mod.rs", "rank": 79, "score": 50175.21388549286 }, { "content": "#[macro_use]\n\npub mod component;\n\n\n\nmod color;\n\nmod style;\n\nmod click_event;\n\nmod hover_event;\n\n\n\npub use color::*;\n\npub use style::*;\n\npub use click_event::*;\n\npub use hover_event::*;", "file_path": "server/src/chat/mod.rs", "rank": 80, "score": 50175.16997708993 }, { "content": "mod profile;\n\n\n\npub use profile::*;", "file_path": "server/src/auth/mod.rs", "rank": 81, "score": 50173.391953657396 }, { "content": " }\n\n\n\n pub fn from_name(name: &str) -> Option<LevelKind> {\n\n for type_ in Self::iter() {\n\n if type_.name() == name {\n\n return Some(*type_);\n\n }\n\n }\n\n\n\n None\n\n }\n\n}", "file_path": "server/src/world/level/kind.rs", "rank": 82, "score": 49107.08730520212 }, { "content": "lazy_static! 
{\n\n static ref DIMENSIONS: HashMap<&'static str, Dimension> = {\n\n let mut m = HashMap::new();\n\n m.insert(\"overworld\", dimension_type!(1, \"overworld\", \"\", \"\", [HasSkyLight]));\n\n m.insert(\"the_nether\", dimension_type!(0, \"the_nether\", \"_nether\", \"DIM-1\"));\n\n m.insert(\"the_end\", dimension_type!(2, \"the_nether\", \"_end\", \"DIM1\"));\n\n m\n\n };\n\n}\n\n\n\nimpl Dimension {\n\n pub fn from_name(name: &str) -> Option<&Dimension> {\n\n DIMENSIONS.get(name)\n\n }\n\n}", "file_path": "server/src/world/level/dimension.rs", "rank": 83, "score": 49102.666008417735 }, { "content": " LevelKind::Amplified => AMPLIFIED.3,\n\n LevelKind::Customized => CUSTOMIZED.3,\n\n LevelKind::Buffet => BUFFET.3,\n\n LevelKind::Debug => DEBUG.3,\n\n // LevelKind::Normal11 => NORMAL_1_1.3,\n\n }\n\n }\n\n\n\n pub fn iter() -> Iter<'static, LevelKind> {\n\n ALL.iter()\n\n }\n\n\n\n pub fn from_id(id: i32) -> Option<LevelKind> {\n\n for type_ in Self::iter() {\n\n if type_.id() == id {\n\n return Some(*type_);\n\n }\n\n }\n\n\n\n None\n", "file_path": "server/src/world/level/kind.rs", "rank": 84, "score": 49100.98154745267 }, { "content": " }\n\n\n\n pub fn serialization(&self) -> &str {\n\n match self {\n\n LevelKind::Default => DEFAULT.2,\n\n LevelKind::Flat => FLAT.2,\n\n LevelKind::LargeBiomes => LARGE_BIOMES.2,\n\n LevelKind::Amplified => AMPLIFIED.2,\n\n LevelKind::Customized => CUSTOMIZED.2,\n\n LevelKind::Buffet => BUFFET.2,\n\n LevelKind::Debug => DEBUG.2,\n\n // LevelKind::Normal11 => NORMAL_1_1.2,\n\n }\n\n }\n\n\n\n pub fn version(&self) -> u8 {\n\n match self {\n\n LevelKind::Default => DEFAULT.3,\n\n LevelKind::Flat => FLAT.3,\n\n LevelKind::LargeBiomes => LARGE_BIOMES.3,\n", "file_path": "server/src/world/level/kind.rs", "rank": 85, "score": 49100.63040658463 }, { "content": "use std::slice::Iter;\n\n\n\n// id, name, serialization, version\n\nconst DEFAULT: (i32, &str, &str, u8) = (0, \"default\", \"default\", 1);\n\nconst FLAT: (i32, &str, &str, u8) = (1, \"flat\", \"flat\", 0);\n\nconst LARGE_BIOMES: (i32, &str, &str, u8) = (2, \"largeBiomes\", \"largeBiomes\", 0);\n\nconst AMPLIFIED: (i32, &str, &str, u8) = (3, \"amplified\", \"amplified\", 0);\n\nconst CUSTOMIZED: (i32, &str, &str, u8) = (4, \"customized\", \"normal\", 0);\n\nconst BUFFET: (i32, &str, &str, u8) = (5, \"buffet\", \"buffet\", 0);\n\nconst DEBUG: (i32, &str, &str, u8) = (6, \"debug_all_block_states\", \"debug_all_block_states\", 0);\n\n// const NORMAL_1_1: (i32, &str, &str, u8) = (8, \"default_1_1\", \"default_1_1\", 0);\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum LevelKind {\n\n Default,\n\n Flat,\n\n LargeBiomes,\n\n Amplified,\n\n Customized,\n\n Buffet,\n", "file_path": "server/src/world/level/kind.rs", "rank": 86, "score": 49098.759280777325 }, { "content": " LevelKind::LargeBiomes => LARGE_BIOMES.0,\n\n LevelKind::Amplified => AMPLIFIED.0,\n\n LevelKind::Customized => CUSTOMIZED.0,\n\n LevelKind::Buffet => BUFFET.0,\n\n LevelKind::Debug => DEBUG.0,\n\n // LevelKind::Normal11 => NORMAL_1_1.0,\n\n }\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n match self {\n\n LevelKind::Default => DEFAULT.1,\n\n LevelKind::Flat => FLAT.1,\n\n LevelKind::LargeBiomes => LARGE_BIOMES.1,\n\n LevelKind::Amplified => AMPLIFIED.1,\n\n LevelKind::Customized => CUSTOMIZED.1,\n\n LevelKind::Buffet => BUFFET.1,\n\n LevelKind::Debug => DEBUG.1,\n\n // LevelKind::Normal11 => NORMAL_1_1.1,\n\n }\n", "file_path": "server/src/world/level/kind.rs", "rank": 87, "score": 49098.579455304476 }, { "content": " Debug,\n\n // 
Normal11,\n\n}\n\n\n\nconst ALL: [LevelKind; 7] = [\n\n LevelKind::Default,\n\n LevelKind::Flat,\n\n LevelKind::LargeBiomes,\n\n LevelKind::Amplified,\n\n LevelKind::Customized,\n\n LevelKind::Buffet,\n\n LevelKind::Debug,\n\n // LevelKind::Normal11,\n\n];\n\n\n\nimpl LevelKind {\n\n pub fn id(&self) -> i32 {\n\n match self {\n\n LevelKind::Default => DEFAULT.0,\n\n LevelKind::Flat => FLAT.0,\n", "file_path": "server/src/world/level/kind.rs", "rank": 88, "score": 49097.92959733754 }, { "content": "pub struct Region;\n\n\n\nimpl Region {\n\n\n\n}", "file_path": "server/src/world/level/region.rs", "rank": 89, "score": 49094.198619332434 }, { "content": " }\n\n}\n\n\n\nmacro_rules! dimension_type {\n\n ($id:expr, $name:expr, $suffix:expr, $folder:expr) => {\n\n dimension_type!($id, $name, $suffix, $folder, []);\n\n };\n\n ($id:expr, $name:expr, $suffix:expr, $folder:expr, [ $( $trait:ident ),* ]) => {\n\n Dimension {\n\n id: $id,\n\n name: $name,\n\n suffix: $suffix,\n\n folder: $folder,\n\n traits: vec![\n\n $({ DimensionTrait::$trait }),*\n\n ],\n\n }\n\n };\n\n}\n\n\n", "file_path": "server/src/world/level/dimension.rs", "rank": 90, "score": 49093.75095905974 }, { "content": "use std::collections::HashMap;\n\n\n\n#[derive(Clone, Copy, PartialEq)]\n", "file_path": "server/src/world/level/dimension.rs", "rank": 91, "score": 49090.87142239445 }, { "content": "mod text;\n\nmod translatable;\n\n\n\npub use text::*;\n\npub use translatable::*;\n\n\n\nuse crate::chat::Style;\n\nuse crate::util::ToJsonValue;\n\n\n\n#[derive(Clone)]\n\npub enum ComponentContainer {\n\n Text(TextComponent),\n\n Translatable(TranslatableComponent),\n\n}\n\n\n\nimpl ToJsonValue for ComponentContainer {\n\n fn to_json(&self) -> Option<serde_json::Value> {\n\n match self {\n\n ComponentContainer::Text(c) => c.to_json(),\n\n ComponentContainer::Translatable(c) => c.to_json(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "server/src/chat/component/mod.rs", "rank": 92, "score": 48487.51776035192 }, { "content": " fn style(&self) -> &Style;\n\n fn style_mut(&mut self) -> &mut Style;\n\n\n\n fn siblings(&self) -> &Vec<ComponentContainer>;\n\n fn siblings_mut(&mut self) -> &mut Vec<ComponentContainer>;\n\n\n\n fn append<T: Into<ComponentContainer>>(&mut self, sibling: T) {\n\n self.siblings_mut().push(sibling.into());\n\n }\n\n\n\n fn contents(&self) -> &str { \"\" }\n\n}", "file_path": "server/src/chat/component/mod.rs", "rank": 93, "score": 48484.42230442024 }, { "content": "use crate::server::ServerShared;\n\nuse crate::chat::component::ComponentContainer;\n\nuse crate::network::WorkerRequest;\n\nuse super::{Protocol, Packet};\n\nuse std::sync::Arc;\n\nuse async_trait::async_trait;\n\nuse flume::Sender;\n\n\n\npub struct ProtocolHandlerState {\n\n pub shared: Arc<ServerShared>,\n\n pub protocol: Arc<Protocol>,\n\n pub worker_tx: Sender<WorkerRequest>,\n\n}\n\n\n\nimpl ProtocolHandlerState {\n\n pub fn send_packet<T: Packet>(&self, packet: T) -> anyhow::Result<()> {\n\n // create payload\n\n let payload = (self.protocol.client.id_of::<T>().ok_or_else(||\n\n anyhow::anyhow!(\"tried to send unknown packet\"),\n\n )?, packet.into_box());\n", "file_path": "server/src/network/protocol/handler.rs", "rank": 94, "score": 47928.173472592876 }, { "content": "\n\nimpl ProtocolRead for bool {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let value = <u8>::read(src)?;\n\n\n\n if value > 1 {\n\n Err(ProtocolError::Invalid)\n\n } else {\n\n Ok(value == 1)\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for bool {\n\n fn 
write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n <u8>::write(&(*self as u8), dst)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "server/src/network/protocol/io.rs", "rank": 95, "score": 47922.43444719535 }, { "content": "\n\n impl ProtocolRead for $t {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n if src.remaining() < $len {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n Ok(src.$r())\n\n }\n\n }\n\n }\n\n\n\n impl ProtocolWrite for $t {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n if dst.remaining_mut() < $len {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n dst.$w(*self);\n\n Ok(())\n\n }\n\n }\n", "file_path": "server/src/network/protocol/io.rs", "rank": 96, "score": 47921.47204752691 }, { "content": " let len = String::len(self);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n len_var.len() + len\n\n }\n\n}\n\n\n\nimpl ProtocolRead for String {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let len_var = <Var<i32>>::read(src)?;\n\n let len = len_var.0 as usize;\n\n\n\n if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n let mut bytes = vec![0u8; len];\n\n\n\n // copy bytes to vec\n\n src.copy_to_slice(&mut bytes);\n\n\n", "file_path": "server/src/network/protocol/io.rs", "rank": 97, "score": 47920.62868087848 }, { "content": " impl $crate::network::protocol::ProtocolRead for $name {\n\n fn read<U: bytes::Buf>(_src: &mut U) -> Result<Self, $crate::network::protocol::ProtocolError> {\n\n Ok($name { $($fname: <$fty>::read(_src)?,)* })\n\n }\n\n }\n\n\n\n impl $crate::network::protocol::ProtocolWrite for $name {\n\n fn write<U: bytes::BufMut>(&self, _dst: &mut U) -> Result<(), $crate::network::protocol::ProtocolError> {\n\n $(self.$fname.write(_dst)?;)*\n\n Ok(())\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! protocol_data_scalar {\n\n ($t:ty, $len:expr, $w:ident, $r:ident) => {\n\n impl ProtocolLength for $t {\n\n fn len(&self) -> usize { $len }\n\n }\n", "file_path": "server/src/network/protocol/io.rs", "rank": 98, "score": 47920.443161405135 }, { "content": " // convert bytes to string\n\n String::from_utf8(bytes)\n\n .map_err(|_| ProtocolError::Invalid)\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for String {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n let len = String::len(self);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n <Var<i32>>::write(&len_var, dst)?;\n\n\n\n for &byte in self.as_bytes() {\n\n dst.put_u8(byte);\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n}", "file_path": "server/src/network/protocol/io.rs", "rank": 99, "score": 47919.80284953003 } ]
Rust
sf/src/twin.rs
SetTheorist/rust-special-functions
a03ea7f07677a79dc8c80c468e90c6525b64bf96
use std::ops::{Add,Sub,Mul,Div,Rem}; use std::ops::{AddAssign,SubAssign,MulAssign,DivAssign,RemAssign}; use std::ops::{Neg}; #[derive(Clone,Copy,Debug,Default,PartialEq,PartialOrd)] pub struct Twin<F>{hi:F, lo:F} pub trait Base: Sized + Copy + Add<Output=Self> + Sub<Output=Self> + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self> + PartialOrd + PartialEq + Default { fn SPLIT() -> Self; fn mul_add(self, b:Self, c:Self) -> Self; fn HAS_MUL_ADD() -> bool; fn recip(self) -> Self; fn sqrt(self) -> Self; fn cbrt(self) -> Self; fn ceil(self) -> Self; fn floor(self) -> Self; fn round(self) -> Self; fn trunc(self) -> Self; fn abs(self) -> Self; fn ci(c:isize) -> Self; fn cf(c:f64) -> Self; fn to64(self) -> f64; fn epsilon() -> Self; } impl Base for f32 { #[inline] fn SPLIT() -> Self { 4097.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f32 } #[inline] fn cf(c:f64) -> Self { c as f32 } #[inline] fn to64(self) -> f64 { self as f64 } #[inline] fn epsilon() -> Self { f32::EPSILON } } impl Base for f64 { #[inline] fn SPLIT() -> Self { 134217729.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f64 } #[inline] fn cf(c:f64) -> Self { c as f64 } #[inline] fn to64(self) -> f64 { self } #[inline] fn epsilon() -> Self { f64::EPSILON } } impl<F:Base> Base for Twin<F> { #[inline] fn SPLIT() -> Twin<F> { Twin::new((F::SPLIT()-F::ci(1))*(F::SPLIT()-F::ci(1)), F::ci(1)) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { Twin::new(F::ci(c),F::default()) } #[inline] fn cf(c:f64) -> Self { Twin::new(F::cf(c),F::default()) } #[inline] fn to64(self) -> f64 { self.hi.to64() } #[inline] fn epsilon() -> Self { Twin{hi:F::epsilon(),lo:F::default()}*Twin{hi:F::epsilon(),lo:F::default()} } } use crate::f128::*; impl Base for f128 { #[inline] fn SPLIT() -> Self { f128::from_bits(0x4037_0000_0000_0000__0100_0000_0000_0000) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn 
cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { f128::from(c) } #[inline] fn cf(c:f64) -> Self { f128::from(c) } #[inline] fn to64(self) -> f64 { f64::from(self) } #[inline] fn epsilon() -> Self { f128::from(f64::EPSILON).sqr() } } #[inline] fn qtsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let e = b + (a - s); (s, e) } #[inline] fn ddsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let v = s - a; let e = (a + (v - s)) + (b - v); (s, e) } #[inline] fn split<F:Base>(a:F) -> (F, F) { let t = F::SPLIT() * a; let ahi = t - (t - a); let alo = a - ahi; (ahi, alo) } #[inline] fn ddprod<F:Base>(a:F, b:F) -> (F, F) { if F::HAS_MUL_ADD() { let p = a * b; let e = a.mul_add(b, -p); (p, e) } else { let (ahi, alo) = split(a); let (bhi, blo) = split(b); let p = a * b; let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo; (p, e) } } #[inline] fn qdadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let (shi, slo) = ddsum(y, xhi); let (hhi, hlo) = qtsum(shi, slo + xlo); let (hi, lo) = qtsum(hhi, hlo); Twin{hi, lo} } #[inline] fn dqadd<F:Base>(x:F, y:Twin<F>) -> Twin<F> { qdadd(y, x) } #[inline] fn qqadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (hs, he) = ddsum(xhi, yhi); let (ls, le) = ddsum(xlo, ylo); let (h, k) = qtsum(hs, he + ls); let (hi, lo) = qtsum(h, le + k); Twin{hi, lo} } #[inline] fn qnegate<F:Base>(Twin{hi, lo}:Twin<F>) -> Twin<F> { Twin{hi:-hi, lo:-lo} } #[inline] fn qdprod<F:Base>(Twin{hi:xhi, lo:xlo}: Twin<F>, y:F) -> Twin<F> { let (thi, tlo) = ddprod(xhi, y); let (hi, lo) = qtsum(thi, tlo + y * xlo); Twin{hi, lo} } #[inline] fn dqprod<F:Base>(x:F, y: Twin<F>) -> Twin<F> { qdprod(y, x) } #[inline] fn qqprod<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (p, e) = ddprod(xhi, yhi); let (hi, lo) = qtsum(p, e + (xhi * ylo + xlo * yhi)); Twin{hi, lo} } #[inline] fn qqdivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = xhi / yhi; let (uu, u) = ddprod(cc, yhi); let c = ((((xhi - uu) - u) + xlo) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn dqdivide<F:Base>(x:F, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = x / yhi; let (uu, u) = ddprod(cc, yhi); let c = (((x - uu) - u) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn qddivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let xdy = xhi / y; let (uu, u) = ddprod(xdy, y); let c = (((xhi - uu) - u) + xlo) / y; let (hi, lo) = qtsum(xdy, c); Twin{hi, lo} } impl<F:Base> Twin<F> { #[inline] pub fn new(a:F, b:F) -> Self { let (hi, lo) = ddsum(a, b); Twin{hi, lo} } #[inline] pub unsafe fn new_raw(a:F, b:F) -> Self { Twin{hi:a, lo:b} } #[inline] pub fn parts(Twin{hi, lo}: Self) -> (F, F) { (hi, lo) } #[inline] pub fn hi(self) -> F { self.hi } #[inline] pub fn lo(self) -> F { self.lo } #[inline] pub fn recip(self) -> Self { Self::new(F::ci(1),F::default()) / self } #[inline] pub fn sqr(self) -> Self { self*self } pub fn sqrt(self) -> Self { let q0 = self.hi.sqrt(); let x = Self::new(q0, F::default()); let x = (x+self/x)*F::cf(0.5); x } pub fn sqrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c1_2 = F::cf(0.5); let q0 = self.hi.sqrt().recip(); let x = 
Self::new(q0, z); let x = x*(-self*x.sqr() + c3)*c1_2; x } pub fn cbrt(self) -> Self { let z = F::default(); let c2 = F::ci(2); let c3 = F::ci(3); let q0 = self.hi.cbrt(); let x = Self::new(q0, z); let x = (x*c2 + self/x.sqr())/c3; x } pub fn cbrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c4 = F::ci(4); let q0 = self.hi.cbrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x*x.sqr() + c4)/c3; let x = x*(-self*x*x.sqr() + c4)/c3; x } pub fn floor(self) -> Self { let xhi = self.hi.floor(); if self.hi == xhi { let xlo = self.lo.floor(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn ceil(self) -> Self { let xhi = self.hi.ceil(); if self.hi == xhi { let xlo = self.lo.ceil(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn round(self) -> Self { let xhi = self.hi.round(); if self.hi == xhi { let xlo = self.lo.round(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { if (xhi-self.hi).abs()==F::cf(0.5) && self.lo < F::default() { Twin{hi:xhi-F::ci(1), lo:F::default()} } else { Twin{hi:xhi, lo:F::default()} } } } pub fn trunc(self) -> Self { let xhi = self.hi.trunc(); if self.hi == xhi { let xlo = self.lo.trunc(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } #[inline] pub fn abs(self) -> Self { if self < Self::default() { -self } else { self } } } impl<F:Base> Neg for Twin<F> { type Output = Self; fn neg(self) -> Self { qnegate(self) } } impl<F:Base> Add<Self> for Twin<F> { type Output = Self; fn add(self, y: Self) -> Self { qqadd(self, y) } } impl<F:Base> Sub<Self> for Twin<F> { type Output = Self; fn sub(self, y: Self) -> Self { qqadd(self, -y) } } impl<F:Base> Mul<Self> for Twin<F> { type Output = Self; fn mul(self, y: Self) -> Self { qqprod(self, y) } } impl<F:Base> Div<Self> for Twin<F> { type Output = Self; fn div(self, y: Self) -> Self { qqdivide(self, y) } } impl<F:Base> Add<F> for Twin<F> { type Output = Self; fn add(self, y:F) -> Self { qdadd(self, y) } } impl<F:Base> Sub<F> for Twin<F> { type Output = Self; fn sub(self, y:F) -> Self { qdadd(self, -y) } } impl<F:Base> Mul<F> for Twin<F> { type Output = Self; fn mul(self, y:F) -> Self { qdprod(self, y) } } impl<F:Base> Div<F> for Twin<F> { type Output = Self; fn div(self, y:F) -> Self { qddivide(self, y) } } /* // Rust restrictions block these (generic) implementation, sigh impl<F:Base> Add<Twin<F>> for F { type Output = Twin<F>; fn add(self, y:Twin<F>) -> Self { dqadd(self, y) } } impl<F:Base> Sub<Twin<F>> for F { type Output = Twin<F>; fn sub(self, y:Twin<F>) -> Self { dqadd(self, -y) } } impl<F:Base> Mul<Twin<F>> for F { type Output = Twin<F>; fn mul(self, y:Twin<F>) -> Self { dqprod(self, y) } } impl<F:Base> Div<Twin<F>> for F { type Output = Twin<F>; fn div(self, y:Twin<F>) -> Self { dqdivide(self, y) } } */ impl<F:Base> std::fmt::Display for Twin<F> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let ZERO = Self::default(); let ONE = Self::ci(1); let TEN = Self::ci(10); let mut z = *self; if z < ZERO { z = -z; write!(f, "-")?; } let mut e = 0; if z == ZERO { while z >= TEN { e += 1; z = z / TEN; } while z < ONE { e -= 1; z = z * TEN; } } let digs = ((Self::epsilon().to64().recip()*1.1).log10().ceil() as isize) + 4; println!("{}", digs); for n in 0..digs { if n == 1 { write!(f, ".")?; } let d = z.floor().to64(); if d<0.0 || d>=10.0 { eprintln!("<<{}>>", d); } let dd = ((d as u8) + b'0') as char; write!(f, "{}", dd)?; let d0 = Self::cf(d); z = (z 
- d0) * TEN; } if e != 0 { write!(f, "e{}", e)?; } write!(f, "") } }
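// Illustrative aside (not part of the `sf/src/twin.rs` source above; the
// functions below are standalone f64 stand-ins, not the crate's generic
// `Base` versions): `qtsum` is the Fast2Sum kernel, whose error term is only
// exact when |a| >= |b|, while `ddsum` is the branch-free Knuth TwoSum that is
// exact for any ordering. That is presumably why the general-purpose paths in
// the file apply a TwoSum first and use Fast2Sum only for renormalisation.
fn fast_two_sum(a: f64, b: f64) -> (f64, f64) {
    let s = a + b;
    let e = b + (a - s); // exact only if |a| >= |b|
    (s, e)
}

fn two_sum(a: f64, b: f64) -> (f64, f64) {
    let s = a + b;
    let v = s - a;
    let e = (a + (v - s)) + (b - v); // exact for any a, b
    (s, e)
}

fn main() {
    let (a, b) = (1e-20_f64, 1.0_f64); // |a| < |b|: Fast2Sum's precondition is violated
    println!("fast_two_sum: {:?}", fast_two_sum(a, b)); // (1.0, 0.0)   -- low part lost
    println!("two_sum:      {:?}", two_sum(a, b)); //      (1.0, 1e-20) -- error recovered
}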
use std::ops::{Add,Sub,Mul,Div,Rem}; use std::ops::{AddAssign,SubAssign,MulAssign,DivAssign,RemAssign}; use std::ops::{Neg}; #[derive(Clone,Copy,Debug,Default,PartialEq,PartialOrd)] pub struct Twin<F>{hi:F, lo:F} pub trait Base: Sized + Copy + Add<Output=Self> + Sub<Output=Self> + Mul<Output=Self> + Div<Output=Self> + Neg<Output=Self> + PartialOrd + PartialEq + Default { fn SPLIT() -> Self; fn mul_add(self, b:Self, c:Self) -> Self; fn HAS_MUL_ADD() -> bool; fn recip(self) -> Self; fn sqrt(self) -> Self; fn cbrt(self) -> Self; fn ceil(self) -> Self; fn floor(self) -> Self; fn round(self) -> Self; fn trunc(self) -> Self; fn abs(self) -> Self; fn ci(c:isize) -> Self; fn cf(c:f64) -> Self; fn to64(self) -> f64; fn epsilon() -> Self; } impl Base for f32 { #[inline] fn SPLIT() -> Self { 4097.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f32 } #[inline] fn cf(c:f64) -> Self { c as f32 } #[inline] fn to64(self) -> f64 { self as f64 } #[inline] fn epsilon() -> Self { f32::EPSILON } } impl Base for f64 { #[inline] fn SPLIT() -> Self { 134217729.0 } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { self.mul_add(b, c) } #[inline] fn HAS_MUL_ADD() -> bool { true } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { c as f64 } #[inline] fn cf(c:f64) -> Self { c as f64 } #[inline] fn to64(self) -> f64 { self } #[inline] fn epsilon() -> Self { f64::EPSILON } } impl<F:Base> Base for Twin<F> { #[inline] fn SPLIT() -> Twin<F> { Twin::new((F::SPLIT()-F::ci(1))*(F::SPLIT()-F::ci(1)), F::ci(1)) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { Twin::new(F::ci(c),F::default()) } #[inline] fn cf(c:f64) -> Self { Twin::new(F::cf(c),F::default()) } #[inline] fn to64(self) -> f64 { self.hi.to64() } #[inline] fn epsilon() -> Self { Twin{hi:F::epsilon(),lo:F::default()}*Twin{hi:F::epsilon(),lo:F::default()} } } use crate::f128::*; impl Base for f128 { #[inline] fn SPLIT() -> Self { f128::from_bits(0x4037_0000_0000_0000__0100_0000_0000_0000) } #[inline] fn mul_add(self, b:Self, c:Self) -> Self { unimplemented!() } #[inline] fn HAS_MUL_ADD() -> bool { false } #[inline] fn recip(self) -> Self { self.recip() } #[inline] fn sqrt(self) -> Self { self.sqrt() } #[inline] fn 
cbrt(self) -> Self { self.cbrt() } #[inline] fn ceil(self) -> Self { self.ceil() } #[inline] fn floor(self) -> Self { self.floor() } #[inline] fn round(self) -> Self { self.round() } #[inline] fn trunc(self) -> Self { self.trunc() } #[inline] fn abs(self) -> Self { self.abs() } #[inline] fn ci(c:isize) -> Self { f128::from(c) } #[inline] fn cf(c:f64) -> Self { f128::from(c) } #[inline] fn to64(self) -> f64 { f64::from(self) } #[inline] fn epsilon() -> Self { f128::from(f64::EPSILON).sqr() } } #[inline] fn qtsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let e = b + (a - s); (s, e) } #[inline] fn ddsum<F:Base>(a:F, b:F) -> (F, F) { let s = a + b; let v = s - a; let e = (a + (v - s)) + (b - v); (s, e) } #[inline] fn split<F:Base>(a:F) -> (F, F) { let t = F::SPLIT() * a; let ahi = t - (t - a); let alo = a - ahi; (ahi, alo) } #[inline] fn ddprod<F:Base>(a:F, b:F) -> (F, F) {
} #[inline] fn qdadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let (shi, slo) = ddsum(y, xhi); let (hhi, hlo) = qtsum(shi, slo + xlo); let (hi, lo) = qtsum(hhi, hlo); Twin{hi, lo} } #[inline] fn dqadd<F:Base>(x:F, y:Twin<F>) -> Twin<F> { qdadd(y, x) } #[inline] fn qqadd<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (hs, he) = ddsum(xhi, yhi); let (ls, le) = ddsum(xlo, ylo); let (h, k) = qtsum(hs, he + ls); let (hi, lo) = qtsum(h, le + k); Twin{hi, lo} } #[inline] fn qnegate<F:Base>(Twin{hi, lo}:Twin<F>) -> Twin<F> { Twin{hi:-hi, lo:-lo} } #[inline] fn qdprod<F:Base>(Twin{hi:xhi, lo:xlo}: Twin<F>, y:F) -> Twin<F> { let (thi, tlo) = ddprod(xhi, y); let (hi, lo) = qtsum(thi, tlo + y * xlo); Twin{hi, lo} } #[inline] fn dqprod<F:Base>(x:F, y: Twin<F>) -> Twin<F> { qdprod(y, x) } #[inline] fn qqprod<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let (p, e) = ddprod(xhi, yhi); let (hi, lo) = qtsum(p, e + (xhi * ylo + xlo * yhi)); Twin{hi, lo} } #[inline] fn qqdivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = xhi / yhi; let (uu, u) = ddprod(cc, yhi); let c = ((((xhi - uu) - u) + xlo) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn dqdivide<F:Base>(x:F, Twin{hi:yhi, lo:ylo}:Twin<F>) -> Twin<F> { let cc = x / yhi; let (uu, u) = ddprod(cc, yhi); let c = (((x - uu) - u) - cc * ylo) / yhi; let (hi, lo) = qtsum(cc, c); Twin{hi, lo} } #[inline] fn qddivide<F:Base>(Twin{hi:xhi, lo:xlo}:Twin<F>, y:F) -> Twin<F> { let xdy = xhi / y; let (uu, u) = ddprod(xdy, y); let c = (((xhi - uu) - u) + xlo) / y; let (hi, lo) = qtsum(xdy, c); Twin{hi, lo} } impl<F:Base> Twin<F> { #[inline] pub fn new(a:F, b:F) -> Self { let (hi, lo) = ddsum(a, b); Twin{hi, lo} } #[inline] pub unsafe fn new_raw(a:F, b:F) -> Self { Twin{hi:a, lo:b} } #[inline] pub fn parts(Twin{hi, lo}: Self) -> (F, F) { (hi, lo) } #[inline] pub fn hi(self) -> F { self.hi } #[inline] pub fn lo(self) -> F { self.lo } #[inline] pub fn recip(self) -> Self { Self::new(F::ci(1),F::default()) / self } #[inline] pub fn sqr(self) -> Self { self*self } pub fn sqrt(self) -> Self { let q0 = self.hi.sqrt(); let x = Self::new(q0, F::default()); let x = (x+self/x)*F::cf(0.5); x } pub fn sqrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c1_2 = F::cf(0.5); let q0 = self.hi.sqrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x.sqr() + c3)*c1_2; x } pub fn cbrt(self) -> Self { let z = F::default(); let c2 = F::ci(2); let c3 = F::ci(3); let q0 = self.hi.cbrt(); let x = Self::new(q0, z); let x = (x*c2 + self/x.sqr())/c3; x } pub fn cbrt_recip(self) -> Self { let z = F::default(); let c3 = F::ci(3); let c4 = F::ci(4); let q0 = self.hi.cbrt().recip(); let x = Self::new(q0, z); let x = x*(-self*x*x.sqr() + c4)/c3; let x = x*(-self*x*x.sqr() + c4)/c3; x } pub fn floor(self) -> Self { let xhi = self.hi.floor(); if self.hi == xhi { let xlo = self.lo.floor(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn ceil(self) -> Self { let xhi = self.hi.ceil(); if self.hi == xhi { let xlo = self.lo.ceil(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } pub fn round(self) -> Self { let xhi = self.hi.round(); if self.hi == xhi { let xlo = self.lo.round(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { if (xhi-self.hi).abs()==F::cf(0.5) && self.lo < F::default() { Twin{hi:xhi-F::ci(1), lo:F::default()} } else { 
Twin{hi:xhi, lo:F::default()} } } } pub fn trunc(self) -> Self { let xhi = self.hi.trunc(); if self.hi == xhi { let xlo = self.lo.trunc(); let (hi, lo) = qtsum(xhi, xlo); Twin{hi, lo} } else { Twin{hi:xhi, lo:F::default()} } } #[inline] pub fn abs(self) -> Self { if self < Self::default() { -self } else { self } } } impl<F:Base> Neg for Twin<F> { type Output = Self; fn neg(self) -> Self { qnegate(self) } } impl<F:Base> Add<Self> for Twin<F> { type Output = Self; fn add(self, y: Self) -> Self { qqadd(self, y) } } impl<F:Base> Sub<Self> for Twin<F> { type Output = Self; fn sub(self, y: Self) -> Self { qqadd(self, -y) } } impl<F:Base> Mul<Self> for Twin<F> { type Output = Self; fn mul(self, y: Self) -> Self { qqprod(self, y) } } impl<F:Base> Div<Self> for Twin<F> { type Output = Self; fn div(self, y: Self) -> Self { qqdivide(self, y) } } impl<F:Base> Add<F> for Twin<F> { type Output = Self; fn add(self, y:F) -> Self { qdadd(self, y) } } impl<F:Base> Sub<F> for Twin<F> { type Output = Self; fn sub(self, y:F) -> Self { qdadd(self, -y) } } impl<F:Base> Mul<F> for Twin<F> { type Output = Self; fn mul(self, y:F) -> Self { qdprod(self, y) } } impl<F:Base> Div<F> for Twin<F> { type Output = Self; fn div(self, y:F) -> Self { qddivide(self, y) } } /* // Rust restrictions block these (generic) implementation, sigh impl<F:Base> Add<Twin<F>> for F { type Output = Twin<F>; fn add(self, y:Twin<F>) -> Self { dqadd(self, y) } } impl<F:Base> Sub<Twin<F>> for F { type Output = Twin<F>; fn sub(self, y:Twin<F>) -> Self { dqadd(self, -y) } } impl<F:Base> Mul<Twin<F>> for F { type Output = Twin<F>; fn mul(self, y:Twin<F>) -> Self { dqprod(self, y) } } impl<F:Base> Div<Twin<F>> for F { type Output = Twin<F>; fn div(self, y:Twin<F>) -> Self { dqdivide(self, y) } } */ impl<F:Base> std::fmt::Display for Twin<F> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let ZERO = Self::default(); let ONE = Self::ci(1); let TEN = Self::ci(10); let mut z = *self; if z < ZERO { z = -z; write!(f, "-")?; } let mut e = 0; if z == ZERO { while z >= TEN { e += 1; z = z / TEN; } while z < ONE { e -= 1; z = z * TEN; } } let digs = ((Self::epsilon().to64().recip()*1.1).log10().ceil() as isize) + 4; println!("{}", digs); for n in 0..digs { if n == 1 { write!(f, ".")?; } let d = z.floor().to64(); if d<0.0 || d>=10.0 { eprintln!("<<{}>>", d); } let dd = ((d as u8) + b'0') as char; write!(f, "{}", dd)?; let d0 = Self::cf(d); z = (z - d0) * TEN; } if e != 0 { write!(f, "e{}", e)?; } write!(f, "") } }
if F::HAS_MUL_ADD() { let p = a * b; let e = a.mul_add(b, -p); (p, e) } else { let (ahi, alo) = split(a); let (bhi, blo) = split(b); let p = a * b; let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo; (p, e) }
if_condition
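// Sketch of what the masked `middle` span above implements (standalone f64
// stand-ins, not the crate's generic code): an error-free two-product. With a
// fused multiply-add the rounding error of a * b is recovered by a single
// `mul_add`; the fallback is Veltkamp splitting plus Dekker's correction, and,
// barring overflow/underflow, both paths return the same (product, error) pair.
fn two_prod_fma(a: f64, b: f64) -> (f64, f64) {
    let p = a * b;
    let e = a.mul_add(b, -p); // single-rounding FMA recovers the exact error
    (p, e)
}

fn two_prod_split(a: f64, b: f64) -> (f64, f64) {
    let split = |x: f64| {
        let t = 134217729.0 * x; // 2^27 + 1, the f64 SPLIT constant used above
        let hi = t - (t - x);
        (hi, x - hi)
    };
    let (ahi, alo) = split(a);
    let (bhi, blo) = split(b);
    let p = a * b;
    let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo;
    (p, e)
}

fn main() {
    let (a, b) = (1.0_f64 / 3.0, 3.0_f64.sqrt());
    assert_eq!(two_prod_fma(a, b), two_prod_split(a, b));
    println!("{:?}", two_prod_fma(a, b));
}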
[ { "content": "pub trait Ordered: Base + PartialOrd<Self> {\n\n #[inline]\n\n fn min(self, b: Self) -> Self {\n\n if self < b { self } else { b }\n\n }\n\n #[inline]\n\n fn max(self, b: Self) -> Self {\n\n if self > b { self } else { b }\n\n }\n\n\n\n fn floor(self) -> Self;\n\n fn ceil(self) -> Self;\n\n fn round(self) -> Self;\n\n fn trunc(self) -> Self;\n\n fn rint(self) -> isize;\n\n\n\n #[inline]\n\n fn є(self, a:Self, b:Self) -> bool { a<=self && self<=b }\n\n #[inline]\n\n fn є_cc(self, a:Self, b:Self) -> bool { a.є(a,b) }\n\n #[inline]\n\n fn є_oc(self, a:Self, b:Self) -> bool { a<self && self<=b }\n\n #[inline]\n\n fn є_co(self, a:Self, b:Self) -> bool { a<=self && self<b }\n\n #[inline]\n\n fn є_oo(self, a:Self, b:Self) -> bool { a<self && self<b }\n\n}\n", "file_path": "sf/src/traits.rs", "rank": 0, "score": 312935.712129303 }, { "content": "// we assume for convenience that our basic values\n\n// are all Copy-able.\n\n// This excludes, for example, arbitrary-precision floats,\n\n// but we are not targeting such use cases...\n\npub trait Base: Copy+Sized+PartialEq+Default+std::fmt::Debug+std::fmt::Display+'static {}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 1, "score": 298399.57200821885 }, { "content": "pub trait Power<P=Self>: Base {\n\n fn pow(self, p: P) -> Self;\n\n}\n", "file_path": "sf/src/traits.rs", "rank": 3, "score": 282791.12803412555 }, { "content": "pub trait Normed: Base + From<Self::NT> {\n\n type NT: Field + Ordered + Constants;\n\n const epsilon: Self::NT;\n\n fn abs(self) -> Self::NT;\n\n fn vabs(self) -> Self;\n\n fn fabs(self) -> f64;\n\n // self/|self|\n\n fn signum(self) -> Self;\n\n fn mu(self) -> Self::NT;\n\n}\n", "file_path": "sf/src/traits.rs", "rank": 4, "score": 276665.4879211568 }, { "content": "pub trait Multiplication: Base + One + Mul<Self, Output = Self> + MulAssign<Self> {\n\n #[inline]\n\n fn sqr(self) -> Self { self * self }\n\n fn cub(self) -> Self { self.sqr() * self }\n\n}\n", "file_path": "sf/src/traits.rs", "rank": 5, "score": 270057.5545083037 }, { "content": "pub trait InitialGuess { fn guess(self, n:i32) -> Self; }\n\nimpl InitialGuess for r64 {\n\n #[inline]\n\n fn guess(self, n:i32) -> r64 {\n\n // TODO: this only works for finite, normal floats\n\n let b = self.0.to_bits();\n\n let e = (((b >> 52) & 0x7FFF) as i32) - 1023;\n\n let e = (e / n) + 1023;\n\n let b = (b & 0x8000_FFFF_FFFF_FFFF) | ((e as u64) << 52);\n\n r64(f64::from_bits(b))\n\n }\n\n}\n\n// NB: for mantissa part\n\n// 0.45 + 0.62*x*(1.0 - 0.11x)\n\n// reduces to 4 newton steps on [1,2)\n\n// (from 6-7 from n/2 guess)\n\n// simple: 0.5+x/2*(1.0-x/8) gives 5 Newton steps on [1,2)\n\n\n\nuse crate::complex::{c64};\n\nimpl InitialGuess for c64 {\n\n #[inline]\n\n fn guess(self, n:i32) -> c64 {\n\n let (r, a) = self.to_polar();\n\n c64::polar(r64(r.0.powf(1.0/(n as f64))), (a/(n as isize)))\n\n }\n\n}\n\n\n\n\n", "file_path": "sf/src/basic.rs", "rank": 7, "score": 263566.14203385287 }, { "content": "pub trait ComplexType: Base + Normed<NT = Self::RT> + Embeds<Self::RT>\n\n{\n\n type RT: RealValue<CT=Self>;\n\n const I: Self;\n\n fn real(self) -> Self::RT;\n\n fn imag(self) -> Self::RT;\n\n fn arg(self) -> Self::RT;\n\n fn conj(self) -> Self;\n\n fn rect(re: Self::RT, im: Self::RT) -> Self;\n\n fn polar(r: Self::RT, arg: Self::RT) -> Self;\n\n fn to_rect(self) -> (Self::RT, Self::RT) { (self.real(), self.imag()) }\n\n fn to_polar(self) -> (Self::RT, Self::RT) {\n\n let a = self.abs();\n\n if a == 0 {\n\n (a, a)\n\n } else {\n\n (a, self.arg())\n\n }\n\n }\n\n fn 
root_of_unity(n:isize) -> Self;\n\n}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 8, "score": 258287.20652208492 }, { "content": "pub fn sf_min<V:Ordered>(a:V, b:V) -> V { a.min(b) }\n", "file_path": "sf/src/traits.rs", "rank": 9, "score": 250336.32049907703 }, { "content": "pub fn sf_max<V:Ordered>(a:V, b:V) -> V { a.max(b) }\n", "file_path": "sf/src/traits.rs", "rank": 10, "score": 250336.32049907703 }, { "content": "#[inline]\n\nfn split(a: f64) -> (f64, f64) {\n\n let t = 134217729.0 * a;\n\n let ahi = t - (t - a);\n\n let alo = a - ahi;\n\n (ahi, alo)\n\n}\n\n\n", "file_path": "sf/src/wide.rs", "rank": 11, "score": 237082.5762725274 }, { "content": "pub fn erfc_contfrac<V: Value + Exp>(x: V) -> V {\n\n let x2 = x.sqr();\n\n let terms = (1..).map(|n| (ι(n): V / 2, if n % 2 == 1 { ι(1) } else { x2 }));\n\n sf_exp(-x2) * V::FRAC_1_SQRTPI * x / contfrac_modlentz(x2, terms, V::epsilon)\n\n}\n\n\n", "file_path": "sf/src/erf/impls.rs", "rank": 12, "score": 232607.3832975477 }, { "content": "pub fn erf_series<V: Value + Exp>(x: V) -> V {\n\n let x2 = x.sqr() * 2;\n\n let terms = (1..).scan(x, |s, n| {\n\n let o = *s;\n\n *s *= x2 / (2 * n + 1);\n\n Some(o)\n\n });\n\n sf_exp(-x.sqr()) * (V::FRAC_1_SQRTPI * 2) * sum_series(terms, V::epsilon)\n\n}\n\n\n", "file_path": "sf/src/erf/impls.rs", "rank": 13, "score": 232607.3832975477 }, { "content": "pub fn erfc_contfrac2<V: Value + Exp>(x: V) -> V {\n\n let x2 = x.sqr() * 2;\n\n let terms = (1..).map(|n| (ι(-(2 * n - 1) * (2 * n)): V, x2 + (4 * n + 1)));\n\n sf_exp(-x.sqr()) * V::FRAC_1_SQRTPI * (x * 2) / contfrac_modlentz(x2 + 1, terms, V::epsilon)\n\n}\n\n\n\nuse crate::real::{*};\n\n// TODO: quick-crude for now; replace with better approach\n\nimpl Erf for r64 {\n\n fn erf(self) -> r64 {\n\n if self < r64::zero {\n\n -(-self).erf()\n\n } else if self.abs() < r64::one {\n\n impls::erf_series(self)\n\n } else {\n\n r64::one - impls::erfc_contfrac2(self)\n\n }\n\n }\n\n fn erfc(self) -> r64 {\n\n if self.abs() < r64::one {\n", "file_path": "sf/src/erf/impls.rs", "rank": 14, "score": 232607.3832975477 }, { "content": "pub fn impl_scalar<V:Value>(a:V, b:V) -> V {\n\n if a.is_zero() || b.is_zero() {return V::zero;}\n\n let (a_, b_) = (a, b);\n\n let (mut a, mut b) = (a, b);\n\n for n in 1..100 {\n\n let a0 = a;\n\n let b0 = b;\n\n a = (a0 + b0) / 2;\n\n b = sf_sqrt(a0 * b0);\n\n if a==b || (a==a0 && b==b0) {\n\n ::log::debug!(\"impl_scalar::<{}>({},{}) converged in {} iterations\", std::any::type_name::<V>(), a_, b_, n);\n\n break;\n\n }\n\n }\n\n a\n\n}\n\n\n\nuse crate::trig::*;\n", "file_path": "sf/src/agm.rs", "rank": 15, "score": 231470.56145352716 }, { "content": "// for \"floating-point\" type (real) values\n\npub trait Float : Base {\n\n // Split into normalized mantissa and exponent\n\n fn frexp(self) -> (Self, isize);\n\n // extract only the exponent\n\n fn ilogb(self) -> isize;\n\n\n\n // self * 2^n\n\n fn ldexp(self, n:isize) -> Self;\n\n\n\n // magnitude of self, but with sign-bit from x\n\n fn copysign(self, x:Self) -> Self;\n\n\n\n // next representable number larger\n\n fn next_up(self) -> Self;\n\n // prev representable number smaller\n\n fn next_dn(self) -> Self;\n\n\n\n // checks for bitwise identity\n\n fn identical(self, rhs:Self) -> bool;\n\n\n\n const infinity: Self;\n\n const neg_infinity: Self;\n\n const neg_zero: Self;\n\n}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 16, "score": 229194.34509214776 }, { "content": "pub trait One: Base {\n\n const one: Self;\n\n}\n\n\n\n// absorb isize & f64 into operations 
also...\n\n\n", "file_path": "sf/src/traits.rs", "rank": 17, "score": 229194.3450921478 }, { "content": "pub trait Zero: Base {\n\n const zero: Self;\n\n}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 18, "score": 229194.3450921478 }, { "content": "pub fn sf_ceil<V:Ordered>(a:V) -> V { a.ceil() }\n", "file_path": "sf/src/traits.rs", "rank": 19, "score": 227814.97956696886 }, { "content": "pub fn sf_trunc<V:Ordered>(a:V) -> V { a.trunc() }\n", "file_path": "sf/src/traits.rs", "rank": 20, "score": 227814.97956696886 }, { "content": "pub fn sf_sqrt<V:Roots>(x:V) -> V { x.sqrt() }\n", "file_path": "sf/src/traits.rs", "rank": 21, "score": 227814.9795669689 }, { "content": "pub fn sf_floor<V:Ordered>(a:V) -> V { a.floor() }\n", "file_path": "sf/src/traits.rs", "rank": 22, "score": 227814.97956696886 }, { "content": "pub fn sf_round<V:Ordered>(a:V) -> V { a.round() }\n", "file_path": "sf/src/traits.rs", "rank": 23, "score": 227814.97956696886 }, { "content": "// assumes re>0\n\npub fn gamma_asympt<V: Value+Log+Exp>(x: V) -> V {\n\n let mut div = V::one;\n\n let mut z = x;\n\n // shift z\n\n while z.fabs() < 50.0 {\n\n //res += -sf_log(z*(z+1));\n\n div *= (z * (z + 1));\n\n z += 2;\n\n }\n\n let z = z;\n\n\n\n let mut res = V::zero;\n\n let mut term: V = (z - 0.5) * sf_log(z) - z + V::FRAC_LOG2PI_2;\n\n res += term;\n\n for m in (2..250).step_by(2) {\n\n let old_term = term;\n\n term = (ι(sf_bernoulli_number_approx(m as usize)):V) / (z.pow(m-1) * (m*(m-1)));\n\n if μ(term) > μ(old_term) { break; }\n\n let old_res = res;\n\n res += term;\n", "file_path": "sf/src/gamma/impls.rs", "rank": 24, "score": 226575.64585222455 }, { "content": "// assumes nu>=0\n\n// adaptation of Miller's method\n\n// TODO: not clear that when this is the method of choice...\n\npub fn bessel_i_order_recur<V:Value+Exp>(nu:isize, z:V, j:bool, EXTRA:isize) -> V {\n\n //const EXTRA : isize = 100; // TODO: need to compute appropriate bounds here...\n\n let tot = (nu+EXTRA+1) as usize;\n\n let mut rs = vec![V::zero; tot];\n\n let iz = z.recip();\n\n //if j { rs[tot-1] = z/2*(ι(tot as isize-1):V/((tot as isize).pow(2)));}\n\n if j { rs[tot-1] = z/(2*(tot) as isize); }\n\n else { rs[tot-1] = iz*(2*(tot) as isize); }\n\n for j in (0..(tot-1)).rev() {\n\n rs[j] = (rs[j+1] + iz*(2*(j+1) as isize)).recip();\n\n }\n\n let mut numer = sf_exp(z);\n\n for j in 0..(nu as usize) {\n\n numer *= rs[j];\n\n }\n\n let mut denom = V::one;\n\n for j in (1..tot).rev() {\n\n denom = denom * rs[j] + 1;\n\n }\n\n denom = denom * 2 * rs[0] + 1;\n\n numer / denom\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 25, "score": 226153.58959923574 }, { "content": "pub fn digamma_series<V:RealValue+Log>(z:V) -> V {\n\n let mut sum = -V::EULER_GAMMA - z.recip();\n\n let mut res = sum;\n\n let b2 = ι(sf_bernoulli_number_scaled_approx(2)):V * 1; // TODO\n\n let b4 = ι(sf_bernoulli_number_scaled_approx(4)):V * 6; // TODO\n\n let b6 = ι(sf_bernoulli_number_scaled_approx(6)):V * 120; // TODO\n\n let b8 = ι(sf_bernoulli_number_scaled_approx(8)):V * 5040; // TODO\n\n for k in 1..1000 {\n\n let trm = z / ((z+k)*k);\n\n sum += trm;\n\n let old_res = res;\n\n let k2 = (ι(k):V).pow(-2);\n\n let kz2 = (z+k).pow(-2);\n\n res = sum + sf_log((z+k)/k) - trm/2\n\n + b2*(k2 - kz2)\n\n + b4*(k2.pow(2) - kz2.pow(2))\n\n + b6*(k2.pow(3) - kz2.pow(3))\n\n + b8*(k2.pow(4) - kz2.pow(4));\n\n }\n\n res\n\n}\n\n\n", "file_path": "sf/src/gamma/impls.rs", "rank": 26, "score": 221971.35811572103 }, { "content": "pub fn sf_sqrt_recip<V:Roots>(x:V) -> V { x.sqrt_recip() 
}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 27, "score": 221603.40020354805 }, { "content": "pub fn bessel_j_recur_back_in_order<V:Value>(nu:V, z:V) -> V {\n\n let n = nu.floor();\n\n let nuf = nu - n;\n\n let nx = n + 10;\n\n unimplemented!()\n\n}\n\n*/\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 28, "score": 219905.28809880244 }, { "content": "pub fn bessel_j_series<V: Value + Gamma + Power>(nu:V, z:V) -> V {\n\n let z2 = -(z / 2).sqr();\n\n let terms = (1..).scan(ι(1):V, |s, m| {\n\n *s *= z2 / m / (nu + m);\n\n Some(*s)\n\n });\n\n let terms = std::iter::once(ι(1)).chain(terms);\n\n sum_series(terms, V::epsilon) * (z / 2).pow(nu) / sf_gamma(nu + 1)\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 29, "score": 219302.11801973585 }, { "content": "pub fn bessel_i_asymp_z<V:Value+Exp>(nu:V, z:V) -> V {\n\n sf_exp(z) / sf_sqrt(V::PI*2*z) * asymp_all(nu, -z)\n\n}\n\n\n\n\n\n// useful for recurrence normalization:\n\n// \\sum_{i=-\\infty}^\\infty I_{2n+1}(x) = sinh(x)\n\n// \\sum_{i=0}^\\infty I_{2n+1}(x) = sinh(x)/2\n\n//\n\n// \\sum_{i=-\\infty}^\\infty I_{2n}(x) = cosh(x)\n\n// I_0(x)/2 + \\sum_{i=1}^\\infty I_{2n}(x) = cosh(x)/2\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 30, "score": 218779.91851933944 }, { "content": "// for |z|>>nu, |arg z|<pi\n\n// z needs to be fairly large for this to to be accurate\n\n// TODO: separate type for nu and z\n\npub fn bessel_j_asymp_z<V:Value+Trig>(nu:V, z:V) -> V {\n\n let chi = z - (nu / 2 + 0.25) * V::PI;\n\n let mu = nu.sqr() * 4;\n\n (ι(2): V / (V::PI * z)).sqrt() * (asymp_even(nu, z) * sf_cos(chi) - asymp_odd(nu, z) * sf_sin(chi))\n\n}\n", "file_path": "sf/src/bessel/impls.rs", "rank": 31, "score": 218779.91851933944 }, { "content": "pub fn bessel_y_asymp_z<V:Value+Trig>(nu:V, z:V) -> V {\n\n let chi = z - (nu / 2 + 0.25) * V::PI;\n\n let mu = nu.sqr() * 4;\n\n (ι(2): V / (V::PI * z)).sqrt() * (asymp_even(nu, z) * sf_sin(chi) + asymp_odd(nu, z) * sf_cos(chi))\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 32, "score": 218779.91851933944 }, { "content": "pub fn bessel_k_asymp_z<V:Value+Exp>(nu:V, z:V) -> V {\n\n sf_sqrt(V::FRAC_PI_2/z) * sf_exp(-z) * asymp_all(nu, z)\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 33, "score": 218779.91851933944 }, { "content": "pub fn gamma_spouge<V:Value+Exp+Power>(a: isize, z: V) -> V {\n\n let z = z - 1;\n\n let res : V = (z + a).pow(z + 0.5) * sf_exp(-(z + a));\n\n let mut sm : V = V::SQRT2PI;\n\n let mut fact : V = V::one;\n\n for k in 1..=(a - 1) {\n\n sm += spouge_c(k, ι(a)):V / fact / (z + k);\n\n fact *= ι(-k):V;\n\n }\n\n res * sm\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n//\n\n// Asymptotic expansion\n\n//\n\n\n", "file_path": "sf/src/gamma/impls.rs", "rank": 34, "score": 215556.5863105958 }, { "content": "// for integral order (assumed non-negative)\n\npub fn bessel_j_recur_back<V: Value>(maxm: isize, n: isize, z: V) -> V {\n\n let mut jjp2 = V::zero;\n\n let mut jjp1 = V::one;\n\n let mut scale: V = ι(2);\n\n let mut res = V::zero;\n\n for m in (1..=(maxm - 2)).rev() {\n\n let jjm = -jjp2 + (ι(2): V * m / z) * jjp1;\n\n jjp2 = jjp1;\n\n jjp1 = jjm;\n\n if m == n + 1 {\n\n // desired value, but keep going to get scale-factor\n\n res = jjm;\n\n }\n\n scale += jjm.sqr() * (if m==1 {1} else {2});\n\n if abs(scale) > ι(1e20) {\n\n jjp2 /= 1024;\n\n jjp1 /= 1024;\n\n res /= 1024;\n\n scale /= 1024 * 
1024;\n\n }\n\n }\n\n res / scale.sqrt()\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 35, "score": 215506.91159542272 }, { "content": "// for large z with |arg z|<π\n\n// TODO: check domain\n\npub fn digamma_asympt<V:RealValue+Log+Trig>(z:V) -> V {\n\n if z < ι(0.5):V {\n\n return digamma_asympt(V::one - z) - V::PI/sf_tan(V::PI*z);\n\n }\n\n let z_2 = z.sqr().recip();\n\n let mut z2m = V::one;\n\n let mut t = sf_log(z) - (z*2).recip();\n\n let mut sum = t;\n\n for m in 0..1000 {\n\n z2m *= z_2;\n\n let old_t = t;\n\n let bm : V = ι(sf_bernoulli_number_approx((2*m+2) as usize)); // TODO\n\n t = z2m * bm / (2*m+2);\n\n let old_sum = sum;\n\n sum -= t;\n\n if sum==old_sum || abs(t)>abs(old_t) {break;}\n\n }\n\n sum\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/gamma/impls.rs", "rank": 36, "score": 215247.93467955347 }, { "content": "// this is 33% faster than the iterator-based implementation above!\n\n// (for the pure power-series part)\n\n// this is unexpected and unfortunate, as we'd prefer the iterator-based\n\n// style for code conciseness\n\npub fn sf_ln_1p_real(x: f64) -> f64 {\n\n if x > 0.25 {\n\n sf_ln_real(1.0 + x)\n\n } else {\n\n let xx = x / (x + 2.0);\n\n let x2 = xx.powi(2);\n\n let mut s = 0.0;\n\n let mut t = 2.0 * xx;\n\n for n in 0..1000 {\n\n let oldv = s;\n\n s += t / ((2 * n + 1) as f64);\n\n if (oldv - s).abs() <= s.abs() * f64::EPSILON {\n\n break;\n\n }\n\n t *= x2;\n\n }\n\n s\n\n }\n\n}\n", "file_path": "sf/src/log.rs", "rank": 37, "score": 213274.17540083558 }, { "content": "// TODO: clean up handling of nu type\n\npub fn bessel_j_series_int<V: Value + Gamma + Power>(nu:isize, z:V) -> V {\n\n let z2 = -(z / 2).sqr();\n\n let terms = (1..).scan(ι(1): V, |s, m| {\n\n *s *= z2 / m / (nu + m);\n\n Some(*s)\n\n });\n\n let terms = std::iter::once(ι(1)).chain(terms);\n\n sum_series(terms, V::epsilon) * (z / 2).pow(nu) / sf_factorial_approx(nu as usize)\n\n}\n", "file_path": "sf/src/bessel/impls.rs", "rank": 38, "score": 213038.62574967934 }, { "content": "pub fn bessel_i_series<V:Value+Gamma+Power>(nu:V, z:V) -> V {\n\n let z2 = (z / 2).sqr();\n\n let terms = (1..).scan(ι(1): V, |s, m| {\n\n *s *= z2 / m / (nu + m);\n\n Some(*s)\n\n });\n\n let terms = std::iter::once(ι(1)).chain(terms);\n\n sum_series(terms, V::epsilon) * (z / 2).pow(nu) / sf_gamma(nu + 1)\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 39, "score": 212592.39165955063 }, { "content": "pub trait Ring: Base + Additive + Multiplication {}\n\n\n\nimpl<T: Base + Additive + Multiplication> Ring for T {}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 40, "score": 212346.07819702948 }, { "content": "// TODO: clean up complex (should actually work, just clean up)\n\npub fn digamma<V:RealValue+Float+Log+Trig>(z:V) -> V {\n\n if z.is_nonposint() {\n\n V::infinity\n\n } else if abs(z)<=ι(10):V::NT {\n\n digamma_series(z)\n\n } else {\n\n digamma_asympt(z)\n\n }\n\n}\n\n\n", "file_path": "sf/src/gamma/impls.rs", "rank": 41, "score": 211755.9091850071 }, { "content": "#[inline]\n\npub fn fabs<T: Normed>(x: T) -> f64 { x.fabs() }\n", "file_path": "sf/src/traits.rs", "rank": 42, "score": 211223.3633914414 }, { "content": "#[inline]\n\npub fn sf_ellint_pi<V:EllipticIntegralThird>(c:V, k:V) -> V { k.ellint_pi(c) }\n", "file_path": "sf/src/ellint.rs", "rank": 43, "score": 210554.5064794329 }, { "content": "// for large nu>0\n\npub 
fn bessel_y_asymp_nu_1<V:Value+Exp+Power>(nu:V, z:V) -> V {\n\n -sf_sqrt(V::FRAC_1_PI*2/nu)*(V::E*z/(nu*2)).pow(-nu)\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 44, "score": 210012.49007227382 }, { "content": "pub trait RealType: Base + Normed + Ordered {\n\n type CT: ComplexValue<RT=Self>;\n\n}\n\n\n", "file_path": "sf/src/traits.rs", "rank": 45, "score": 209434.56557838642 }, { "content": "// quick-and-dirty\n\npub fn erf_inv<V:RealValue+Erf+Exp+Float>(z:V) -> V {\n\n if z<ι(-1) || z>ι(1) { return V::nan; }\n\n else if z == -1 {\n\n return -V::infinity;\n\n } else if z == 1 {\n\n return V::infinity;\n\n } else if z == 0 {\n\n return V::zero;\n\n }\n\n // quick approximation, could do better, but good enough to get the job done\n\n // with Halley (though could be more efficient)\n\n let c = V::FRAC_1_SQRTPI * 2;\n\n let t = z / c;\n\n let t2 = t.sqr();\n\n let mut r = t*(V::one + t2/3*(V::one + t2/10*(ι(7):V + t2*127/21)));\n\n for i in 0..20 {\n\n let o = r;\n\n let f = sf_erf(r) - z;\n\n let df = sf_exp(-r.sqr()) * c;\n\n r -= f / (df + r * f);\n\n if r == o {print!(\"<{}>\",i);break;}\n\n }\n\n r\n\n}\n", "file_path": "sf/src/erf/impls.rs", "rank": 46, "score": 208982.31586718294 }, { "content": "//TODO: domain check\n\npub fn beta_inc_contfrac<V:RealValue+Power+Gamma>(x:V, a:V, b:V) -> V {\n\n if a>ι(1):V && b>ι(1):V && x>(a-1)/(a+b-2) {\n\n return sf_beta(a,b) - beta_inc_contfrac(V::one-x, b, a)\n\n }\n\n let terms = (1..).map(|j|(\n\n if j.is_evenint() {\n\n let n = (j-2)/2;\n\n x * (n+1) * (b-n-1) / (a+2*n+1) / (a+2*n+2)\n\n } else {\n\n let n = (j-1)/2;\n\n -x * (a+n) * (a+b+n) / (a+2*n) / (a+2*n+1)\n\n }, V::one));\n\n let cf = contfrac_modlentz(V::one, terms, V::epsilon);\n\n x.pow(a) * (-x+1).pow(b) / (a*cf)\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n//\n\n// Spouge approximation\n\n//\n", "file_path": "sf/src/gamma/impls.rs", "rank": 47, "score": 206907.80643242667 }, { "content": "pub fn sf_ln_real(x: f64) -> f64 { x.ln() }\n\n\n", "file_path": "sf/src/log.rs", "rank": 48, "score": 206479.9478994253 }, { "content": "pub fn bessel_i_series_int<V:Value+Gamma+Power>(nu:isize, z:V) -> V {\n\n let z2 = (z / 2).sqr();\n\n let terms = (1..).scan(ι(1): V, |s, m| {\n\n *s *= z2 / m / (nu + m);\n\n Some(*s)\n\n });\n\n let terms = std::iter::once(ι(1)).chain(terms);\n\n sum_series(terms, V::epsilon) * (z / 2).pow(nu) / sf_factorial_approx(nu as usize)\n\n}\n", "file_path": "sf/src/bessel/impls.rs", "rank": 49, "score": 206328.8993894941 }, { "content": "pub fn frexp1(x:f64) -> (f64, isize) {\n\n if x.is_zero() || x.is_infinite() || x.is_nan() {\n\n (x, 0)\n\n } else if x.is_subnormal() {\n\n // TODO: subnormals\n\n todo!()\n\n } else {\n\n let b = x.to_bits();\n\n let e = (((b>>52) & 0x7FF) as isize) - 1023;\n\n let m = (b & !(0x7FF<<52)) | (1023<<52);\n\n (f64::from_bits(m), e)\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/algorithm.rs", "rank": 50, "score": 206045.18596655183 }, { "content": "#[inline]\n\npub fn sf_pow<P,V:Power<P>>(x:V, p:P) -> V { x.pow(p) }\n\n\n", "file_path": "sf/src/traits.rs", "rank": 51, "score": 205247.96175666616 }, { "content": "pub fn gamma_inc_co_contfrac<V:Value+Exp+Log+Power>(a:V, z:V) -> V {\n\n let terms = (1..).map(|j|if j.is_evenint(){(ι(j/2):V,z)}else{(-a+((j+1)/2),V::one)});\n\n let cf = contfrac_modlentz(z, 
terms, V::epsilon);\n\n // TDOO: this is a very simple-minded check\n\n if abs(z)>ι(100):V::NT || abs(a)>ι(100):V::NT {\n\n sf_exp(a * sf_log(z) - z - sf_log(cf))\n\n } else {\n\n z.pow(a) * sf_exp(-z) / cf\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n// TODO: direct quadrature?\n\n\n", "file_path": "sf/src/gamma/impls.rs", "rank": 52, "score": 204336.57726402848 }, { "content": "pub fn burl_el3_<V>(kc:V, x:V, p:V) -> V\n\n where V:Value+EllipticIntegralThird+Trig\n\n{\n\n sf_ellint_pi_inc(sf_atan(x), -p+1, sf_kc(kc))\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 53, "score": 204275.6827919477 }, { "content": "pub fn burl_el3<V>(kc:V, x:V, p:V) -> V\n\n where V:Value+EllipticIntegralSymmetric+Trig\n\n{\n\n let r = x.sqr().recip();\n\n burl_el1(kc, x) - (p-1)/3 * sf_ellint_rj(r, r+kc.sqr(), r+1, r+p)\n\n}\n", "file_path": "sf/src/ellint.rs", "rank": 54, "score": 204275.6827919477 }, { "content": "// basic series\n\npub fn aibi_1<V:Value>(z:V) -> V {\n\n let mut res = V::one;\n\n let mut term = V::one;\n\n let z3 = z*z*z;\n\n for n in 1..1000 {\n\n term *= z3 * (n*3-2) / ((n*3)*(n*3-1)*(n*3-2));\n\n let old_res = res;\n\n res += term;\n\n if res == old_res {break;}\n\n }\n\n res\n\n}\n\n\n", "file_path": "sf/src/airy.rs", "rank": 55, "score": 204208.70020337863 }, { "content": "// basic series\n\npub fn aibi_2<V:Value>(z:V) -> V {\n\n let mut res = z;\n\n let mut term = z;\n\n let z3 = z*z*z;\n\n for n in 1..1000 {\n\n term *= z3 * (n*3-1) / ((n*3+1)*(n*3)*(n*3-1));\n\n let old_res = res;\n\n res += term;\n\n if res == old_res {break;}\n\n }\n\n res\n\n}\n\n\n", "file_path": "sf/src/airy.rs", "rank": 56, "score": 204208.70020337863 }, { "content": "pub fn uv_series<V:Value+Power+Gamma+Exp+Trig,const Ub:bool,const Vb:bool>(a:V, z:V) -> (V,V) {\n\n // TODO: switch to alternate series in bad cases\n\n let e = uv_even(a,z);\n\n let o = uv_odd(a,z);\n\n let u = if !Ub {V::nan} else {u_u0(a) * e + u_du0(a) * o};\n\n let v = if !Vb {V::nan} else {v_v0(a) * e + v_dv0(a) * o};\n\n (u, v)\n\n}\n\n\n", "file_path": "sf/src/pcf.rs", "rank": 57, "score": 202964.91812866108 }, { "content": "pub fn sf_log_1p<V: Log>(x: V) -> V { x.log_1p() }\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\nuse crate::wide::*;\n\nimpl Log for Wide {\n\n #[inline] fn log(self) -> Self { self.log() }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\npub mod impls {\n\n use crate::algorithm::{contfrac_modlentz, sum_series, sum_series_};\n\n use crate::traits::*;\n\n\n\n #[inline]\n\n pub fn ln1p_power_series_terms<V: Value>(x: V) -> impl Iterator<Item = V> {\n\n let xx = x / (x + 2);\n\n let x2 = xx.sqr();\n\n (0..).scan(xx * 2, move |s, n| {\n", "file_path": "sf/src/log.rs", "rank": 58, "score": 202650.14485689928 }, { "content": "pub fn sf_log<V: Log>(x: V) -> V { x.log() }\n", "file_path": "sf/src/log.rs", "rank": 59, "score": 202650.14485689928 }, { "content": "pub fn ell_pi_incomplete<V>(phi:V, c:V, k:V) -> V\n\n where V:Value+Log+Trig\n\n +EllipticIntegralFirst+EllipticIntegralSecond+EllipticIntegralSymmetric\n\n{\n\n if phi == 0 {\n\n V::zero\n\n } else {\n\n pi_gauss_transform(phi, c, k)\n\n }\n\n}\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 60, "score": 202230.50014658662 }, { "content": "// TODO: transform recursion to iteration\n\npub fn 
pi_gauss_transform<V>(phi:V, c:V, k:V) -> V\n\n where V:Value+Log+Trig\n\n +EllipticIntegralFirst+EllipticIntegralSecond+EllipticIntegralSymmetric\n\n{\n\n let kp = sf_kc(k);\n\n // TODO: domain check\n\n\n\n if kp == 1 {\n\n let cp = sf_sqrt(ι(1):V - c);\n\n return sf_atan(cp * sf_tan(phi)) / cp;\n\n } else if V::one - (k.sqr()/c) == 0 {\n\n // special case else rho below is zero\n\n return (sf_ellint_e_inc(phi, k)\n\n - c*sf_cos(phi)*sf_sin(phi)/sf_sqrt(ι(1):V-c*sf_sin(phi).sqr()))/(-c+1);\n\n }\n\n\n\n let k1 = (-kp+1)/(kp+1);\n\n let delta = sf_sqrt(ι(1):V - k.sqr()*sf_sin(phi).sqr());\n\n let psi1 = sf_asin((kp+1)*sf_sin(phi)/(delta+1));\n\n let rho = sf_sqrt(V::one - (k.sqr()/c));\n\n let c1 = c*((rho+1)/(kp+1)).sqr();\n\n let xi = sf_csc(phi).sqr();\n\n let newgt = pi_gauss_transform(psi1, c1, k1);\n\n (newgt*4/(kp+1) + (rho-1)*sf_ellint_f(phi,k) - sf_ellint_rc(xi-1, xi-c))/rho\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 61, "score": 202230.50014658662 }, { "content": "#[inline]\n\npub fn sf_kc<V:Value>(k:V) -> V {\n\n sf_sqrt(V::one - k.sqr())\n\n}\n\n\n\nuse crate::traits::*;\n\nuse crate::agm::*;\n\nuse crate::real::*;\n\nuse crate::trig::*;\n\n\n\nimpl EllipticIntegralFirst for r64 {\n\n fn ellint_k(self) -> Self {\n\n if (ι(1):r64 - self*self).is_negreal() {\n\n ::log::warn!(\"Domain error EllipticIntegralFirst::<{}>::ellint_k({:e})\", std::any::type_name::<Self>(), self);\n\n r64::nan\n\n } else {\n\n impls::ell_k(self)\n\n }\n\n }\n\n fn ellint_f(self, phi:Self) -> Self {\n\n if !phi.є(-r64::FRAC_PI_2, r64::FRAC_PI_2) {\n", "file_path": "sf/src/ellint.rs", "rank": 62, "score": 201675.6792478172 }, { "content": "// special case when theta==pi/2 with special integral\n\npub fn sievert_pi2<V:Value>(z:V) -> V {\n\n // integrate sf_bessel_k(0, t) for t=z..Infinity\n\n // aka, according to Mathematica:\n\n // -(1/2) \\[Pi] (-1 + z BesselK[0, z] StruveL[-1, z] + z BesselK[1, z] StruveL[0, z])\n\n unimplemented!(\"sievert_pi2({:?})\", z)\n\n}\n\n\n\n// faster convergence for larger z/cos(theta)\n\n// use for theta>pi/4 && z>1\n\n// (otherwise just integrate the definition?!)\n\n/*\n", "file_path": "sf/src/sievert.rs", "rank": 63, "score": 201671.1192667211 }, { "content": "pub fn sinint_series<V:Value>(z:V) -> V {\n\n let mut sum = z;\n\n let mut t = z;\n\n let z2 = -z.sqr();\n\n for n in 1..1000 {\n\n t *= z2/((2*n)*(2*n+1));\n\n let old_sum = sum;\n\n sum += t/(2*n+1);\n\n if sum == old_sum {break;}\n\n }\n\n sum\n\n}\n\n\n", "file_path": "sf/src/expint.rs", "rank": 64, "score": 201671.1192667211 }, { "content": "pub fn dilog_series<V:Value>(z:V) -> V {\n\n let mut t = z;\n\n let mut sum = t;\n\n for n in 2..1000 {\n\n t *= z;\n\n let old = sum;\n\n sum += t/(n*n);\n\n if sum == old {break;}\n\n }\n\n sum\n\n}\n\n\n", "file_path": "sf/src/polylog.rs", "rank": 65, "score": 201671.1192667211 }, { "content": "pub fn tan_contfrac<V:Value>(z:V) -> V {\n\n let z2 = -z.sqr();\n\n let terms = (1..1000).map(|j|(z2,ι(2*j+1):V));\n\n z / contfrac_modlentz(V::one, terms, V::epsilon)\n\n}\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n\n// TODO: quick placeholder impl\n\nuse crate::real::r64;\n\nimpl Trig for r64 {\n\n fn cos(self) -> Self { r64(self.0.cos()) }\n\n fn acos(self) -> Self { r64(self.0.acos()) }\n\n fn sin(self) -> Self { r64(self.0.sin()) }\n\n fn asin(self) -> Self { r64(self.0.asin()) }\n\n fn tan(self) -> Self { r64(self.0.tan()) 
}\n\n fn atan(self) -> Self { r64(self.0.atan()) }\n\n\n\n fn cosh(self) -> Self { r64(self.0.cosh()) }\n", "file_path": "sf/src/trig.rs", "rank": 66, "score": 201671.1192667211 }, { "content": "// a+bx+cx^2=0\n\n// TODO: more cleanly deal with real/complex cases...\n\n// TODO: do this in an actually robust manner!\n\npub fn solve_quadratic<V:Value>(a:V, b:V, c:V) -> (V,V) {\n\n let (r1, r2);\n\n if c == 0 {\n\n r1 = solve_linear(a, b);\n\n r2 = V::nan;\n\n } else {\n\n let δ = sf_sqrt(b.sqr() - a*c*4);\n\n r1 = (-b + δ)/(c*2);\n\n r2 = (-b - δ)/(c*2);\n\n }\n\n (r1, r2)\n\n}\n\n\n", "file_path": "sf/src/solve.rs", "rank": 67, "score": 200520.46029441158 }, { "content": "pub fn burl_el2<V>(kc:V, x:V, a:V, b:V) -> V\n\n where V:Value+EllipticIntegralSymmetric\n\n{\n\n let r = x.sqr().recip();\n\n a * burl_el1(kc, x) + (b-a)/3 * sf_ellint_rd(r, r+kc.sqr(), r+1)\n\n}\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 68, "score": 200520.46029441158 }, { "content": "// TODO: not great precision from this\n\n// may be from lngamma, perhaps\n\npub fn airy_series<V:Value+AiryConstants>(z:V) -> (V,V) {\n\n let s1 = aibi_1(z);\n\n let s2 = aibi_2(z);\n\n let ai = V::AI_0*s1 + V::DAI_0*s2;\n\n let bi = V::BI_0*s1 + V::DBI_0*s2;\n\n (ai,bi)\n\n}\n\n\n", "file_path": "sf/src/airy.rs", "rank": 69, "score": 199906.61729739478 }, { "content": "pub trait OrthogonalPolynomial<V: Value> {\n\n fn domain(&self) -> (V, V);\n\n fn coeff(&self, n: usize, k: usize) -> V;\n\n fn scale(&self, n: usize) -> V; // and scale_squared?\n\n fn value(&self, n: usize, x: V) -> V;\n\n fn weight(&self, n: usize, k: usize) -> V;\n\n fn zero(&self, n: usize, k: usize) -> V;\n\n fn kernel(&self, x: V) -> V;\n\n\n\n fn coeffs(&self, n: usize) -> Vec<V>;\n\n fn weights(&self, n: usize) -> Vec<V>;\n\n fn zeros(&self, n: usize) -> Vec<V>;\n\n // (also variants for j'th derivative)\n\n\n\n fn poly(&self, n: usize) -> Poly<V>;\n\n\n\n // TODO: maybe return more information...\n\n //fn integrate<F:Fn(V)->V>(&self, n:usize, f:F) -> V;\n\n // TODO: maybe do this kind of thing instead?\n\n //fn integrator(&self, n:usize) -> impl Integrator<V>;\n\n}\n\n\n\n/*\n", "file_path": "sf/src/orthopoly/mod.rs", "rank": 70, "score": 199583.34414041575 }, { "content": "pub fn impl_vec<V:Value+Trig>(a:V, b:V, c0:V, extra:Option<V>) -> (Vec<V>,Vec<V>,Vec<V>,Option<Vec<V>>) {\n\n if a.is_zero() || b.is_zero() { todo!(); }\n\n let (a_,b_) = (a,b);\n\n // TODO: be smarter with vectors...\n\n // maybe cleaner return value\n\n let mut va = Vec::new();\n\n let mut vb = Vec::new();\n\n let (mut a, mut b) = (a, b);\n\n for i in 1..1000 {\n\n va.push(a);\n\n vb.push(b);\n\n let a0 = a;\n\n let b0 = b;\n\n a = (a0 + b0) / 2;\n\n b = sf_sqrt(a0 * b0);\n\n if a==b || (a==a0 && b==b0) {\n\n ::log::debug!(\"impl_vec::<{}>({},{},..) 
converged in {} iterations\", std::any::type_name::<V>(), a_, b_, i);\n\n break;\n\n }\n\n }\n", "file_path": "sf/src/agm.rs", "rank": 71, "score": 199562.09839790984 }, { "content": "// a+bx=0\n\npub fn solve_linear<V:Value>(a:V, b:V) -> V {\n\n -a/b\n\n}\n\n\n", "file_path": "sf/src/solve.rs", "rank": 72, "score": 198322.76505634963 }, { "content": "pub fn sf_agm<V:AGM>(a:V, b:V) -> V {\n\n a.agm(b)\n\n}\n", "file_path": "sf/src/agm.rs", "rank": 73, "score": 198322.76505634963 }, { "content": "pub fn airy_series__combined<V:Value+AiryConstants>(z:V) -> (V,V) {\n\n let z3 = z*z*z;\n\n let mut term_1 = V::one;\n\n let mut term_2 = z;\n\n let mut ai = V::AI_0 + V::DAI_0*z;\n\n let mut bi = V::BI_0 + V::DBI_0*z;\n\n for n in 1..1000 {\n\n let old_ai = ai;\n\n let old_bi = bi;\n\n term_1 *= z3 * (n*3-2) / ((n*3)*(n*3-1)*(n*3-2));\n\n term_2 *= z3 * (n*3-1) / ((n*3+1)*(n*3)*(n*3-1));\n\n ai += V::AI_0*term_1 + V::DAI_0*term_2;\n\n bi += V::BI_0*term_1 + V::DBI_0*term_2;\n\n if ai == old_ai && bi == old_bi {break;}\n\n }\n\n (ai,bi)\n\n}\n\n\n", "file_path": "sf/src/airy.rs", "rank": 74, "score": 197883.11534449388 }, { "content": "pub fn bessel_k_series_int<V:Value+BesselI<isize>+Gamma+Log>(n:isize, z:V) -> V {\n\n let z22 = (z/2).sqr();\n\n let mut sum = sf_log(z/2) * sf_bessel_i(n,z).pari(n+1);\n\n let mut t = if n==0 {V::one/2} else {(z/2).pow(-n)/2 * sf_factorial_approx((n-1) as usize)};\n\n for k in 0..n {\n\n sum += t;\n\n t *= -z22 / (k+1) / (n-k-1);\n\n }\n\n let mut t = (z/2).pow(n).pari(n)/2;\n\n for k in 0..1000 {\n\n let old_sum = sum;\n\n sum += t * (sf_digamma(ι(k+1):V) + sf_digamma(ι(n+k+1):V));\n\n if sum != sum {break;}\n\n if old_sum == sum {break;}\n\n t *= z22 / (k+1) / (n+k+1);\n\n }\n\n sum\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 75, "score": 197633.80603450435 }, { "content": "pub fn ell_k<V:Value+AGM>(k:V) -> V {\n\n let a = sf_agm(ι(1), sf_kc(k));\n\n V::PI / (a*2)\n\n}\n\n\n\n////////////////////////////////////////\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 76, "score": 197472.65636983298 }, { "content": "pub fn ell_e<V:Value+AGM>(k:V) -> V {\n\n // TODO: domain\n\n let (va,_,vc) = sf_agm_vec(V::one, sf_kc(k), k);\n\n let n = vc.len();\n\n let mut res : V = -k.sqr() + 2;\n\n for i in 1..n {\n\n res -= vc[i].sqr() << (i as isize);\n\n }\n\n res * V::PI / (va[n-1]*4)\n\n}\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 77, "score": 197472.65636983298 }, { "content": "#[inline]\n\npub fn sf_ellint_pi_inc<V:EllipticIntegralThird>(phi:V, c:V, k:V) -> V { k.ellint_pi_inc(c, phi) }\n\n\n", "file_path": "sf/src/ellint.rs", "rank": 78, "score": 196547.72018798598 }, { "content": "#[inline]\n\nfn qtsum(a: f64, b: f64) -> (f64, f64) {\n\n let s = a + b;\n\n let e = b + (a - s); // = b-(s-a)\n\n (s, e)\n\n}\n\n\n\n// general\n", "file_path": "sf/src/wide.rs", "rank": 79, "score": 196091.60012334064 }, { "content": "#[inline]\n\nfn ddsum(a: f64, b: f64) -> (f64, f64) {\n\n let s = a + b;\n\n let v = s - a;\n\n let e = (a + (v - s)) + (b - v); // = (a-(s-v))+(b-v)\n\n (s, e)\n\n}\n\n\n", "file_path": "sf/src/wide.rs", "rank": 80, "score": 196091.60012334064 }, { "content": "#[inline]\n\nfn ddprod(a: f64, b: f64) -> (f64, f64) {\n\n/*\n\n let (ahi, alo) = split(a);\n\n let (bhi, blo) = split(b);\n\n let p = a * b;\n\n let e = (((ahi * bhi - p) + ahi * blo) + alo * bhi) + alo * blo;\n\n (p, e)\n\n*/\n\n let p = a * b;\n\n let e = a.mul_add(b, -p);\n\n (p, e)\n\n}\n\n\n", "file_path": "sf/src/wide.rs", "rank": 81, "score": 196091.60012334064 }, { "content": 
"pub fn sf_bern2(n: usize) -> f64 {\n\n if n == 0 {\n\n 1.0\n\n } else if n == 1 {\n\n -0.5\n\n } else if n % 2 == 1 {\n\n 0.0\n\n } else {\n\n (((r64::PI * 2).pow(-(n as isize)) * 2 * sf_factorial_approx(n) * sf_zeta_approx(n)).pari((1 + n / 2) as isize)).0\n\n }\n\n}\n", "file_path": "sf/src/numbers.rs", "rank": 82, "score": 195199.44870593824 }, { "content": "pub fn bessel_y_series_int<V:Value+BesselJ<isize>+Gamma+Log>(n:isize, z:V) -> V {\n\n let z22 = (z/2).sqr();\n\n let mut sum = V::FRAC_1_PI * 2 * sf_log(z/2) * sf_bessel_j(n,z);\n\n let mut t = -(z/2).pow(-n) * V::FRAC_1_PI * sf_factorial_approx((n-1) as usize);\n\n for k in 0..n {\n\n sum += t;\n\n t *= z22 / (k+1) / (n-k-1);\n\n }\n\n let mut t = -(z/2).pow(n) * V::FRAC_1_PI;\n\n for k in 0..1000 {\n\n let old_sum = sum;\n\n sum += t * (sf_digamma(ι(k+1):V) + sf_digamma(ι(n+k+1):V));\n\n if sum != sum {break;}\n\n if old_sum == sum {break;}\n\n t *= -z22 / (k+1) / (n+k+1);\n\n }\n\n sum\n\n}\n\n\n", "file_path": "sf/src/bessel/impls.rs", "rank": 83, "score": 195197.50573275148 }, { "content": "// TODO: use Kahan (use Wide value of constant to start?)\n\npub fn cosint_series<V:Value+Log>(z:V) -> V {\n\n let mut sum = V::EULER_GAMMA + sf_log(z);\n\n let mut t = V::one;\n\n let z2 = -z.sqr();\n\n for n in 1..1000 {\n\n t *= z2/((2*n-1)*(2*n));\n\n let old_sum = sum;\n\n sum += t/(2*n);\n\n if sum == old_sum {break;}\n\n }\n\n sum\n\n}\n\n\n\nuse crate::algorithm::{contfrac_modlentz};\n", "file_path": "sf/src/expint.rs", "rank": 84, "score": 195060.2283593043 }, { "content": "// continued fraction for E_1(z)\n\npub fn e1_contfrac<V:Value+Exp>(z:V) -> V {\n\n let terms = (1..).map(|n|(ι((n+1)/2):V, if n.is_evenint(){z}else{V::one}));\n\n sf_exp(-z)/contfrac_modlentz(z, terms, V::epsilon)\n\n}\n\n\n", "file_path": "sf/src/expint.rs", "rank": 85, "score": 195059.9080955917 }, { "content": "pub fn expint_en_1<V:Value+Log>(z:V) -> V {\n\n let mut sum = -V::EULER_GAMMA - sf_log(z);\n\n let mut term = -V::one;\n\n for k in 1..1000 {\n\n term *= -z/k;\n\n let old_sum = sum;\n\n sum += term/k;\n\n if sum == old_sum {break;}\n\n }\n\n sum\n\n}\n\n\n", "file_path": "sf/src/expint.rs", "rank": 86, "score": 195055.3182814225 }, { "content": "pub fn k1<V:Value+Exp>(z:V) -> V {\n\n let iz = z.recip();\n\n V::PI/2 * sf_exp(-z) * (iz + 1)*iz\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 87, "score": 195055.3182814225 }, { "content": "pub fn i2_0<V:Value+Trig>(z:V) -> V {\n\n sf_cosh(z)/z\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 88, "score": 195055.3182814225 }, { "content": "pub fn y1<V:Value+Trig>(z:V) -> V {\n\n -(sf_cos(z)/z + sf_sin(z))/z\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 89, "score": 195055.3182814225 }, { "content": "pub fn y0<V:Value+Trig>(z:V) -> V {\n\n -sf_cos(z)/z\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 90, "score": 195055.3182814225 }, { "content": "pub fn expint_en_0<V:Value+Exp>(z:V) -> V {\n\n sf_exp(-z)/z\n\n}\n\n\n", "file_path": "sf/src/expint.rs", "rank": 91, "score": 195055.3182814225 }, { "content": "pub fn i2_1<V:Value+Trig>(z:V) -> V {\n\n let iz = z.recip();\n\n (-sf_cosh(z)*iz + sf_sinh(z))*iz\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 92, "score": 195055.3182814225 }, { "content": "pub fn cosint_asympt_g<V:Value+Normed>(z:V) -> V {\n\n let mut t = V::one;\n\n let mut sum = V::one;\n\n let z2 = -z.sqr().recip();\n\n for n in 1..1000 {\n\n let old_t = t;\n\n t *= z2*(2*n)*(2*n+1);\n\n if abs(t) > abs(old_t) {break;}\n\n let old_sum 
= sum;\n\n sum += t;\n\n if old_sum == sum {break;}\n\n }\n\n sum / z.sqr()\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "sf/src/expint.rs", "rank": 93, "score": 195055.3182814225 }, { "content": "pub fn k0<V:Value+Exp>(z:V) -> V {\n\n V::PI/2 * sf_exp(-z) / z\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 94, "score": 195055.3182814225 }, { "content": "pub fn cosint_asympt_f<V:Value+Normed>(z:V) -> V {\n\n let mut t = V::one;\n\n let mut sum = V::one;\n\n let z2 = -z.sqr().recip();\n\n for n in 1..1000 {\n\n let old_t = t;\n\n t *= z2*(2*n)*(2*n-1);\n\n if abs(t) > abs(old_t) {break;}\n\n let old_sum = sum;\n\n sum += t;\n\n if old_sum == sum {break;}\n\n }\n\n sum / z\n\n}\n\n\n", "file_path": "sf/src/expint.rs", "rank": 95, "score": 195055.3182814225 }, { "content": "pub fn i1_0<V:Value+Trig>(z:V) -> V {\n\n // TODO: test whether we should do series expansion\n\n // for z~0\n\n // if abs(z)*2 < V::NT::one {\n\n // algorithms::cum_prods_1((1..25).map(|i|z.sqr()/((2*i)*(2*i+1))).sum()\n\n // }\n\n sf_sinh(z)/z\n\n}\n\n\n", "file_path": "sf/src/bessel/spher.rs", "rank": 96, "score": 195055.3182814225 }, { "content": "pub fn sf_exp_m1_real<V:RealValue>(x:V) -> V {\n\n if x.dabs() < 0.70 {\n\n exp__powser(x, V::zero())\n\n } else {\n\n sf_exp_real(x) - V::one()\n\n }\n\n}\n\n*/\n\n\n\n\n\n\n\n\n", "file_path": "sf/src/exp.rs", "rank": 97, "score": 194745.21244220418 }, { "content": "// a+bx+cx^2+dx^3=0\n\n// TODO: check for repeated roots, make robust, etc.\n\n// TODO: deal more cleanly with real / complex cases\n\npub fn solve_cubic<V:Value+Trig>(a:V, b:V, c:V, d:V) -> (V,V,V) {\n\n let (r1, r2, r3);\n\n if d == 0 {\n\n (r1, r2) = solve_quadratic(a, b, c);\n\n r3 = V::nan;\n\n } else {\n\n // get equivalent \"depressed\" cubic: t^3+pt+q=0\n\n let p = (b*d*3 - c.sqr())/(d.sqr()*3);\n\n let q = (c.cub()*2 - b*c*d*9 + a*d.sqr()*27)/(d.cub()*27);\n\n // TODO: handle case when p==0 (cube-root * roots-of-unity)\n\n // trigonometric approach\n\n let t = sf_sqrt(-p*4/3);\n\n let α = sf_acos(-q*4/t.cub());\n\n let c3d = -c/(d*3);\n\n r1 = c3d + t*sf_cos(α/3);\n\n r2 = c3d + t*sf_cos(α/3 + V::PI*2/3);\n\n r3 = c3d + t*sf_cos(α/3 + V::PI*4/3);\n\n }\n\n // TODO: maybe a step of Newton to \"polish\"?\n\n //let polish = |x|{x - (((x*d+c)*x+b)*x+a)/((x*d*3+c*2)*x+b)};\n\n //let (r1, r2, r3) = (polish(r1), polish(r2), polish(r3));\n\n (r1, r2, r3)\n\n}\n\n\n", "file_path": "sf/src/solve.rs", "rank": 98, "score": 194195.6593416701 }, { "content": "pub fn sf_hypergeom_1f0<N,V>(a0:N, z:V) -> V\n\nwhere V:Hypergeometric1F0<N> {\n\n z.hypergeom_1f0(a0)\n\n}\n\n\n", "file_path": "sf/src/hypergeom.rs", "rank": 99, "score": 193075.4418372361 } ]
Rust
operators/src/processing/circle_merging_quadtree/grid.rs
koerberm/geoengine
61e0ec7a0c1136b4360b0f9c6306c34198e8ac3a
use geoengine_datatypes::primitives::{AxisAlignedRectangle, BoundingBox2D, Coordinate2D};

use super::{
    circle_of_points::CircleOfPoints, circle_radius_model::CircleRadiusModel,
    hash_map::SeparateChainingHashMap,
};

#[derive(Clone, Debug)]
pub struct Grid<C: CircleRadiusModel> {
    offset: Coordinate2D,
    cell_width: f64,
    number_of_horizontal_cells: usize,
    cells: SeparateChainingHashMap<u16, CircleOfPoints>,
    radius_model: C,
}

impl<C: CircleRadiusModel> Grid<C> {
    pub fn new(bbox: BoundingBox2D, radius_model: C) -> Self {
        let cell_width =
            (2. * radius_model.min_radius() + radius_model.delta()) / std::f64::consts::SQRT_2;

        let map_width = bbox.size_x();
        let map_height = bbox.size_y();

        let mut number_of_horizontal_cells = (map_width / cell_width).ceil() as usize;
        let number_of_vertical_cells = (map_height / cell_width).ceil() as usize;

        if (number_of_horizontal_cells * number_of_vertical_cells) > 256 * 256 {
            number_of_horizontal_cells = number_of_horizontal_cells.max(256);
        }

        let offset_x = (bbox.lower_left().x / cell_width).floor() * cell_width;
        let offset_y = (bbox.lower_left().y / cell_width).floor() * cell_width;

        Self {
            offset: Coordinate2D {
                x: offset_x,
                y: offset_y,
            },
            cell_width,
            number_of_horizontal_cells,
            cells: SeparateChainingHashMap::new(),
            radius_model,
        }
    }

    pub fn insert(&mut self, circle_of_points: CircleOfPoints) {
        let grid_x = ((circle_of_points.circle.x() - self.offset.x) / self.cell_width) as usize;
        let grid_y = ((circle_of_points.circle.y() - self.offset.x) / self.cell_width) as usize;

        let grid_pos = grid_y * self.number_of_horizontal_cells + grid_x;

        match self.cells.entry(grid_pos as u16) {
            super::hash_map::ValueRef::Vacant(entry_pos) => {
                self.cells.insert_unchecked(entry_pos, circle_of_points);
            }
            super::hash_map::ValueRef::Occupied(matched_circle_of_points) => {
                matched_circle_of_points.merge(&circle_of_points, &self.radius_model);
            }
        }
    }

    pub fn drain(self) -> impl Iterator<Item = CircleOfPoints> {
        self.cells.into_iter()
    }

    pub fn radius_model(&self) -> &C {
        &self.radius_model
    }
}

#[cfg(test)]
mod tests {
    use geoengine_datatypes::primitives::{Circle, TimeInterval};

    use crate::processing::circle_merging_quadtree::circle_radius_model::LogScaledRadius;

    use super::*;

    #[test]
    fn test_grid() {
        let mut grid = Grid::new(
            BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
            LogScaledRadius::new(2., 1.).unwrap(),
        );

        grid.insert(CircleOfPoints::new_with_one_point(
            Circle::new(1., 1., 1.),
            TimeInterval::default(),
            Default::default(),
        ));
        grid.insert(CircleOfPoints::new_with_one_point(
            Circle::new(2., 1., 1.),
            TimeInterval::default(),
            Default::default(),
        ));
        grid.insert(CircleOfPoints::new_with_one_point(
            Circle::new(6., 6., 1.),
            TimeInterval::default(),
            Default::default(),
        ));

        assert_eq!(
            grid.drain().collect::<Vec<_>>(),
            vec![
                CircleOfPoints::new(
                    Circle::new(1.5, 1., 2.693_147_180_559_945_4),
                    2,
                    TimeInterval::default(),
                    Default::default(),
                )
                .unwrap(),
                CircleOfPoints::new_with_one_point(
                    Circle::new(6., 6., 1.),
                    TimeInterval::default(),
                    Default::default(),
                ),
            ]
        );
    }
}
use geoengine_datatypes::primitives::{AxisAlignedRectangle, BoundingBox2D, Coordinate2D};

use super::{
    circle_of_points::CircleOfPoints, circle_radius_model::CircleRadiusModel,
    hash_map::SeparateChainingHashMap,
};

#[derive(Clone, Debug)]
pub struct Grid<C: CircleRadiusModel> {
    offset: Coordinate2D,
    cell_width: f64,
    number_of_horizontal_cells: usize,
    cells: SeparateChainingHashMap<u16, CircleOfPoints>,
    radius_model: C,
}

impl<C: CircleRadiusModel> Grid<C> {
    pub fn new(bbox: BoundingBox2D, radius_model: C) -> Self {
        let cell_width =
            (2. * radius_model.min_radius() + radius_model.delta()) / std::f64::consts::SQRT_2;

        let map_width = bbox.size_x();
        let map_height = bbox.size_y();

        let mut number_of_horizontal_cells = (map_width / cell_width).ceil() as usize;
        let number_of_vertical_cells = (map_height / cell_width).ceil() as usize;

        if (number_of_horizontal_cells * number_of_vertical_cells) > 256 * 256 {
            number_of_horizontal_cells = number_of_horizontal_cells.max(256);
        }

        let offset_x = (bbox.lower_left().x / cell_width).floor() * cell_width;
        let offset_y = (bbox.lower_left().y / cell_width).floor() * cell_width;

        Self {
            offset: Coordinate2D {
                x: offset_x,
                y: offset_y,
            },
            cell_width,
            number_of_horizontal_cells,
            cells: SeparateChainingHashMap::new(),
            radius_model,
        }
    }

    pub fn insert(&mut self, circle_of_points: CircleOfPoints) {
        let grid_x = ((circle_of_points.circle.x() - self.offset.x) / self.cell_width) as usize;
        let grid_y = ((circle_of_points.circle.y() - self.offset.x) / self.cell_width) as usize;

        let grid_pos = grid_y * self.number_of_horizontal_cells + grid_x;

        match self.cells.entry(grid_pos as u16) {
            super::hash_map::ValueRef::Vacant(entry_pos) => {
                self.cells.insert_unchecked(entry_pos, circle_of_points);
            }
            super::hash_map::ValueRef::Occupied(matched_circle_of_points) => {
                matched_circle_of_points.merge(&circle_of_points, &self.radius_model);
            }
        }
    }

    pub fn drain(self) -> impl Iterator<Item = CircleOfPoints> {
        self.cells.into_iter()
    }

    pub fn radius_model(&self) -> &C {
        &self.radius_model
    }
}

#[cfg(test)]
mod tests {
    use geoengine_datatypes::primitives::{Circle, TimeInterval};

    use crate::processing::circle_merging_quadtree::circle_radius_model::LogScaledRadius;

    use super::*;

    #[test]
}
fn test_grid() {
    let mut grid = Grid::new(
        BoundingBox2D::new((0., 0.).into(), (10., 10.).into()).unwrap(),
        LogScaledRadius::new(2., 1.).unwrap(),
    );

    grid.insert(CircleOfPoints::new_with_one_point(
        Circle::new(1., 1., 1.),
        TimeInterval::default(),
        Default::default(),
    ));
    grid.insert(CircleOfPoints::new_with_one_point(
        Circle::new(2., 1., 1.),
        TimeInterval::default(),
        Default::default(),
    ));
    grid.insert(CircleOfPoints::new_with_one_point(
        Circle::new(6., 6., 1.),
        TimeInterval::default(),
        Default::default(),
    ));

    assert_eq!(
        grid.drain().collect::<Vec<_>>(),
        vec![
            CircleOfPoints::new(
                Circle::new(1.5, 1., 2.693_147_180_559_945_4),
                2,
                TimeInterval::default(),
                Default::default(),
            )
            .unwrap(),
            CircleOfPoints::new_with_one_point(
                Circle::new(6., 6., 1.),
                TimeInterval::default(),
                Default::default(),
            ),
        ]
    );
}
function_block-full_function
[ { "content": "pub fn fn_stream() -> impl Stream<Item = usize> {\n\n let mut counter: usize = 2;\n\n\n\n stream::poll_fn(move |_| -> Poll<Option<usize>> {\n\n if counter == 0 {\n\n return Poll::Ready(None);\n\n }\n\n counter -= 1;\n\n Poll::Ready(Some(counter))\n\n })\n\n}\n\n\n", "file_path": "operators/tests/streams.rs", "rank": 0, "score": 332833.97047263675 }, { "content": "/// Initialize a basic logger within tests.\n\n/// You should only use this for debugging.\n\n///\n\n/// # Panics\n\n/// This function will panic if the logger cannot be initialized.\n\n///\n\npub fn initialize_debugging_in_test() {\n\n Logger::try_with_str(\"debug\").unwrap().start().unwrap();\n\n}\n\n\n", "file_path": "services/src/util/tests.rs", "rank": 1, "score": 281196.2731216136 }, { "content": "/// snap `value` to next `step` multiple from `start`\n\npub fn snap_next(start: f64, step: f64, value: f64) -> f64 {\n\n start + ((value - start) / step).ceil() * step\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn assert_approx_eq_for_floats() {\n\n assert_approx_eq!(&[1., 2., 3.], &[1., 2., 3.]);\n\n\n\n assert!(!approx_eq_floats(&[1., 2.], &[1., 2., 3.]));\n\n }\n\n\n\n #[test]\n\n #[allow(clippy::float_cmp)]\n\n fn it_snaps_right() {\n", "file_path": "datatypes/src/util/helpers.rs", "rank": 2, "score": 244277.97065615526 }, { "content": "/// snap `value` to previous `step` multiple from `start`\n\npub fn snap_prev(start: f64, step: f64, value: f64) -> f64 {\n\n start + ((value - start) / step).floor() * step\n\n}\n\n\n", "file_path": "datatypes/src/util/helpers.rs", "rank": 3, "score": 244277.97065615526 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn offset() {\n\n let array = {\n\n let mut array_builder = Float64Builder::new(5);\n\n array_builder\n\n .append_slice(&[2e10, 4e40, 20., 9.4, 0.])\n\n .unwrap();\n\n array_builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n assert_eq!(array.offset(), 0);\n\n\n\n let subarray = array.slice(2, 2);\n\n let typed_subarray: &Float64Array = subarray.as_any().downcast_ref().unwrap();\n\n\n\n assert_eq!(subarray.len(), 2);\n\n assert_eq!(subarray.offset(), 2);\n\n assert_eq!(typed_subarray.values().len(), 2);\n\n\n\n assert_eq!(typed_subarray.values(), &[20., 9.4]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 4, "score": 238010.66228706267 }, { "content": "#[must_use]\n\npub fn approx_eq_floats(left: &[f64], right: &[f64]) -> bool {\n\n if left.len() != right.len() {\n\n return false;\n\n }\n\n\n\n for (&l, &r) in left.iter().zip(right) {\n\n if !float_cmp::approx_eq!(f64, l, r) {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Create hash maps by specifying key-value pairs\n\n#[macro_export]\n\nmacro_rules! 
hashmap {\n\n (@void $($x:tt)*) => (());\n\n (@count $($tts:expr),*) => (<[()]>::len(&[$(hashmap!(@void $tts)),*]));\n\n\n", "file_path": "datatypes/src/util/helpers.rs", "rank": 5, "score": 233825.17051836458 }, { "content": "pub fn fold_fn<T, C>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n C: AccFunction,\n\n{\n\n let mut accu_tile = acc.accu_tile;\n\n\n\n let grid = if acc.initial_state {\n\n tile.grid_array\n\n } else {\n\n match (accu_tile.grid_array, tile.grid_array) {\n\n (GridOrEmpty::Grid(mut a), GridOrEmpty::Grid(g)) => {\n\n a.data = a\n\n .inner_ref()\n\n .iter()\n\n .zip(g.inner_ref())\n\n .map(|(x, y)| C::acc(a.no_data_value, *x, *y))\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 6, "score": 230882.79674745534 }, { "content": "pub fn no_data_ignoring_fold_fn<T, C>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n C: NoDataIgnoringAccFunction,\n\n{\n\n let TemporalRasterAggregationTileAccu {\n\n mut accu_tile,\n\n initial_state: _initial_state,\n\n pool,\n\n } = acc;\n\n\n\n let grid = match (accu_tile.grid_array, tile.grid_array) {\n\n (GridOrEmpty::Grid(mut a), GridOrEmpty::Grid(g)) => {\n\n a.data = a\n\n .inner_ref()\n\n .iter()\n\n .zip(g.inner_ref())\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 7, "score": 226374.5879234085 }, { "content": "/// Helper function to downcast a mutable arrow array from a builder\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_mut_array<T: Any>(array: &mut dyn ArrayBuilder) -> &mut T {\n\n array.as_any_mut().downcast_mut().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 8, "score": 222577.39202421316 }, { "content": "pub fn fold_future<T, C>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n C: AccFunction,\n\n{\n\n tokio::task::spawn_blocking(|| fold_fn::<T, C>(accu, tile)).then(|x| async move {\n\n match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n }\n\n })\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 9, "score": 216902.6770307434 }, { "content": "/// Create an `arrow` struct from column meta data and data\n\npub fn struct_array_from_data(\n\n columns: Vec<Field>,\n\n column_values: Vec<ArrayRef>,\n\n number_of_features: usize,\n\n) -> Result<StructArray> {\n\n Ok(StructArray::from(\n\n ArrayData::builder(arrow::datatypes::DataType::Struct(columns))\n\n .child_data(\n\n column_values\n\n .into_iter()\n\n .map(|a| a.data().clone())\n\n .collect(),\n\n )\n\n .len(number_of_features)\n\n .build()?,\n\n ))\n\n}\n\n\n", "file_path": "datatypes/src/collections/feature_collection.rs", "rank": 10, "score": 214526.5351864032 }, { "content": "pub fn no_data_ignoring_fold_future<T, C>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n C: NoDataIgnoringAccFunction,\n\n{\n\n tokio::task::spawn_blocking(|| no_data_ignoring_fold_fn::<T, 
C>(accu, tile)).then(\n\n |x| async move {\n\n match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 11, "score": 212223.46420671756 }, { "content": "// TODO: move test helper somewhere else?\n\npub fn add_ndvi_dataset(ctx: &mut MockExecutionContext) -> DatasetId {\n\n let id: DatasetId = InternalDatasetId::new().into();\n\n ctx.add_meta_data(id.clone(), Box::new(create_ndvi_meta_data()));\n\n id\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 12, "score": 208746.10332273276 }, { "content": "fn create_lookup_table(channel: &Channel, offset: f64, slope: f64, _pool: &ThreadPool) -> Vec<f32> {\n\n // this should propably be done with SIMD not a threadpool\n\n (0..1024)\n\n .into_iter()\n\n .map(|i| {\n\n let radiance = offset + f64::from(i) * slope;\n\n channel.calculate_temperature_from_radiance(radiance) as f32\n\n })\n\n .collect::<Vec<f32>>()\n\n}\n\n\n", "file_path": "operators/src/processing/meteosat/temperature.rs", "rank": 13, "score": 208030.5477633497 }, { "content": "#[allow(clippy::float_cmp)] // allow since NO DATA is a specific value\n\nfn process_raster(number_statistics: &mut NumberStatistics, tile_grid: &Grid2D<f64>) {\n\n let no_data_value = tile_grid.no_data_value();\n\n\n\n if let Some(no_data_value) = no_data_value {\n\n for &value in &tile_grid.data {\n\n if value == no_data_value {\n\n number_statistics.add_no_data();\n\n } else {\n\n number_statistics.add(value);\n\n }\n\n }\n\n } else {\n\n for &value in &tile_grid.data {\n\n number_statistics.add(value);\n\n }\n\n }\n\n}\n\n\n\n/// The statistics summary output type for each raster input\n", "file_path": "operators/src/plot/statistics.rs", "rank": 14, "score": 204269.00150613167 }, { "content": "pub fn fold_by_blit_impl<T>(\n\n accu: RasterTileAccu2D<T>,\n\n tile: RasterTile2D<T>,\n\n) -> Result<RasterTileAccu2D<T>>\n\nwhere\n\n T: Pixel,\n\n{\n\n let mut accu_tile = accu.tile;\n\n let pool = accu.pool;\n\n let t_union = accu_tile.time.union(&tile.time)?;\n\n\n\n accu_tile.time = t_union;\n\n\n\n if tile.grid_array.is_empty() && accu_tile.no_data_value() == tile.no_data_value() {\n\n return Ok(RasterTileAccu2D::new(accu_tile, pool));\n\n }\n\n\n\n let mut materialized_accu_tile = accu_tile.into_materialized_tile();\n\n\n\n let blit_tile = match materialized_accu_tile.blit(tile) {\n", "file_path": "operators/src/adapters/raster_subquery/raster_subquery_adapter.rs", "rank": 15, "score": 197824.4528915851 }, { "content": "pub fn create_random_user_session_helper() -> UserSession {\n\n let user_id = UserId::new();\n\n\n\n UserSession {\n\n id: SessionId::new(),\n\n user: UserInfo {\n\n id: user_id,\n\n email: Some(user_id.to_string()),\n\n real_name: Some(user_id.to_string()),\n\n },\n\n created: MIN_DATETIME,\n\n valid_until: MAX_DATETIME,\n\n project: None,\n\n view: None,\n\n roles: vec![user_id.into(), Role::user_role_id()],\n\n }\n\n}\n\n\n\n#[allow(clippy::missing_panics_doc)]\n\npub async fn create_project_helper<C: ProContext>(ctx: &C) -> (UserSession, ProjectId) {\n", "file_path": "services/src/pro/util/tests.rs", "rank": 16, "score": 197741.32549697792 }, { "content": "#[allow(dead_code)]\n\n#[allow(clippy::type_complexity)]\n\n#[allow(clippy::needless_pass_by_value)]\n\npub fn fold_by_coordinate_lookup_impl<T>(\n\n accu: TileWithProjectionCoordinates<T>,\n\n tile: RasterTile2D<T>,\n\n) -> Result<TileWithProjectionCoordinates<T>>\n\nwhere\n\n T: 
Pixel,\n\n{\n\n const MIN_ELEMENTS_IN_PAR_CHUNK: usize = 32 * 512; // this must never be smaller than 1\n\n let min_rows_in_par_chunk =\n\n num::integer::div_ceil(MIN_ELEMENTS_IN_PAR_CHUNK, tile.grid_array.axis_size_x()).max(1);\n\n\n\n // println!(\"fold_by_coordinate_lookup_impl {:?}\", &tile.tile_position);\n\n let mut accu = accu;\n\n let t_union = accu.accu_tile.time.union(&tile.time)?;\n\n\n\n accu.tile_mut().time = t_union;\n\n\n\n if tile.grid_array.is_empty() {\n\n return Ok(accu);\n\n }\n", "file_path": "operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs", "rank": 17, "score": 195150.32187216653 }, { "content": "pub fn create_feature_aggregator<P: Pixel>(\n\n number_of_features: usize,\n\n aggregation: FeatureAggregationMethod,\n\n) -> TypedAggregator {\n\n match aggregation {\n\n FeatureAggregationMethod::First => match P::TYPE {\n\n RasterDataType::U8\n\n | RasterDataType::U16\n\n | RasterDataType::U32\n\n | RasterDataType::U64\n\n | RasterDataType::I8\n\n | RasterDataType::I16\n\n | RasterDataType::I32\n\n | RasterDataType::I64 => FirstValueIntAggregator::new(number_of_features).into_typed(),\n\n RasterDataType::F32 | RasterDataType::F64 => {\n\n FirstValueFloatAggregator::new(number_of_features).into_typed()\n\n }\n\n },\n\n FeatureAggregationMethod::Mean => MeanValueAggregator::new(number_of_features).into_typed(),\n\n }\n", "file_path": "operators/src/processing/raster_vector_join/mod.rs", "rank": 18, "score": 193096.82840424535 }, { "content": "pub fn check_allowed_http_methods<'a, T, TRes>(\n\n test_helper: T,\n\n allowed_methods: &'a [Method],\n\n) -> impl futures::Future + 'a\n\nwhere\n\n T: Fn(Method) -> TRes + 'a,\n\n TRes: futures::Future<Output = ServiceResponse> + 'a,\n\n{\n\n check_allowed_http_methods2(test_helper, allowed_methods, |res| res)\n\n}\n\n\n\npub async fn send_test_request<C: SimpleContext>(\n\n req: test::TestRequest,\n\n ctx: C,\n\n) -> ServiceResponse {\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(web::Data::new(ctx))\n\n .wrap(\n\n middleware::ErrorHandlers::default()\n", "file_path": "services/src/util/tests.rs", "rank": 19, "score": 192637.1704737774 }, { "content": "/// Method to generate an `Iterator` over all `GridIdx2D` in `GridBounds`\n\npub fn grid_idx_iter_2d<B>(bounds: &B) -> impl Iterator<Item = GridIdx2D>\n\nwhere\n\n B: GridBounds<IndexArray = [isize; 2]>,\n\n{\n\n let GridIdx([y_s, x_s]) = bounds.min_index();\n\n let GridIdx([y_e, x_e]) = bounds.max_index();\n\n\n\n (y_s..=y_e).flat_map(move |y| (x_s..=x_e).map(move |x| [y, x].into()))\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Grid<D, T> {\n\n pub shape: D,\n\n pub data: Vec<T>,\n\n pub no_data_value: Option<T>,\n\n}\n\n\n\npub type Grid1D<T> = Grid<GridShape1D, T>;\n\npub type Grid2D<T> = Grid<GridShape2D, T>;\n", "file_path": "datatypes/src/raster/grid.rs", "rank": 20, "score": 192030.87014804344 }, { "content": "/// Loop through an iterator by yielding the current and previous tuple. 
Starts with the\n\n/// (first, second) item, so the iterator must have more than one item to create an output.\n\nfn two_tuple_windows<I, T>(mut iter: I) -> impl Iterator<Item = (T, T)>\n\nwhere\n\n I: Iterator<Item = T>,\n\n T: Copy,\n\n{\n\n let mut last = iter.next();\n\n\n\n iter.map(move |item| {\n\n let output = (last.unwrap(), item);\n\n last = Some(item);\n\n output\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n use geoengine_datatypes::primitives::{\n", "file_path": "operators/src/processing/point_in_polygon.rs", "rank": 21, "score": 190310.5989094275 }, { "content": "pub fn update_project_helper(project: ProjectId) -> UpdateProject {\n\n UpdateProject {\n\n id: project,\n\n name: Some(\"TestUpdate\".to_string()),\n\n description: None,\n\n layers: Some(vec![LayerUpdate::UpdateOrInsert(Layer {\n\n workflow: WorkflowId::new(),\n\n name: \"L1\".to_string(),\n\n visibility: Default::default(),\n\n symbology: Symbology::Raster(RasterSymbology {\n\n opacity: 1.0,\n\n colorizer: Colorizer::Rgba,\n\n }),\n\n })]),\n\n plots: None,\n\n bounds: None,\n\n time_step: None,\n\n }\n\n}\n\n\n", "file_path": "services/src/util/tests.rs", "rank": 22, "score": 189855.65340170436 }, { "content": "pub fn catch_unwind_silent<F: FnOnce() -> R + panic::UnwindSafe, R>(\n\n f: F,\n\n) -> std::thread::Result<R> {\n\n let prev_hook = panic::take_hook();\n\n panic::set_hook(Box::new(|_| {}));\n\n let result = panic::catch_unwind(f);\n\n panic::set_hook(prev_hook);\n\n result\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 23, "score": 189846.90520882426 }, { "content": "/// Loop through an iterator by yielding the current and previous tuple. Starts with the\n\n/// (first, second) item, so the iterator must have more than one item to create an output.\n\nfn two_tuple_windows<I, T>(mut iter: I) -> impl Iterator<Item = (T, T)>\n\nwhere\n\n I: Iterator<Item = T>,\n\n T: Copy,\n\n{\n\n let mut last = iter.next();\n\n\n\n iter.map(move |item| {\n\n let output = (last.unwrap(), item);\n\n last = Some(item);\n\n output\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use geoengine_datatypes::primitives::MultiPolygon;\n\n\n\n use super::*;\n\n\n", "file_path": "operators/src/processing/point_in_polygon/tester.rs", "rank": 24, "score": 187924.06936405384 }, { "content": "pub fn get_token(req: &HttpRequest) -> Result<SessionId> {\n\n let header = req\n\n .headers()\n\n .get(header::AUTHORIZATION)\n\n .ok_or(Error::Authorization {\n\n source: Box::new(Error::MissingAuthorizationHeader),\n\n })?;\n\n let scheme = Bearer::parse(header).map_err(|_| Error::Authorization {\n\n source: Box::new(Error::InvalidAuthorizationScheme),\n\n })?;\n\n SessionId::from_str(scheme.token()).map_err(|err| Error::Authorization {\n\n source: Box::new(err),\n\n })\n\n}\n", "file_path": "services/src/handlers/mod.rs", "rank": 25, "score": 186196.0322322183 }, { "content": "fn multi_point_collection_benchmarks(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"MultiPointCollection\");\n\n\n\n group.bench_function(\"Builder Plain 100\", |b| {\n\n b.iter(|| {\n\n let mut builder = MultiPointCollection::builder().finish_header();\n\n for i in 0..100 {\n\n builder\n\n .push_geometry(Coordinate2D::new(i as f64, i as f64).into())\n\n .unwrap();\n\n builder\n\n .push_time_interval(TimeInterval::new_unchecked(i, i + 1))\n\n .unwrap();\n\n builder.finish_row();\n\n }\n\n black_box(builder.build())\n\n })\n\n });\n\n\n\n group.bench_function(\"Builder with Number 100\", |b| {\n", "file_path": 
"datatypes/benches/multi_point_collection.rs", "rank": 26, "score": 185647.28959216506 }, { "content": "/// Create a rayon thread pool with the given number of threads.\n\n/// Use `num_threads = 0` for auto number of threads.\n\npub fn create_rayon_thread_pool(num_threads: usize) -> Arc<ThreadPool> {\n\n rayon_destroy_global_thread_pool();\n\n\n\n let thread_pool = ThreadPoolBuilder::new()\n\n .num_threads(num_threads)\n\n .build()\n\n .expect(\"Thread Pool must be initializable\");\n\n\n\n Arc::new(thread_pool)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[should_panic(\n\n expected = \"The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }\"\n\n )]\n\n fn global_rayon_fail() {\n", "file_path": "operators/src/util/rayon.rs", "rank": 27, "score": 181481.33502690843 }, { "content": "#[inline]\n\nfn diag_distance(ul_coord: Coordinate2D, lr_coord: Coordinate2D) -> f64 {\n\n // calculate the distance between upper left and lower right coordinate in srs units\n\n let proj_ul_lr_vector = ul_coord - lr_coord;\n\n (proj_ul_lr_vector.x * proj_ul_lr_vector.x + proj_ul_lr_vector.y * proj_ul_lr_vector.y).sqrt()\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 28, "score": 177988.09998094244 }, { "content": "/// Get a lock for mutex and recover from poisoning\n\n/// TODO: proper poisoning handling\n\npub fn safe_lock_mutex<M, T>(lock: &M) -> MutexGuard<T>\n\nwhere\n\n M: Deref<Target = Mutex<T>>,\n\n{\n\n match lock.deref().lock() {\n\n Ok(guard) => guard,\n\n Err(poisoned) => poisoned.into_inner(),\n\n }\n\n}\n", "file_path": "operators/src/util/mod.rs", "rank": 29, "score": 175556.5000351266 }, { "content": "fn new_offset_key() -> RasterPropertiesKey {\n\n RasterPropertiesKey {\n\n domain: Some(\"msg\".into()),\n\n key: \"calibration_offset\".into(),\n\n }\n\n}\n\n\n", "file_path": "operators/src/processing/meteosat/mod.rs", "rank": 30, "score": 175199.41016097192 }, { "content": "fn random_points<T: Rng>(rng: &mut T, num_points: usize) -> MultiPointCollection {\n\n let coordinates = (0..num_points)\n\n .into_iter()\n\n .map(|_| (rng.gen_range(0.0..100.0), rng.gen_range(0.0..100.0)))\n\n .collect::<Vec<_>>();\n\n\n\n let time = vec![TimeInterval::default(); num_points];\n\n\n\n MultiPointCollection::from_data(\n\n MultiPoint::many(coordinates).unwrap(),\n\n time,\n\n Default::default(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "operators/benches/pip.rs", "rank": 31, "score": 173465.89783062646 }, { "content": "/// Parse grid offset, format is `x_step,y_step`\n\npub fn parse_grid_offset_option<'de, D>(deserializer: D) -> Result<Option<GridOffsets>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if s.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let split: Vec<Result<f64, std::num::ParseFloatError>> = s.split(',').map(str::parse).collect();\n\n\n\n let grid_offset = match *split.as_slice() {\n\n [Ok(x_step), Ok(y_step)] => GridOffsets { x_step, y_step },\n\n _ => return Err(D::Error::custom(\"Invalid grid offset\")),\n\n };\n\n\n\n Ok(Some(grid_offset))\n\n}\n\n\n", "file_path": "services/src/ogc/wcs/request.rs", "rank": 32, "score": 172667.75090465092 }, { "content": "/// Provides the intersection method\n\npub trait GridIntersection<Rhs = Self, Out = Self> {\n\n // Returns true if Self intesects Rhs\n\n fn intersection(&self, other: &Rhs) -> Option<Out>;\n\n}\n\n\n", "file_path": 
"datatypes/src/raster/grid_traits.rs", "rank": 33, "score": 171006.55861119876 }, { "content": "/// Serde deserializer <https://docs.rs/serde_qs/0.6.0/serde_qs/index.html#flatten-workaround>\n\npub fn from_str<'de, D, S>(deserializer: D) -> Result<S, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n S: std::str::FromStr,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n S::from_str(s).map_err(|_error| D::Error::custom(\"could not parse string\"))\n\n}\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 34, "score": 169318.7636979772 }, { "content": "#[test]\n\nfn fn_test() {\n\n let mut stream = block_on_stream(fn_stream());\n\n\n\n assert_eq!(stream.next(), Some(1));\n\n assert_eq!(stream.next(), Some(0));\n\n assert_eq!(stream.next(), None);\n\n}\n", "file_path": "operators/tests/streams.rs", "rank": 35, "score": 168215.0109137256 }, { "content": "/// reorders the given tuple of coordinates, resolutions, etc. using the axis ordering for `spatial_reference` to give (x, y)\n\npub fn tuple_from_ogc_params(\n\n a: f64,\n\n b: f64,\n\n spatial_reference: SpatialReference,\n\n) -> Result<(f64, f64)> {\n\n match spatial_reference_specification(&spatial_reference.proj_string()?)?\n\n .axis_order\n\n .ok_or(error::Error::AxisOrderingNotKnownForSrs {\n\n srs_string: spatial_reference.srs_string(),\n\n })? {\n\n AxisOrder::EastNorth => Ok((a, b)),\n\n AxisOrder::NorthEast => Ok((b, a)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use chrono::{TimeZone, Utc};\n\n use geoengine_datatypes::spatial_reference::SpatialReferenceAuthority;\n\n use serde::de::value::StringDeserializer;\n", "file_path": "services/src/ogc/util.rs", "rank": 36, "score": 167183.8959934554 }, { "content": "/// Create `GdalDatasetParameters` from the infos in the given `dataset` and its `band`.\n\n/// `path` is the location of the actual data, `band_out` allows optionally specifying a different\n\n/// band in the resulting parameters, otherwise `band` is used.\n\npub fn gdal_parameters_from_dataset(\n\n dataset: &Dataset,\n\n band: usize,\n\n path: &Path,\n\n band_out: Option<usize>,\n\n open_options: Option<Vec<String>>,\n\n) -> Result<GdalDatasetParameters> {\n\n let rasterband = &dataset.rasterband(band as isize)?;\n\n\n\n Ok(GdalDatasetParameters {\n\n file_path: PathBuf::from(path),\n\n rasterband_channel: band_out.unwrap_or(band),\n\n geo_transform: dataset.geo_transform().context(error::Gdal)?.into(),\n\n file_not_found_handling: FileNotFoundHandling::Error,\n\n no_data_value: rasterband.no_data_value(),\n\n properties_mapping: None,\n\n width: rasterband.x_size(),\n\n height: rasterband.y_size(),\n\n gdal_open_options: open_options,\n\n gdal_config_options: None,\n\n })\n\n}\n", "file_path": "operators/src/util/gdal.rs", "rank": 37, "score": 167183.50331907865 }, { "content": "/// Create a `RasterResultDescriptor` for the given `band` and `dataset`. If the raster data type is\n\n/// unknown, the default is F64 unless it is otherwise specified by `default_data_type`. 
If the data\n\n/// type is a complex floating point type, an error is returned\n\npub fn raster_descriptor_from_dataset(\n\n dataset: &Dataset,\n\n band: isize,\n\n default_data_type: Option<RasterDataType>,\n\n) -> Result<RasterResultDescriptor> {\n\n let rasterband = &dataset.rasterband(band)?;\n\n\n\n let spatial_ref: SpatialReference =\n\n dataset.spatial_ref()?.try_into().context(error::DataType)?;\n\n\n\n let data_type = match rasterband.band_type() {\n\n GDALDataType::GDT_Byte => RasterDataType::U8,\n\n GDALDataType::GDT_UInt16 => RasterDataType::U16,\n\n GDALDataType::GDT_Int16 => RasterDataType::I16,\n\n GDALDataType::GDT_UInt32 => RasterDataType::U32,\n\n GDALDataType::GDT_Int32 => RasterDataType::I32,\n\n GDALDataType::GDT_Float32 => RasterDataType::F32,\n\n GDALDataType::GDT_Float64 => RasterDataType::F64,\n\n GDALDataType::GDT_Unknown => default_data_type.unwrap_or(RasterDataType::F64),\n\n _ => return Err(Error::GdalRasterDataTypeNotSupported),\n\n };\n\n\n\n Ok(RasterResultDescriptor {\n\n data_type,\n\n spatial_reference: spatial_ref.into(),\n\n measurement: Measurement::Unitless,\n\n no_data_value: rasterband.no_data_value(),\n\n })\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 38, "score": 167183.41528345132 }, { "content": "#[allow(clippy::unnecessary_wraps)] // TODO: remove line once implemented fully\n\nfn get_legend_graphic<C: Context>(\n\n _request: &GetLegendGraphic,\n\n _ctx: &C,\n\n _endpoint: WorkflowId,\n\n) -> Result<HttpResponse> {\n\n // TODO: implement\n\n Ok(HttpResponse::InternalServerError().finish())\n\n}\n\n\n", "file_path": "services/src/handlers/wms.rs", "rank": 39, "score": 163163.50940748822 }, { "content": "/// Serde deserializer <https://docs.rs/serde_qs/0.6.0/serde_qs/index.html#flatten-workaround>\n\npub fn from_str_option<'de, D, S>(deserializer: D) -> Result<Option<S>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n S: std::str::FromStr,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n if s.is_empty() {\n\n Ok(None)\n\n } else {\n\n S::from_str(s)\n\n .map(Some)\n\n .map_err(|_error| D::Error::custom(\"could not parse string\"))\n\n }\n\n}\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 40, "score": 162769.28916067164 }, { "content": "/// Serde deserializer for booleans with case insensitive strings\n\npub fn bool_option_case_insensitive<'de, D>(deserializer: D) -> Result<Option<bool>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = <&str as serde::Deserialize>::deserialize(deserializer)?;\n\n if s.is_empty() {\n\n Ok(None)\n\n } else {\n\n bool::from_str(&s.to_lowercase())\n\n .map(Some)\n\n .map_err(|_error| {\n\n D::Error::custom(format_args!(\"could not parse string as boolean: {}\", s))\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod mod_tests {\n\n use super::*;\n\n\n", "file_path": "services/src/util/mod.rs", "rank": 41, "score": 162192.40708556943 }, { "content": "pub fn first_tile_fold_fn<T>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n{\n\n if acc.initial_state {\n\n let mut next_accu = tile;\n\n next_accu.time = acc.accu_tile.time;\n\n\n\n TemporalRasterAggregationTileAccu {\n\n accu_tile: next_accu,\n\n initial_state: false,\n\n pool: acc.pool,\n\n }\n\n } else {\n\n acc\n\n }\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 42, "score": 159552.81916344631 }, { "content": 
"#[allow(clippy::needless_pass_by_value)]\n\npub fn last_tile_fold_fn<T>(\n\n acc: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> TemporalRasterAggregationTileAccu<T>\n\nwhere\n\n T: Pixel,\n\n{\n\n let mut next_accu = tile;\n\n next_accu.time = acc.accu_tile.time;\n\n\n\n TemporalRasterAggregationTileAccu {\n\n accu_tile: next_accu,\n\n initial_state: false,\n\n pool: acc.pool,\n\n }\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 43, "score": 159552.81916344631 }, { "content": "struct JoinerState<G, C> {\n\n covered_pixels: C,\n\n aggregator: TypedAggregator,\n\n g: PhantomData<G>,\n\n}\n\n\n", "file_path": "operators/src/processing/raster_vector_join/non_aggregated.rs", "rank": 44, "score": 159111.3071821076 }, { "content": "struct VectorRasterJoiner<G, C> {\n\n state: Option<JoinerState<G, C>>,\n\n aggregation_method: FeatureAggregationMethod,\n\n}\n\n\n\nimpl<G, C> VectorRasterJoiner<G, C>\n\nwhere\n\n G: Geometry + ArrowTyped + 'static,\n\n C: CoveredPixels<G>,\n\n FeatureCollection<G>: PixelCoverCreator<G, C = C>,\n\n{\n\n fn new(aggregation_method: FeatureAggregationMethod) -> Self {\n\n // TODO: is it possible to do the initialization here?\n\n\n\n Self {\n\n state: None,\n\n aggregation_method,\n\n }\n\n }\n\n\n", "file_path": "operators/src/processing/raster_vector_join/non_aggregated.rs", "rank": 45, "score": 157251.21515554166 }, { "content": "pub fn grid_eq_with_no_data<D, T>(g1: &Grid<D, T>, g2: &Grid<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n if g1.data.len() != g2.data.len() || g1.shape.ne(&g2.shape) {\n\n return false;\n\n }\n\n\n\n if !match (g1.no_data_value, g2.no_data_value) {\n\n (None, None) => true,\n\n (Some(_), None) => false,\n\n (_, Some(y)) => g1.is_no_data(y),\n\n } {\n\n return false;\n\n }\n\n\n\n for (l, r) in g1.data.iter().zip(g2.data.iter()) {\n\n if g1.is_no_data(*l) && g1.is_no_data(*r) {\n\n continue;\n\n }\n\n if l != r {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 46, "score": 157095.23390117878 }, { "content": "/// Provides the contains method\n\npub trait GridContains<Rhs = Self> {\n\n // Returns true if Self contains Rhs\n\n fn contains(&self, rhs: &Rhs) -> bool;\n\n}\n\n\n\nimpl<A, T> GridContains<GridIdx<A>> for T\n\nwhere\n\n T: GridBounds<IndexArray = A>,\n\n A: AsRef<[isize]>,\n\n{\n\n fn contains(&self, rhs: &GridIdx<A>) -> bool {\n\n for ((&min_idx, &max_idx), &idx) in self\n\n .min_index()\n\n .as_slice()\n\n .iter()\n\n .zip(self.max_index().as_slice())\n\n .zip(rhs.as_slice())\n\n {\n\n if !crate::util::ranges::value_in_range_inclusive(idx, min_idx, max_idx) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "datatypes/src/raster/grid_traits.rs", "rank": 47, "score": 156313.72859654148 }, { "content": "pub fn eq_with_no_data<D, T>(g1: &GridOrEmpty<D, T>, g2: &GridOrEmpty<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n match (g1, g2) {\n\n (GridOrEmpty::Grid(g1), GridOrEmpty::Grid(g2)) => grid_eq_with_no_data(g1, g2),\n\n (GridOrEmpty::Empty(g1), GridOrEmpty::Empty(g2)) => empty_grid_eq_with_no_data(g1, g2),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 48, "score": 154959.97736092604 }, { "content": "#[allow(clippy::missing_panics_doc)]\n\npub fn create_ndvi_meta_data() -> GdalMetaDataRegular {\n\n let no_data_value = Some(0.); // TODO: is it really 0?\n\n 
GdalMetaDataRegular {\n\n start: TimeInstance::from_millis(1_388_534_400_000).unwrap(),\n\n step: TimeStep {\n\n granularity: TimeGranularity::Months,\n\n step: 1,\n\n },\n\n time_placeholders: hashmap! {\n\n \"%_START_TIME_%\".to_string() => GdalSourceTimePlaceholder {\n\n format: \"%Y-%m-%d\".to_string(),\n\n reference: TimeReference::Start,\n\n },\n\n },\n\n params: GdalDatasetParameters {\n\n file_path: test_data!(\"raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF\").into(),\n\n rasterband_channel: 1,\n\n geo_transform: GdalDatasetGeoTransform {\n\n origin_coordinate: (-180., 90.).into(),\n\n x_pixel_size: 0.1,\n", "file_path": "operators/src/util/gdal.rs", "rank": 49, "score": 152206.7301056306 }, { "content": "#[allow(dead_code)]\n\npub fn fold_by_blit_future<T>(\n\n accu: RasterTileAccu2D<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<RasterTileAccu2D<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| fold_by_blit_impl(accu, tile)).then(|x| async move {\n\n match x {\n\n Ok(r) => r,\n\n Err(e) => Err(e.into()),\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use geoengine_datatypes::{\n\n primitives::{Measurement, SpatialPartition2D, SpatialResolution, TimeInterval},\n\n raster::{Grid, GridShape, RasterDataType},\n", "file_path": "operators/src/adapters/raster_subquery/raster_subquery_adapter.rs", "rank": 50, "score": 152206.7301056306 }, { "content": "/// create an axis aligned rectangle using the values \"a,b,c,d\" from OGC bbox-like parameters using the axis ordering for `spatial_reference`\n\npub fn rectangle_from_ogc_params<A: AxisAlignedRectangle>(\n\n values: [f64; 4],\n\n spatial_reference: SpatialReference,\n\n) -> Result<A> {\n\n let [a, b, c, d] = values;\n\n match spatial_reference_specification(&spatial_reference.proj_string()?)?\n\n .axis_order\n\n .ok_or(error::Error::AxisOrderingNotKnownForSrs {\n\n srs_string: spatial_reference.srs_string(),\n\n })? 
{\n\n AxisOrder::EastNorth => {\n\n A::from_min_max((a, b).into(), (c, d).into()).context(error::DataType)\n\n }\n\n AxisOrder::NorthEast => {\n\n A::from_min_max((b, a).into(), (d, c).into()).context(error::DataType)\n\n }\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 51, "score": 151782.7029403632 }, { "content": "/// Parse a spatial resolution, format is: \"resolution\" or \"xResolution,yResolution\"\n\npub fn parse_spatial_resolution_option<'de, D>(\n\n deserializer: D,\n\n) -> Result<Option<SpatialResolution>, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if s.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let split: Vec<Result<f64, std::num::ParseFloatError>> = s.split(',').map(str::parse).collect();\n\n\n\n let spatial_resolution = match *split.as_slice() {\n\n [Ok(resolution)] => {\n\n SpatialResolution::new(resolution, resolution).map_err(D::Error::custom)?\n\n }\n\n [Ok(x_resolution), Ok(y_resolution)] => {\n\n SpatialResolution::new(x_resolution, y_resolution).map_err(D::Error::custom)?\n\n }\n\n _ => return Err(D::Error::custom(\"Invalid spatial resolution\")),\n\n };\n\n\n\n Ok(Some(spatial_resolution))\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 52, "score": 151775.92401856396 }, { "content": "/// this method performs the transformation of a query rectangle in `target` projection\n\n/// to a new query rectangle with coordinates in the `source` projection\n\npub fn reproject_query<S: AxisAlignedRectangle>(\n\n query: QueryRectangle<S>,\n\n source: SpatialReference,\n\n target: SpatialReference,\n\n) -> Result<QueryRectangle<S>> {\n\n let projector_source_target = CoordinateProjector::from_known_srs(source, target)?;\n\n let projector_target_source = CoordinateProjector::from_known_srs(target, source)?;\n\n\n\n let p_bbox = query\n\n .spatial_bounds\n\n .reproject_clipped(&projector_target_source)?;\n\n let s_bbox = p_bbox.reproject(&projector_source_target)?;\n\n\n\n let p_spatial_resolution =\n\n suggest_pixel_size_from_diag_cross_projected(s_bbox, p_bbox, query.spatial_resolution)?;\n\n Ok(QueryRectangle {\n\n spatial_bounds: p_bbox,\n\n spatial_resolution: p_spatial_resolution,\n\n time_interval: query.time_interval,\n\n })\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 53, "score": 151775.92401856396 }, { "content": "pub fn empty_grid_eq_with_no_data<D, T>(g1: &EmptyGrid<D, T>, g2: &EmptyGrid<D, T>) -> bool\n\nwhere\n\n D: PartialEq,\n\n T: PartialEq + Copy,\n\n{\n\n g1.shape.eq(&g2.shape) && g1.is_no_data(g2.no_data_value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::raster::{EmptyGrid, Grid2D, GridShape2D};\n\n use crate::util::test::{empty_grid_eq_with_no_data, grid_eq_with_no_data};\n\n\n\n #[test]\n\n fn test_empty_grid_eq_with_no_data_integral_ok() {\n\n let d1: GridShape2D = [3, 2].into();\n\n let d2: GridShape2D = [3, 2].into();\n\n\n\n let ndv1 = 42;\n\n let ndv2 = 42;\n", "file_path": "datatypes/src/util/test.rs", "rank": 54, "score": 150906.49877729436 }, { "content": "#[allow(dead_code)]\n\npub fn fold_by_coordinate_lookup_future<T>(\n\n accu: TileWithProjectionCoordinates<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl TryFuture<Ok = TileWithProjectionCoordinates<T>, Error = error::Error>\n\nwhere\n\n T: Pixel,\n\n{\n\n // println!(\"fold_by_coordinate_lookup_future {:?}\", &tile.tile_position);\n\n tokio::task::spawn_blocking(|| fold_by_coordinate_lookup_impl(accu, tile)).then(\n\n |x| async move {\n\n match x {\n\n Ok(r) => 
r,\n\n Err(e) => Err(e.into()),\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs", "rank": 55, "score": 150426.5389660251 }, { "content": "/// Tries to reproject all coordinates at once. If this fails, tries to reproject coordinate by coordinate.\n\n/// It returns all coordinates in input order.\n\n/// In case of success it returns `Some(Coordinate2D)` and `None` otherwise.\n\npub fn project_coordinates_fail_tolerant<P: CoordinateProjection>(\n\n i: &[Coordinate2D],\n\n p: &P,\n\n) -> Vec<Option<Coordinate2D>> {\n\n if let Ok(projected_all) = p.project_coordinates(&i) {\n\n return projected_all\n\n .into_iter()\n\n .map(Some)\n\n .collect::<Vec<Option<Coordinate2D>>>();\n\n }\n\n\n\n let individual_projected: Vec<Option<Coordinate2D>> = i\n\n .iter()\n\n .map(|&c| (c, c.reproject(p)))\n\n //.inspect(|(c, c_p)| {\n\n // dbg!(c, c_p);\n\n //})\n\n .map(|(_, c_p)| c_p.ok())\n\n .collect();\n\n // For debuging use this to find oput how many coordinates could be transformed.\n\n //dbg!(\n\n // individual_projected.iter().filter(|c| c.is_some()).count(),\n\n // i.len()\n\n //);\n\n individual_projected\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 56, "score": 149845.9642777424 }, { "content": "fn calculate_esd(timestamp: &DateTime<Utc>) -> f64 {\n\n let perihelion = f64::from(timestamp.ordinal()) - 3.0;\n\n let e = 0.0167;\n\n let theta = std::f64::consts::TAU * (perihelion / 365.0);\n\n 1.0 - e * theta.cos()\n\n}\n\n\n\n#[async_trait]\n\nimpl<Q> QueryProcessor for ReflectanceProcessor<Q>\n\nwhere\n\n Q: QueryProcessor<Output = RasterTile2D<PixelOut>, SpatialBounds = SpatialPartition2D>,\n\n{\n\n type Output = RasterTile2D<PixelOut>;\n\n type SpatialBounds = SpatialPartition2D;\n\n\n\n async fn query<'a>(\n\n &'a self,\n\n query: RasterQueryRectangle,\n\n ctx: &'a dyn QueryContext,\n\n ) -> Result<BoxStream<'a, Result<Self::Output>>> {\n", "file_path": "operators/src/processing/meteosat/reflectance.rs", "rank": 57, "score": 148891.78746191057 }, { "content": "pub fn mean_tile_fold_future<T>(\n\n accu: TemporalMeanTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalMeanTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| {\n\n let mut accu = accu;\n\n accu.add_tile(tile)?;\n\n Ok(accu)\n\n })\n\n .then(|x| async move {\n\n match x {\n\n Ok(r) => r,\n\n Err(e) => Err(e.into()),\n\n }\n\n })\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/mean_aggregation_subquery.rs", "rank": 58, "score": 148714.7772809768 }, { "content": "pub trait TestDefault {\n\n /// Generate a default value used for testing. 
Use this instead of the `Default` trait\n\n /// if the default value only makes sense in tests and not in production code.\n\n fn test_default() -> Self;\n\n}\n\n\n", "file_path": "datatypes/src/util/test.rs", "rank": 59, "score": 148677.98188172275 }, { "content": "/// parse coordinate, format is \"x,y\"\n\npub fn parse_coordinate<'de, D>(deserializer: D) -> Result<Coordinate2D, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n let split: Vec<Result<f64, std::num::ParseFloatError>> = s.split(',').map(str::parse).collect();\n\n\n\n match *split.as_slice() {\n\n [Ok(x), Ok(y)] => Ok(Coordinate2D::new(x, y)),\n\n _ => Err(D::Error::custom(\"Invalid coordinate\")),\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 60, "score": 148182.9145450219 }, { "content": "#[inline]\n\npub fn average_floor<I>(a: I, b: I) -> I\n\nwhere\n\n I: Copy\n\n + Add<I, Output = I>\n\n + Shr<usize, Output = I>\n\n + BitAnd<I, Output = I>\n\n + BitOr<I, Output = I>\n\n + BitXor<I, Output = I>,\n\n{\n\n (a & b) + ((a ^ b) >> 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn average_floor_checks() {\n\n assert_eq!(\n\n average_floor(631_152_000_000_i64, 946_684_800_001_i64),\n\n 788_918_400_000_i64\n\n );\n\n\n\n assert_eq!(average_floor(i64::MIN, i64::MAX), -1);\n\n }\n\n}\n", "file_path": "operators/src/util/math.rs", "rank": 61, "score": 148057.80224868265 }, { "content": "pub fn identity_accu<T: Pixel>(\n\n tile_info: TileInformation,\n\n query_rect: RasterQueryRectangle,\n\n no_data_value: T,\n\n pool: Arc<ThreadPool>,\n\n) -> impl Future<Output = Result<RasterTileAccu2D<T>>> {\n\n tokio::task::spawn_blocking(move || {\n\n let output_raster =\n\n EmptyGrid2D::new(tile_info.tile_size_in_pixels, T::from_(no_data_value)).into();\n\n let output_tile =\n\n RasterTile2D::new_with_tile_info(query_rect.time_interval, tile_info, output_raster);\n\n RasterTileAccu2D::new(output_tile, pool)\n\n })\n\n .map_err(From::from)\n\n}\n\n\n", "file_path": "operators/src/adapters/raster_subquery/raster_subquery_adapter.rs", "rank": 62, "score": 147993.1566858066 }, { "content": "/// Parse bbox, format is: \"x1,y1,x2,y2\"\n\npub fn parse_bbox<'de, D>(deserializer: D) -> Result<BoundingBox2D, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n let split: Vec<Result<f64, std::num::ParseFloatError>> = s.split(',').map(str::parse).collect();\n\n\n\n if let [Ok(x1), Ok(y1), Ok(x2), Ok(y2)] = *split.as_slice() {\n\n BoundingBox2D::new(Coordinate2D::new(x1, y1), Coordinate2D::new(x2, y2))\n\n .map_err(D::Error::custom)\n\n } else {\n\n Err(D::Error::custom(\"Invalid bbox\"))\n\n }\n\n}\n\n\n", "file_path": "services/src/ogc/util.rs", "rank": 63, "score": 146621.17347505974 }, { "content": "pub fn first_tile_fold_future<T>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| first_tile_fold_fn(accu, tile)).then(move |x| async move {\n\n match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n }\n\n })\n\n}\n\n\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 64, "score": 145481.34416327102 }, { "content": "pub fn last_tile_fold_future<T>(\n\n accu: TemporalRasterAggregationTileAccu<T>,\n\n tile: RasterTile2D<T>,\n\n) -> impl Future<Output = 
Result<TemporalRasterAggregationTileAccu<T>>>\n\nwhere\n\n T: Pixel,\n\n{\n\n tokio::task::spawn_blocking(|| last_tile_fold_fn(accu, tile)).then(move |x| async move {\n\n match x {\n\n Ok(r) => Ok(r),\n\n Err(e) => Err(e.into()),\n\n }\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TemporalRasterAggregationTileAccu<T> {\n\n accu_tile: RasterTile2D<T>,\n\n initial_state: bool,\n\n pool: Arc<ThreadPool>,\n", "file_path": "operators/src/processing/temporal_raster_aggregation/min_max_first_last_subquery.rs", "rank": 65, "score": 145481.34416327102 }, { "content": "pub fn get_config_element<'a, T>() -> Result<T>\n\nwhere\n\n T: ConfigElement + Deserialize<'a>,\n\n{\n\n get_config(T::KEY)\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 66, "score": 144875.12784653693 }, { "content": "#[test]\n\nfn table() {\n\n let schema = vec![\n\n Field::new(\"feature_start\", DataType::UInt64, false),\n\n Field::new(\"time_start\", DataType::Date64, false),\n\n ];\n\n\n\n let array = {\n\n let mut builder = StructBuilder::from_fields(schema, 5);\n\n\n\n for &(feature_start, time) in &[(0_u64, 0_i64), (1, 10), (2, 20), (3, 30), (4, 40)] {\n\n builder\n\n .field_builder(0)\n\n .and_then(|builder: &mut UInt64Builder| builder.append_value(feature_start).ok())\n\n .unwrap();\n\n builder\n\n .field_builder(1)\n\n .and_then(|builder: &mut Date64Builder| builder.append_value(time).ok())\n\n .unwrap();\n\n builder.append(true).unwrap();\n\n }\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 67, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn strings2() {\n\n let array = {\n\n let mut builder = StringBuilder::new(5);\n\n\n\n for string in &[\"hello\", \"from\", \"the\", \"other\", \"side\"] {\n\n builder.append_value(string).unwrap();\n\n }\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n assert_eq!(array.null_count(), 0);\n\n\n\n assert_eq!(array.value_offsets(), &[0, 5, 9, 12, 17, 21]);\n\n\n\n assert_eq!(array.value_length(0), 5);\n\n assert_eq!(array.value_length(1), \"from\".len() as i32);\n\n\n\n assert_eq!(array.value(0), \"hello\");\n\n assert_eq!(array.value(1), \"from\");\n\n assert_eq!(array.value(2), \"the\");\n\n assert_eq!(array.value(3), \"other\");\n\n assert_eq!(array.value(4), \"side\");\n\n\n\n assert_eq!(array.value_data().as_slice(), b\"hellofromtheotherside\");\n\n assert_eq!(array.value_offsets(), &[0, 5, 9, 12, 17, 21]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 68, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn serialize() {\n\n let array = {\n\n let mut builder = Int32Builder::new(5);\n\n builder\n\n .append_slice(&(1..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n\n\n // no serialization of arrays by now\n\n let json = serde_json::to_string(array.values()).unwrap();\n\n\n\n assert_eq!(json, \"[1,2,3,4,5]\");\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 69, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn list() {\n\n let array = {\n\n let mut builder = ListBuilder::new(Int32Builder::new(0));\n\n\n\n builder.values().append_value(0).unwrap();\n\n builder.values().append_value(1).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(2).unwrap();\n\n builder.values().append_value(3).unwrap();\n\n builder.values().append_value(4).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 2);\n\n 
assert_eq!(array.value_offsets(), &[0, 2, 5]);\n\n assert_eq!(array.value_length(0), 2);\n\n assert_eq!(array.value_length(1), 3);\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 70, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn multipoints() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let array = {\n\n let data = ArrayData::builder(DataType::List(Box::new(Field::new(\n\n \"\",\n\n DataType::FixedSizeList(Box::new(Field::new(\"\", DataType::Float64, false)), 2),\n\n false,\n\n ))))\n\n .len(2) // number of multipoints\n\n .add_buffer(Buffer::from(&[0_i32, 2, 5].to_byte_slice()))\n\n .add_child_data(\n\n ArrayData::builder(DataType::FixedSizeList(\n\n Box::new(Field::new(\"\", DataType::Float64, false)),\n\n 2,\n\n ))\n\n .len(5) // number of coordinates\n\n .add_child_data(\n\n ArrayData::builder(DataType::Float64)\n\n .len(10) // number of floats\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 71, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn simple() {\n\n let mut primitive_array_builder = Int32Builder::new(5);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_value(2).unwrap();\n\n primitive_array_builder\n\n .append_slice(&(3..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 0);\n\n\n\n let mask = vec![true, false, true, false, true].into();\n\n\n\n let filtered_array = filter(&primitive_array, &mask).unwrap();\n\n\n\n assert_eq!(filtered_array.len(), 3);\n\n assert_eq!(filtered_array.null_count(), 0);\n\n\n\n assert!(primitive_array.data().null_bitmap().is_none());\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 72, "score": 144535.0601912827 }, { "content": "#[test]\n\nfn strings() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let array = {\n\n let mut strings = String::new();\n\n let mut offsets: Vec<i32> = Vec::new();\n\n\n\n for string in &[\"hello\", \"from\", \"the\", \"other\", \"side\"] {\n\n offsets.push(strings.len() as i32);\n\n strings.push_str(string);\n\n }\n\n offsets.push(strings.len() as i32);\n\n\n\n let data = ArrayData::builder(DataType::Utf8)\n\n .len(offsets.len() - 1) // number of strings\n\n .add_buffer(Buffer::from(offsets.to_byte_slice()))\n\n .add_buffer(Buffer::from(strings.as_bytes()))\n\n .build()\n\n .unwrap();\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 73, "score": 144535.0601912827 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment)]\n\nfn ocl() {\n\n let array = {\n\n let mut builder = Int32Builder::new(5);\n\n builder\n\n .append_slice(&(1..=5).collect::<Vec<i32>>())\n\n .unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 5);\n\n\n\n let src = r#\"\n\n __kernel void add(__global int* buffer, int scalar) {\n\n buffer[get_global_id(0)] += scalar;\n\n }\n\n \"#;\n\n\n\n let pro_que = ProQue::builder()\n\n .src(src)\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 74, "score": 144534.8498763878 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment, clippy::identity_op)]\n\nfn binary() {\n\n let t1 = TimeInterval::new(0, 1).unwrap();\n\n let t2_bytes: [u8; 16] = unsafe { mem::transmute(t1) };\n\n let t2: TimeInterval = unsafe { mem::transmute(t2_bytes) };\n\n assert_eq!(t1, t2);\n\n\n\n let array = {\n\n let mut builder = FixedSizeBinaryBuilder::new(3, mem::size_of::<TimeInterval>() as i32);\n\n\n\n for &t in 
&[\n\n TimeInterval::new(0, 1).unwrap(),\n\n TimeInterval::new(1, 2).unwrap(),\n\n TimeInterval::new(2, 3).unwrap(),\n\n ] {\n\n let t_bytes: [u8; 16] = unsafe { mem::transmute(t) };\n\n builder.append_value(&t_bytes).unwrap();\n\n }\n\n\n\n builder.finish()\n\n };\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 75, "score": 144534.73041200277 }, { "content": "/// A version of `suggest_pixel_size_from_diag_cross` that takes a `partition` and a projected counterpart as input\n\npub fn suggest_pixel_size_from_diag_cross_projected<B: AxisAlignedRectangle>(\n\n bbox: B,\n\n bbox_projected: B,\n\n spatial_resolution: SpatialResolution,\n\n) -> Result<SpatialResolution> {\n\n let diag_pixels = euclidian_pixel_distance(bbox, spatial_resolution)?;\n\n\n\n let proj_ul_lr_distance =\n\n diag_distance(bbox_projected.upper_left(), bbox_projected.lower_right());\n\n\n\n let proj_ll_ur_distance =\n\n diag_distance(bbox_projected.lower_left(), bbox_projected.upper_right());\n\n\n\n let min_dist_r = proj_ul_lr_distance.min(proj_ll_ur_distance);\n\n\n\n Ok(SpatialResolution::new_unchecked(\n\n min_dist_r / diag_pixels,\n\n min_dist_r / diag_pixels,\n\n ))\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 76, "score": 144501.2038611528 }, { "content": "#[inline(never)]\n\nfn bench_raster_operator<'a, Q, T, F, C, B>(\n\n bench_id: &'static str,\n\n named_querys: Q,\n\n tiling_specs: T,\n\n named_operator: Box<dyn RasterOperator>,\n\n context_builder: F,\n\n chunk_byte_size: B,\n\n num_threads: C,\n\n) where\n\n F: Fn(TilingSpecification, usize) -> MockExecutionContext,\n\n C: IntoIterator<Item = &'a usize> + Clone,\n\n Q: IntoIterator<Item = &'a (&'static str, RasterQueryRectangle)> + Clone,\n\n T: IntoIterator<Item = &'a TilingSpecification> + Clone,\n\n B: IntoIterator<Item = &'a ChunkByteSize> + Clone,\n\n{\n\n let run_time = tokio::runtime::Runtime::new().unwrap();\n\n\n\n for current_threads in num_threads.into_iter() {\n\n for tiling_spec in tiling_specs.clone().into_iter() {\n\n let exe_ctx = (context_builder)(*tiling_spec, *current_threads);\n", "file_path": "operators/benches/workflows.rs", "rank": 77, "score": 144423.1459052975 }, { "content": "/// Opens a Gdal Dataset with the given `path`.\n\n/// Other crates should use this method for Gdal Dataset access as a workaround to avoid strange errors.\n\npub fn gdal_open_dataset(path: &Path) -> Result<Dataset> {\n\n gdal_open_dataset_ex(path, DatasetOptions::default())\n\n}\n\n\n", "file_path": "operators/src/util/gdal.rs", "rank": 78, "score": 142950.91434235137 }, { "content": "#[test]\n\nfn null_bytes() {\n\n let mut primitive_array_builder = Int32Builder::new(2);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n primitive_array_builder.append_option(None).unwrap();\n\n primitive_array_builder.append_option(Some(4)).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 3);\n\n\n\n if let Some(null_bitmap) = primitive_array.data().null_bitmap() {\n\n assert_eq!(null_bitmap.len(), 8); // len returns number of bits\n\n\n\n assert_eq!(\n\n null_bitmap.clone().into_buffer().as_slice(), // must clone bitmap because there is no way to get a reference to the data\n\n &[0b0000_1001] // right most bit is first element, 1 = valid value, 0 = null or unset\n\n );\n\n }\n\n}\n\n\n", "file_path": 
"datatypes/tests/example-arrow.rs", "rank": 79, "score": 142018.56264359778 }, { "content": "#[test]\n\nfn nested_lists() {\n\n let array = {\n\n let mut builder = ListBuilder::new(ListBuilder::new(Int32Builder::new(0)));\n\n\n\n // [[[10, 11, 12], [20, 21]], [[30]]\n\n builder\n\n .values()\n\n .values()\n\n .append_slice(&[10, 11, 12])\n\n .unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.values().values().append_slice(&[20, 21]).unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.values().values().append_slice(&[30]).unwrap();\n\n builder.values().append(true).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 80, "score": 142018.56264359778 }, { "content": "#[test]\n\nfn null_values() {\n\n let mut primitive_array_builder = Int32Builder::new(5);\n\n primitive_array_builder.append_value(1).unwrap();\n\n primitive_array_builder.append_null().unwrap();\n\n primitive_array_builder.append_slice(&[3, 4, 5]).unwrap();\n\n\n\n let primitive_array = primitive_array_builder.finish();\n\n\n\n assert_eq!(primitive_array.len(), 5);\n\n assert_eq!(primitive_array.null_count(), 1);\n\n\n\n let data = primitive_array.values();\n\n\n\n assert_eq!(data.len(), 5);\n\n\n\n assert_eq!(&data[0..1], &[1]);\n\n assert_eq!(&data[2..5], &[3, 4, 5]);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 81, "score": 142018.56264359778 }, { "content": "#[test]\n\nfn filter_example() {\n\n let a = Int32Array::from(vec![Some(1), Some(2), Some(3)]);\n\n\n\n // dbg!(&a);\n\n\n\n let b = filter(\n\n &a,\n\n &BooleanArray::from(vec![Some(true), Some(false), Some(true)]),\n\n )\n\n .unwrap();\n\n\n\n // dbg!(&b);\n\n\n\n assert_eq!(\n\n b.as_any().downcast_ref::<Int32Array>().unwrap(),\n\n &Int32Array::from(vec![Some(1), Some(3)])\n\n );\n\n\n\n let c = Int32Array::from(vec![Some(1), Some(2), None]);\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 82, "score": 142018.56264359778 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn multipoint_builder() {\n\n let float_builder = arrow::array::Float64Builder::new(0);\n\n let coordinate_builder = arrow::array::FixedSizeListBuilder::new(float_builder, 2);\n\n let mut multi_point_builder = arrow::array::ListBuilder::new(coordinate_builder);\n\n\n\n multi_point_builder\n\n .values()\n\n .values()\n\n .append_slice(&[0.0, 0.1])\n\n .unwrap();\n\n multi_point_builder.values().append(true).unwrap();\n\n multi_point_builder\n\n .values()\n\n .values()\n\n .append_slice(&[1.0, 1.1])\n\n .unwrap();\n\n multi_point_builder.values().append(true).unwrap();\n\n\n\n multi_point_builder.append(true).unwrap(); // first multi point\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 83, "score": 142018.3932402434 }, { "content": "#[test]\n\n#[allow(clippy::eq_op)]\n\nfn float_equality() {\n\n let mut floats = Float64Builder::new(3);\n\n floats.append_value(4.0).unwrap();\n\n floats.append_null().unwrap();\n\n floats.append_value(f64::NAN).unwrap();\n\n\n\n let floats = floats.finish();\n\n\n\n assert_eq!(floats, floats);\n\n\n\n let mut floats2 = Float64Builder::new(3);\n\n floats2.append_value(4.0).unwrap();\n\n floats2.append_null().unwrap();\n\n floats2.append_value(f64::NAN).unwrap();\n\n\n\n let floats2 = floats2.finish();\n\n\n\n assert_eq!(floats, floats2);\n\n\n\n let mut floats3 = Float64Builder::new(3);\n\n floats3.append_value(f64::NAN).unwrap();\n\n 
floats3.append_null().unwrap();\n\n floats3.append_value(4.0).unwrap();\n\n\n\n let floats3 = floats3.finish();\n\n\n\n assert_ne!(floats, floats3);\n\n assert_ne!(floats2, floats3);\n\n}\n\n\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 84, "score": 142018.3932402434 }, { "content": "/// Helper function to downcast an arrow array\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n///\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_array<T: Any>(array: &ArrayRef) -> &T {\n\n array.as_any().downcast_ref().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 85, "score": 140084.13421447933 }, { "content": "#[test]\n\nfn gt_eq_example() {\n\n let a = Int32Array::from(vec![Some(1), Some(2), None]);\n\n\n\n // dbg!(&a);\n\n\n\n let b = gt_eq_scalar(&a, 2).unwrap();\n\n\n\n // dbg!(&b);\n\n\n\n assert_eq!(&b, &BooleanArray::from(vec![Some(false), Some(true), None]));\n\n}\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 86, "score": 139616.37349562964 }, { "content": "#[test]\n\nfn fixed_size_list() {\n\n let array = {\n\n let mut builder = FixedSizeListBuilder::new(Int32Builder::new(0), 2);\n\n\n\n builder.values().append_value(0).unwrap();\n\n builder.values().append_value(1).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(2).unwrap();\n\n builder.values().append_value(3).unwrap();\n\n builder.append(true).unwrap();\n\n builder.values().append_value(4).unwrap();\n\n builder.values().append_value(5).unwrap();\n\n builder.append(true).unwrap();\n\n\n\n builder.finish()\n\n };\n\n\n\n assert_eq!(array.len(), 3);\n\n assert_eq!(array.value_offset(0), 0);\n\n assert_eq!(array.value_offset(1), 2);\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 87, "score": 139616.37349562964 }, { "content": "#[test]\n\n#[allow(clippy::cast_ptr_alignment)]\n\nfn multipoint_builder_bytes() {\n\n use arrow::datatypes::ToByteSlice;\n\n\n\n let coordinate_builder =\n\n arrow::array::FixedSizeBinaryBuilder::new(0, std::mem::size_of::<[f64; 2]>() as i32);\n\n let mut multi_point_builder = arrow::array::ListBuilder::new(coordinate_builder);\n\n\n\n multi_point_builder\n\n .values()\n\n .append_value(&[0.0, 0.1].to_byte_slice())\n\n .unwrap();\n\n multi_point_builder\n\n .values()\n\n .append_value(&[1.0, 1.1].to_byte_slice())\n\n .unwrap();\n\n\n\n multi_point_builder.append(true).unwrap(); // first multi point\n\n\n\n multi_point_builder\n\n .values()\n", "file_path": "datatypes/tests/example-arrow.rs", "rank": 88, "score": 139616.1631807348 }, { "content": "/// Method to generate a default `Colorizer`.\n\n///\n\n/// # Panics\n\n/// If T has no min max value\n\npub fn default_colorizer_gradient<T: Pixel>() -> Result<Colorizer> {\n\n Colorizer::linear_gradient(\n\n vec![\n\n (AsPrimitive::<f64>::as_(T::min_value()), RgbaColor::black())\n\n .try_into()\n\n .unwrap(),\n\n (AsPrimitive::<f64>::as_(T::max_value()), RgbaColor::white())\n\n .try_into()\n\n .unwrap(),\n\n ],\n\n RgbaColor::transparent(),\n\n RgbaColor::pink(),\n\n )\n\n .map_err(error::Error::from)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use geoengine_datatypes::{\n\n primitives::{Coordinate2D, SpatialPartition2D, SpatialResolution},\n", "file_path": "operators/src/util/raster_stream_to_png.rs", "rank": 89, "score": 139312.16937417406 }, { "content": "fn properties_from_band(properties: &mut RasterProperties, gdal_dataset: &GdalRasterBand) {\n\n if let Some(scale) = 
gdal_dataset.metadata_item(\"scale\", \"\") {\n\n properties.scale = scale.parse::<f64>().ok();\n\n };\n\n\n\n if let Some(offset) = gdal_dataset.metadata_item(\"offset\", \"\") {\n\n properties.offset = offset.parse::<f64>().ok();\n\n };\n\n\n\n if let Some(band_name) = gdal_dataset.metadata_item(\"band_name\", \"\") {\n\n properties.band_name = Some(band_name);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::engine::{MockExecutionContext, MockQueryContext};\n\n use crate::test_data;\n\n use crate::util::gdal::add_ndvi_dataset;\n", "file_path": "operators/src/source/gdal_source.rs", "rank": 90, "score": 138480.14060777015 }, { "content": "pub trait Plot {\n\n /// Creates a Vega string for embedding it into a Html page\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method fails on internal errors of the plot.\n\n ///\n\n fn to_vega_embeddable(&self, allow_interactions: bool) -> Result<PlotData>;\n\n\n\n // TODO: create some PNG output, cf. https://github.com/procyon-rs/vega_lite_3.rs/issues/18\n\n // fn to_png(&self, width_px: u16, height_px: u16) -> Vec<u8>;\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PlotData {\n\n pub vega_string: String,\n\n pub metadata: PlotMetaData,\n\n}\n\n\n", "file_path": "datatypes/src/plots/mod.rs", "rank": 91, "score": 138124.78332481562 }, { "content": "pub fn get_config<'a, T>(key: &str) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n SETTINGS\n\n .read()\n\n .map_err(|_error| error::Error::ConfigLockFailed)?\n\n .get::<T>(key)\n\n .context(error::Config)\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 92, "score": 137531.34618524596 }, { "content": "#[derive(Clone, Debug)]\n\nstruct APITokenInterceptor {\n\n key: AsciiMetadataKey,\n\n token: AsciiMetadataValue,\n\n}\n\n\n\nimpl APITokenInterceptor {\n\n fn new(token: &str) -> Result<APITokenInterceptor> {\n\n let key = AsciiMetadataKey::from_static(\"api_token\");\n\n let value = AsciiMetadataValue::from_str(token).map_err(|_| Error::InvalidAPIToken {\n\n message: \"Could not encode configured token as ASCII.\".to_owned(),\n\n })?;\n\n\n\n Ok(APITokenInterceptor { key, token: value })\n\n }\n\n}\n\n\n\nimpl Interceptor for APITokenInterceptor {\n\n fn call(&mut self, mut request: Request<()>) -> std::result::Result<Request<()>, Status> {\n\n request\n\n .metadata_mut()\n", "file_path": "services/src/datasets/external/nfdi/mod.rs", "rank": 93, "score": 136991.77259033037 }, { "content": "pub fn spatial_reference_specification(srs_string: &str) -> Result<SpatialReferenceSpecification> {\n\n if let Some(sref) = custom_spatial_reference_specification(srs_string) {\n\n return Ok(sref);\n\n }\n\n\n\n let spatial_reference = SpatialReference::from_str(srs_string).context(error::DataType)?;\n\n let json = proj_json(srs_string).ok_or_else(|| Error::UnknownSrsString {\n\n srs_string: srs_string.to_owned(),\n\n })?;\n\n let proj_string = proj_proj_string(srs_string).ok_or_else(|| Error::UnknownSrsString {\n\n srs_string: srs_string.to_owned(),\n\n })?;\n\n\n\n let extent = spatial_reference\n\n .area_of_use_projected()\n\n .context(error::DataType)?;\n\n\n\n let axis_labels = json.coordinate_system.axis.as_ref().map(|axes| {\n\n let a0 = axes.get(0).map_or(\"\".to_owned(), |a| a.name.clone());\n\n let a1 = axes.get(1).map_or(\"\".to_owned(), |a| a.name.clone());\n", "file_path": "services/src/handlers/spatial_references.rs", "rank": 94, "score": 135953.20428212188 }, { "content": 
"#[async_trait]\n\npub trait MetaData<L, R, Q>: Debug + Send + Sync\n\nwhere\n\n R: ResultDescriptor,\n\n{\n\n async fn loading_info(&self, query: Q) -> Result<L>;\n\n async fn result_descriptor(&self) -> Result<R>;\n\n\n\n fn box_clone(&self) -> Box<dyn MetaData<L, R, Q>>;\n\n}\n\n\n\nimpl<L, R, Q> Clone for Box<dyn MetaData<L, R, Q>>\n\nwhere\n\n R: ResultDescriptor,\n\n{\n\n fn clone(&self) -> Box<dyn MetaData<L, R, Q>> {\n\n self.box_clone()\n\n }\n\n}\n\n\n\npub struct MockExecutionContext {\n", "file_path": "operators/src/engine/execution_context.rs", "rank": 95, "score": 135907.67737632306 }, { "content": "#[cfg(test)]\n\npub fn set_config<T>(key: &str, value: T) -> Result<()>\n\nwhere\n\n T: Into<config::Value>,\n\n{\n\n SETTINGS\n\n .write()\n\n .map_err(|_error| error::Error::ConfigLockFailed)?\n\n .set(key, value)\n\n .context(error::Config)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "services/src/util/config.rs", "rank": 96, "score": 135684.68379639107 }, { "content": "/// Helper function to downcast an arrow array\n\n///\n\n/// The caller must be sure of its type, otherwise it panics\n\n///\n\n/// # Panics\n\n/// Panics if `array` is not of type `T`\n\n///\n\npub fn downcast_dyn_array<T: Any>(array: &dyn Array) -> &T {\n\n array.as_any().downcast_ref().unwrap() // must obey type\n\n}\n\n\n", "file_path": "datatypes/src/util/arrow.rs", "rank": 97, "score": 135678.53859331014 }, { "content": "/// This method calculates a suggested pixel size for the translation of a raster into a different projection.\n\n/// The source raster is described using a `BoundingBox2D` and a pixel size as `SpatialResolution`.\n\n/// A suggested pixel size is calculated using the approach used by GDAL:\n\n/// The upper left and the lower right coordinates of the bounding box are projected in the target SRS.\n\n/// Then, the distance between both points in the target SRS is devided by the distance in pixels of the source.\n\npub fn suggest_pixel_size_like_gdal<P: CoordinateProjection, B: AxisAlignedRectangle>(\n\n bbox: B,\n\n spatial_resolution: SpatialResolution,\n\n projector: &P,\n\n) -> Result<SpatialResolution> {\n\n let diag_pixels = euclidian_pixel_distance(bbox, spatial_resolution)?;\n\n\n\n let proj_ul_lr_distance =\n\n projected_diag_distance(bbox.upper_left(), bbox.lower_right(), projector)?;\n\n\n\n // derive the pixel size by deviding srs unit distance by pixel distance in the source bbox\n\n let proj_ul_lr_pixel_size = proj_ul_lr_distance / diag_pixels;\n\n Ok(SpatialResolution::new_unchecked(\n\n proj_ul_lr_pixel_size,\n\n proj_ul_lr_pixel_size,\n\n ))\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 98, "score": 134373.63247372565 }, { "content": "/// This approach uses the GDAL way to suggest the pixel size. 
However, we check both diagonals and take the smaller one.\n\n/// This method fails if the bbox cannot be projected\n\npub fn suggest_pixel_size_from_diag_cross<P: CoordinateProjection, B: AxisAlignedRectangle>(\n\n bbox: B,\n\n spatial_resolution: SpatialResolution,\n\n projector: &P,\n\n) -> Result<SpatialResolution> {\n\n let diag_pixels = euclidian_pixel_distance(bbox, spatial_resolution)?;\n\n\n\n let proj_ul_lr_distance =\n\n projected_diag_distance(bbox.upper_left(), bbox.lower_right(), projector);\n\n\n\n let proj_ll_ur_distance =\n\n projected_diag_distance(bbox.lower_left(), bbox.upper_right(), projector);\n\n\n\n let min_dist_r = match (proj_ul_lr_distance, proj_ll_ur_distance) {\n\n (Ok(ul_lr), Ok(ll_ur)) => Ok(ul_lr.min(ll_ur)),\n\n (Ok(ul_lr), Err(_)) => Ok(ul_lr),\n\n (Err(_), Ok(ll_ur)) => Ok(ll_ur),\n\n (Err(e), Err(_)) => Err(e),\n\n };\n\n\n\n min_dist_r.map(|d| SpatialResolution::new_unchecked(d / diag_pixels, d / diag_pixels))\n\n}\n\n\n", "file_path": "datatypes/src/operations/reproject.rs", "rank": 99, "score": 134372.64812757284 } ]
Rust
daemon/src/rest_api/mod.rs
peterschwarz/grid
1b3859f74faa777fe7b72edb6ae4e7d80ba5c753
pub mod error;
mod routes;

use std::sync::mpsc;
use std::thread;

use crate::database::ConnectionPool;
pub use crate::rest_api::error::RestApiServerError;
use crate::rest_api::routes::DbExecutor;
use crate::rest_api::routes::{
    fetch_agent, fetch_grid_schema, fetch_organization, fetch_product, fetch_record,
    fetch_record_property, get_batch_statuses, list_agents, list_grid_schemas, list_organizations,
    list_products, list_records, submit_batches,
};
use crate::submitter::BatchSubmitter;

use actix::{Addr, SyncArbiter};
use actix_web::{web, App, HttpServer, Result};
use futures::Future;

const SYNC_ARBITER_THREAD_COUNT: usize = 2;

#[derive(Clone)]
pub struct AppState {
    batch_submitter: Box<dyn BatchSubmitter + 'static>,
    database_connection: Addr<DbExecutor>,
}

impl AppState {
    pub fn new(
        batch_submitter: Box<dyn BatchSubmitter + 'static>,
        connection_pool: ConnectionPool,
    ) -> Self {
        let database_connection = SyncArbiter::start(SYNC_ARBITER_THREAD_COUNT, move || {
            DbExecutor::new(connection_pool.clone())
        });

        AppState {
            batch_submitter,
            database_connection,
        }
    }
}

pub struct RestApiShutdownHandle {
    do_shutdown: Box<dyn Fn() -> Result<(), RestApiServerError> + Send>,
}

impl RestApiShutdownHandle {
    pub fn shutdown(&self) -> Result<(), RestApiServerError> {
        (*self.do_shutdown)()
    }
}

pub fn run(
    bind_url: &str,
    database_connection: ConnectionPool,
    batch_submitter: Box<dyn BatchSubmitter + 'static>,
) -> Result<
    (
        RestApiShutdownHandle,
        thread::JoinHandle<Result<(), RestApiServerError>>,
    ),
    RestApiServerError,
> {
    let bind_url = bind_url.to_owned();
    let (tx, rx) = mpsc::channel();

    let join_handle = thread::Builder::new()
        .name("GridRestApi".into())
        .spawn(move || {
            let sys = actix::System::new("Grid-Rest-API");
            let state = AppState::new(batch_submitter, database_connection);

            let addr = HttpServer::new(move || {
                App::new()
                    .data(state.clone())
                    .service(web::resource("batches").route(web::post().to_async(submit_batches)))
                    .service(
                        web::resource("/batch_statuses")
                            .route(web::get().to_async(get_batch_statuses)),
                    )
                    .service(
                        web::scope("/agent")
                            .service(web::resource("").route(web::get().to_async(list_agents)))
                            .service(
                                web::resource("/{public_key}")
                                    .route(web::get().to_async(fetch_agent)),
                            ),
                    )
                    .service(
                        web::scope("/organization")
                            .service(
                                web::resource("").route(web::get().to_async(list_organizations)),
                            )
                            .service(
                                web::resource("/{id}")
                                    .route(web::get().to_async(fetch_organization)),
                            ),
                    )
                    .service(
                        web::scope("/product")
                            .service(web::resource("").route(web::get().to_async(list_products)))
                            .service(
                                web::resource("/{id}").route(web::get().to_async(fetch_product)),
                            ),
                    )
                    .service(
                        web::scope("/schema")
                            .service(
                                web::resource("").route(web::get().to_async(list_grid_schemas)),
                            )
                            .service(
                                web::resource("/{name}")
                                    .route(web::get().to_async(fetch_grid_schema)),
                            ),
                    )
                    .service(
                        web::scope("/record")
                            .service(web::resource("").route(web::get().to_async(list_records)))
                            .service(
                                web::scope("/{record_id}")
                                    .service(
                                        web::resource("").route(web::get().to_async(fetch_record)),
                                    )
                                    .service(
                                        web::resource("/property/{property_name}")
                                            .route(web::get().to_async(fetch_record_property)),
                                    ),
                            ),
                    )
            })
            .bind(bind_url)?
            .disable_signals()
            .system_exit()
            .start();

            tx.send(addr).map_err(|err| {
                RestApiServerError::StartUpError(format!("Unable to send Server Addr: {}", err))
            })?;

            sys.run()?;

            info!("Rest API terminating");

            Ok(())
        })?;

    let addr = rx.recv().map_err(|err| {
        RestApiServerError::StartUpError(format!("Unable to receive Server Addr: {}", err))
    })?;

    let do_shutdown = Box::new(move || {
        debug!("Shutting down Rest API");
        if let Err(err) = addr.stop(true).wait() {
            error!("Failed to shutdown rest api cleanly: {:?}", err);
        }
        debug!("Graceful signal sent to Rest API");

        Ok(())
    });

    Ok((RestApiShutdownHandle { do_shutdown }, join_handle))
}
pub mod error; mod routes; use std::sync::mpsc; use std::thread; use crate::database::ConnectionPool; pub use crate::rest_api::error::RestApiServerError; use crate::rest_api::routes::DbExecutor; use crate::rest_api::routes::{ fetch_agent, fetch_grid_schema, fetch_organization, fetch_product, fetch_record, fetch_record_property, get_batch_statuses, list_agents, list_grid_schemas, list_organizations, list_products, list_records, submit_batches, }; use crate::submitter::BatchSubmitter; use actix::{Addr, SyncArbiter}; use actix_web::{web, App, HttpServer, Result}; use futures::Future; const SYNC_ARBITER_THREAD_COUNT: usize = 2; #[derive(Clone)] pub struct AppState { batch_submitter: Box<dyn BatchSubmitter + 'static>, database_connection: Addr<DbExecutor>, } impl AppState { pub fn new( batch_submitter: Box<dyn BatchSubmitter + 'static>, connection_pool: ConnectionPool, ) -> Self {
AppState { batch_submitter, database_connection, } } } pub struct RestApiShutdownHandle { do_shutdown: Box<dyn Fn() -> Result<(), RestApiServerError> + Send>, } impl RestApiShutdownHandle { pub fn shutdown(&self) -> Result<(), RestApiServerError> { (*self.do_shutdown)() } } pub fn run( bind_url: &str, database_connection: ConnectionPool, batch_submitter: Box<dyn BatchSubmitter + 'static>, ) -> Result< ( RestApiShutdownHandle, thread::JoinHandle<Result<(), RestApiServerError>>, ), RestApiServerError, > { let bind_url = bind_url.to_owned(); let (tx, rx) = mpsc::channel(); let join_handle = thread::Builder::new() .name("GridRestApi".into()) .spawn(move || { let sys = actix::System::new("Grid-Rest-API"); let state = AppState::new(batch_submitter, database_connection); let addr = HttpServer::new(move || { App::new() .data(state.clone()) .service(web::resource("batches").route(web::post().to_async(submit_batches))) .service( web::resource("/batch_statuses") .route(web::get().to_async(get_batch_statuses)), ) .service( web::scope("/agent") .service(web::resource("").route(web::get().to_async(list_agents))) .service( web::resource("/{public_key}") .route(web::get().to_async(fetch_agent)), ), ) .service( web::scope("/organization") .service( web::resource("").route(web::get().to_async(list_organizations)), ) .service( web::resource("/{id}") .route(web::get().to_async(fetch_organization)), ), ) .service( web::scope("/product") .service(web::resource("").route(web::get().to_async(list_products))) .service( web::resource("/{id}").route(web::get().to_async(fetch_product)), ), ) .service( web::scope("/schema") .service( web::resource("").route(web::get().to_async(list_grid_schemas)), ) .service( web::resource("/{name}") .route(web::get().to_async(fetch_grid_schema)), ), ) .service( web::scope("/record") .service(web::resource("").route(web::get().to_async(list_records))) .service( web::scope("/{record_id}") .service( web::resource("").route(web::get().to_async(fetch_record)), ) .service( web::resource("/property/{property_name}") .route(web::get().to_async(fetch_record_property)), ), ), ) }) .bind(bind_url)? .disable_signals() .system_exit() .start(); tx.send(addr).map_err(|err| { RestApiServerError::StartUpError(format!("Unable to send Server Addr: {}", err)) })?; sys.run()?; info!("Rest API terminating"); Ok(()) })?; let addr = rx.recv().map_err(|err| { RestApiServerError::StartUpError(format!("Unable to receive Server Addr: {}", err)) })?; let do_shutdown = Box::new(move || { debug!("Shutting down Rest API"); if let Err(err) = addr.stop(true).wait() { error!("Failed to shutdown rest api cleanly: {:?}", err); } debug!("Graceful signal sent to Rest API"); Ok(()) }); Ok((RestApiShutdownHandle { do_shutdown }, join_handle)) }
let database_connection = SyncArbiter::start(SYNC_ARBITER_THREAD_COUNT, move || {
    DbExecutor::new(connection_pool.clone())
});
assignment_statement
[ { "content": "pub fn create_connection_pool(database_url: &str) -> Result<ConnectionPool, DatabaseError> {\n\n let connection_manager = ConnectionManager::<PgConnection>::new(database_url);\n\n Ok(ConnectionPool {\n\n pool: Pool::builder()\n\n .build(connection_manager)\n\n .map_err(|err| DatabaseError::ConnectionError(Box::new(err)))?,\n\n })\n\n}\n\n\n\npub struct Connection(PooledConnection<ConnectionManager<PgConnection>>);\n\n\n\nimpl Deref for Connection {\n\n type Target = PgConnection;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "daemon/src/database/mod.rs", "rank": 0, "score": 303179.2107669723 }, { "content": "// Leaving this as an extensible function, so other validation rules can be implemented by GTIN format\n\npub fn validate_gtin(gtin: &str) -> Result<(), ApplyError> {\n\n // Check that gtin is numeric only\n\n if is_numeric(gtin) {\n\n match gtin.chars().count() {\n\n // GTIN-8 is an 8-digit number used predominately outside of North America on smaller packaging (not supported)\n\n 8 => Err(ApplyError::InvalidTransaction(format!(\n\n \"Invalid GTIN, GTIN-8 is not supported at this time: {}\",\n\n gtin\n\n ))),\n\n // GTIN-12 is a 12-digit number used primarily in North America\n\n 12 => check_digit_validation(gtin),\n\n // GTIN-13 (it could also be a GLN or the first 13 digits of a GRAI, GDTI or GCN.) (ex: 9781981855728)\n\n 13 => check_digit_validation(gtin),\n\n // GTIN-14 is a 14-digit number used to identify trade items at various packaging levels\n\n 14 => check_digit_validation(gtin),\n\n // Invalid length\n\n _ => Err(ApplyError::InvalidTransaction(format!(\n\n \"Invalid length for GTIN identifier: {}\",\n\n gtin\n\n ))),\n\n }\n\n } else {\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Invalid format, GTIN identifiers only contain numbers: {}\",\n\n gtin\n\n )))\n\n }\n\n}\n\n\n", "file_path": "contracts/product/src/validation.rs", "rank": 1, "score": 252014.09066773168 }, { "content": "pub fn do_list_schemas(url: &str) -> Result<(), CliError> {\n\n let client = Client::new();\n\n let schemas = client\n\n .get(&format!(\"{}/schema\", url))\n\n .send()?\n\n .json::<Vec<GridSchemaSlice>>()?;\n\n schemas.iter().for_each(|schema| display_schema(schema));\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 2, "score": 252014.09066773165 }, { "content": "pub fn do_list_products(url: &str) -> Result<(), CliError> {\n\n let client = Client::new();\n\n let products = client\n\n .get(&format!(\"{}/product\", url))\n\n .send()?\n\n .json::<Vec<GridProduct>>()?;\n\n products.iter().for_each(|product| display_product(product));\n\n Ok(())\n\n}\n\n\n\n/**\n\n * Print a single product in state\n\n *\n\n * url - Url for the REST API\n\n * product_id - e.g. 
GTIN\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 3, "score": 252014.09066773165 }, { "content": "pub fn validate_payload(payload: &ProductPayload) -> Result<(), ApplyError> {\n\n validate_timestamp(*payload.timestamp())?;\n\n match payload.action() {\n\n Action::ProductCreate(action_payload) => validate_product_create_action(action_payload),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "contracts/product/src/payload.rs", "rank": 4, "score": 248695.13635408028 }, { "content": "pub fn validate_payload(payload: &SchemaPayload) -> Result<(), ApplyError> {\n\n match payload.action() {\n\n Action::SchemaCreate(payload) => validate_schema_create_action(payload),\n\n Action::SchemaUpdate(payload) => validate_schema_update_action(payload),\n\n }\n\n}\n\n\n", "file_path": "contracts/schema/src/payload.rs", "rank": 5, "score": 248695.13635408028 }, { "content": "pub fn run_migrations(database_url: &str) -> Result<(), CliError> {\n\n let connection = PgConnection::establish(database_url)\n\n .map_err(|err| CliError::DatabaseError(err.to_string()))?;\n\n\n\n embedded_migrations::run(&connection)\n\n .map_err(|err| CliError::DatabaseError(err.to_string()))?;\n\n\n\n info!(\"Successfully applied migrations\");\n\n\n\n Ok(())\n\n}\n", "file_path": "cli/src/actions/database.rs", "rank": 6, "score": 248695.13635408028 }, { "content": "/// Convert byte string to Resource\n\npub fn byte_to_resource(bytes: &str) -> Result<Resource, ResourceError> {\n\n match bytes {\n\n \"00\" => Ok(Resource::AGENT),\n\n \"01\" => Ok(Resource::ORG),\n\n _ => Err(ResourceError::UnknownResource(format!(\n\n \"No resource found matching byte pattern {}\",\n\n bytes\n\n ))),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ResourceError {\n\n UnknownResource(String),\n\n}\n", "file_path": "contracts/pike/src/addresser.rs", "rank": 7, "score": 244677.9578663818 }, { "content": "pub fn validate_payload(payload: &TrackAndTracePayload) -> Result<(), ApplyError> {\n\n validate_timestamp(*payload.timestamp())?;\n\n match payload.action() {\n\n Action::CreateRecord(action_payload) => validate_record_create_action(action_payload),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "contracts/track_and_trace/src/payload.rs", "rank": 8, "score": 242453.59673543068 }, { "content": "pub fn fetch_record(\n\n state: web::Data<AppState>,\n\n record_id: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n state\n\n .database_connection\n\n .send(FetchRecord {\n\n record_id: record_id.into_inner(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(record) => Ok(HttpResponse::Ok().json(record)),\n\n Err(err) => Err(err),\n\n })\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct PropertySlice {\n\n pub name: String,\n\n pub record_id: String,\n", "file_path": "daemon/src/rest_api/routes/records.rs", "rank": 9, "score": 240583.58894487674 }, { "content": "pub fn fetch_product(\n\n state: web::Data<AppState>,\n\n product_id: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n Box::new(\n\n state\n\n .database_connection\n\n .send(FetchProduct {\n\n product_id: product_id.into_inner(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(product) => Ok(HttpResponse::Ok().json(product)),\n\n Err(err) => Err(err),\n\n }),\n\n )\n\n}\n", "file_path": "daemon/src/rest_api/routes/products.rs", "rank": 10, "score": 240583.58894487674 }, { "content": "pub fn do_show_schema(url: &str, name: &str) -> 
Result<(), CliError> {\n\n let client = Client::new();\n\n let schema = client\n\n .get(&format!(\"{}/schema/{}\", url, name))\n\n .send()?\n\n .json::<GridSchemaSlice>()?;\n\n display_schema(&schema);\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 11, "score": 237813.10133840097 }, { "content": "pub fn fetch_grid_schema(\n\n state: web::Data<AppState>,\n\n schema_name: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n state\n\n .database_connection\n\n .send(FetchGridSchema {\n\n name: schema_name.into_inner(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(schema) => Ok(HttpResponse::Ok().json(schema)),\n\n Err(err) => Err(err),\n\n })\n\n}\n", "file_path": "daemon/src/rest_api/routes/schemas.rs", "rank": 12, "score": 237764.59897900527 }, { "content": "pub fn parse_value_as_property_value(property: &Mapping) -> Result<PropertyValue, CliError> {\n\n let data_type = parse_value_as_data_type(\n\n &parse_value_as_string(property, \"data_type\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Missing `data_type` field for property definition.\".to_string(),\n\n )\n\n })?,\n\n )?;\n\n\n\n let mut property_value = PropertyValueBuilder::new()\n\n .with_name(parse_value_as_string(property, \"name\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Missing `name` field for product property value.\".to_string(),\n\n )\n\n })?)\n\n .with_data_type(data_type.clone());\n\n\n\n property_value = match data_type {\n\n DataType::Bytes => property_value.with_bytes_value(\n\n parse_value_as_bytes(property, \"bytes_value\")?.ok_or_else(|| {\n", "file_path": "cli/src/yaml_parser.rs", "rank": 13, "score": 235498.36197700308 }, { "content": "pub fn do_show_products(url: &str, product_id: &str) -> Result<(), CliError> {\n\n let client = Client::new();\n\n let product = client\n\n .get(&format!(\"{}/product/{}\", url, product_id))\n\n .send()?\n\n .json::<GridProduct>()?;\n\n display_product(&product);\n\n Ok(())\n\n}\n\n\n\n/**\n\n * Create a new product\n\n *\n\n * url - Url for the REST API\n\n * key - Signing key of the agent\n\n * wait - Time in seconds to wait for commit\n\n * path - Path to the yaml file that contains the product descriptions\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 14, "score": 234755.7098053248 }, { "content": "pub fn parse_value_as_product_type(product_type: &str) -> Result<ProductType, CliError> {\n\n match product_type.to_uppercase().as_ref() {\n\n \"GS1\" => Ok(ProductType::GS1),\n\n _ => Err(CliError::InvalidYamlError(format!(\n\n \"Invalid product_type for value: {}\",\n\n product_type\n\n ))),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml key/val, parse the val as a list of Property Value objects\n\n *\n\n * properties - One or more yaml objects to be parsed as a Property Value\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 15, "score": 232672.78792024005 }, { "content": "pub fn parse_value_as_data_type(data_type: &str) -> Result<DataType, CliError> {\n\n match data_type.to_lowercase().as_ref() {\n\n \"string\" => Ok(DataType::String),\n\n \"boolean\" => Ok(DataType::Boolean),\n\n \"bytes\" => Ok(DataType::Bytes),\n\n \"number\" => Ok(DataType::Number),\n\n \"enum\" => Ok(DataType::Enum),\n\n \"struct\" => Ok(DataType::Struct),\n\n \"lat_long\" => Ok(DataType::LatLong),\n\n _ => Err(CliError::InvalidYamlError(format!(\n\n \"Invalid data type for PropertyDefinition: {}\",\n\n data_type\n\n ))),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse 
it as a vec of strings\n\n *\n\n * property - Yaml object we wish to parse in as a vec of strings\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 16, "score": 232672.78792024005 }, { "content": "/// Return a signing key loaded from the user's environment\n\n///\n\n/// This method attempts to load the user's key from a file. The filename\n\n/// is constructed by appending \".priv\" to the key's name. If the name argument\n\n/// is None, then the USER environment variable is used in its place.\n\n///\n\n/// The directory containing the keys is determined using the HOME\n\n/// environment variable:\n\n///\n\n/// $HOME/.grid/keys/\n\n///\n\n/// # Arguments\n\n///\n\n/// * `name` - The name of the signing key, which is used to construct the\n\n/// key's filename\n\n///\n\n/// # Errors\n\n///\n\n/// If a signing error occurs, a CliError::SigningError is returned.\n\n///\n\n/// If a HOME or USER environment variable is required but cannot be\n\n/// retrieved from the environment, a CliError::VarError is returned.\n\npub fn load_signing_key(name: Option<String>) -> Result<Secp256k1PrivateKey, CliError> {\n\n let username: String = name\n\n .ok_or_else(|| env::var(\"USER\"))\n\n .or_else(|_| {\n\n get_current_username()\n\n .ok_or(0)\n\n .and_then(|os_str| os_str.into_string().map_err(|_| 0))\n\n })\n\n .map_err(|_| {\n\n CliError::UserError(String::from(\n\n \"Could not load signing key: unable to determine username\",\n\n ))\n\n })?;\n\n\n\n let private_key_filename = dirs::home_dir()\n\n .ok_or_else(|| {\n\n CliError::UserError(String::from(\n\n \"Could not load signing key: unable to determine home directory\",\n\n ))\n\n })\n", "file_path": "cli/src/key.rs", "rank": 17, "score": 231828.53234288766 }, { "content": "pub fn parse_value_as_u32(property: &Mapping, key: &str) -> Result<Option<u32>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n // Serde only has methods to match 64 bit nums\n\n Some(value) => match value.as_u64() {\n\n Some(value) => Ok(Some(value.to_string().parse::<u32>().map_err(|_| {\n\n CliError::InvalidYamlError(format!(\n\n \"Failed to parse value of {} to 32 bit integer\",\n\n key\n\n ))\n\n })?)),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. Expected is a yaml integer.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as a LatLong object\n\n *\n\n * property - Yaml object we wish to parse in as a LatLong object\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 18, "score": 222484.04898395133 }, { "content": "pub fn parse_value_as_boolean(property: &Mapping, key: &str) -> Result<Option<bool>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_bool() {\n\n Some(value) => Ok(Some(value)),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. 
Expected is a yaml boolean (true/false).\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as an i32\n\n *\n\n * property - Yaml object we wish to parse in as an i32\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 19, "score": 222484.04898395133 }, { "content": "pub fn parse_value_as_i32(property: &Mapping, key: &str) -> Result<Option<i32>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_i64() {\n\n Some(value) => Ok(Some(value.to_string().parse::<i32>().map_err(|_| {\n\n CliError::InvalidYamlError(format!(\n\n \"Failed to parse value of {} to 32 bit integer\",\n\n key\n\n ))\n\n })?)),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. Expected is a yaml integer.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as a vector of bytes\n\n *\n\n * property - Yaml object we wish to parse in as a vector of bytes\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 20, "score": 222484.04898395133 }, { "content": "pub fn parse_value_as_string(property: &Mapping, key: &str) -> Result<Option<String>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_str() {\n\n Some(value) => Ok(Some(value.to_string())),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. Expected is a yaml string.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as a bool\n\n *\n\n * property - Yaml object we wish to parse in as a bool\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 21, "score": 222484.04898395133 }, { "content": "pub fn parse_value_as_i64(property: &Mapping, key: &str) -> Result<Option<i64>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_i64() {\n\n Some(value) => Ok(Some(value.to_string().parse::<i64>().map_err(|_| {\n\n CliError::InvalidYamlError(format!(\n\n \"Failed to parse value of {} to 64 bit integer\",\n\n key\n\n ))\n\n })?)),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. 
Expected is a yaml integer.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as an u32\n\n *\n\n * property - Yaml object we wish to parse in as an u32\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 22, "score": 222484.04898395133 }, { "content": "pub fn parse_value_as_lat_long(property: &Mapping, key: &str) -> Result<Option<LatLong>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_str() {\n\n Some(value) => {\n\n let lat_long: Vec<&str> = value.split(',').collect();\n\n\n\n let lat: i64 = lat_long[0].parse().map_err(|err| {\n\n CliError::InvalidYamlError(format!(\n\n \"Failed to parse the Latitude value for LatLong: {}\",\n\n err\n\n ))\n\n })?;\n\n\n\n let long: i64 = lat_long[1].parse().map_err(|err| {\n\n CliError::InvalidYamlError(format!(\n\n \"Failed to parse the Longitude value for LatLong: {}\",\n\n err\n\n ))\n\n })?;\n\n\n", "file_path": "cli/src/yaml_parser.rs", "rank": 23, "score": 217145.4622367785 }, { "content": "pub fn submit_batches(url: &str, mut wait: u64, batch_list: &BatchList) -> Result<(), CliError> {\n\n let bytes = batch_list.write_to_bytes()?;\n\n\n\n let client = Client::new();\n\n\n\n let batch_link = client\n\n .post(&format!(\"{}/batches\", url))\n\n .body(bytes)\n\n .send()?\n\n .json::<BatchStatusLink>()?;\n\n\n\n debug!(\"Response: {:#?}\", batch_link);\n\n\n\n while wait > 0 {\n\n let time = Instant::now();\n\n\n\n let batch_status = client\n\n .get(&format!(\"{}&wait={}\", batch_link.link, wait))\n\n .send()?\n\n .json::<BatchStatusResponse>()?;\n", "file_path": "cli/src/http.rs", "rank": 24, "score": 216873.7970607253 }, { "content": "pub fn parse_value_as_bytes(property: &Mapping, key: &str) -> Result<Option<Vec<u8>>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_i64() {\n\n Some(value) => Ok(Some(value.to_string().into_bytes())),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. 
Expected is a yaml bytes value.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as an i64\n\n *\n\n * property - Yaml object we wish to parse in as an i64\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 25, "score": 216873.7970607253 }, { "content": "pub fn is_admin(signer: &str, org_id: &str, state: &mut PikeState) -> Result<(), ApplyError> {\n\n let admin = match state.get_agent(signer) {\n\n Ok(None) => {\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Signer is not an agent: {}\",\n\n signer,\n\n )))\n\n }\n\n Ok(Some(admin)) => admin,\n\n Err(err) => {\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Failed to retrieve state: {}\",\n\n err,\n\n )))\n\n }\n\n };\n\n\n\n if admin.get_org_id() != org_id {\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Signer is not associated with the organization: {}\",\n", "file_path": "contracts/pike/src/handler.rs", "rank": 26, "score": 216873.79706072534 }, { "content": "fn run() -> Result<(), DaemonError> {\n\n let matches = clap_app!(myapp =>\n\n (name: APP_NAME)\n\n (version: VERSION)\n\n (author: \"Contributors to Hyperledger Grid\")\n\n (about: \"Daemon Package for Hyperledger Grid\")\n\n (@arg connect: -C --connect +takes_value \"connection endpoint for sawtooth or splinter\")\n\n (@arg verbose: -v +multiple \"Log verbosely\")\n\n (@arg database_url: --(\"database-url\") +takes_value\n\n \"specifies the database URL to connect to.\")\n\n (@arg bind: -b --bind +takes_value \"connection endpoint for rest API\")\n\n )\n\n .get_matches();\n\n\n\n let log_level = match matches.occurrences_of(\"verbose\") {\n\n 0 => Level::Warn,\n\n 1 => Level::Info,\n\n 2 => Level::Debug,\n\n _ => Level::Trace,\n\n };\n", "file_path": "daemon/src/main.rs", "rank": 28, "score": 205369.71189040857 }, { "content": "fn run() -> Result<(), CliError> {\n\n let matches = clap_app!(myapp =>\n\n (name: APP_NAME)\n\n (version: VERSION)\n\n (author: \"Contributors to Hyperledger Grid\")\n\n (about: \"Command line for Hyperledger Grid\")\n\n (@arg url: --url +takes_value \"URL for the REST API\")\n\n (@arg wait: --wait +takes_value \"How long to wait for transaction to be committed\")\n\n (@arg key: -k +takes_value \"base name for private key file\")\n\n (@arg verbose: -v +multiple \"Log verbosely\")\n\n (@subcommand agent =>\n\n (about: \"Update or create agent\")\n\n (@setting SubcommandRequiredElseHelp)\n\n (@subcommand create =>\n\n (about: \"Create an agent\")\n\n (@arg org_id: +takes_value +required \"organization ID\")\n\n (@arg public_key: +takes_value +required \"public key\")\n\n (@arg active: \"Is user active\")\n\n (@arg roles: --roles +takes_value +multiple \"Roles assigned to agent\")\n\n (@arg metadata: --metadata +takes_value +multiple\n", "file_path": "cli/src/main.rs", "rank": 29, "score": 205369.71189040857 }, { "content": "pub fn insert_products(conn: &PgConnection, products: &[NewProduct]) -> QueryResult<()> {\n\n for prod in products {\n\n update_prod_end_block_num(conn, &prod.product_id, prod.start_block_num)?;\n\n }\n\n\n\n insert_into(product::table)\n\n .values(products)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/products.rs", "rank": 30, "score": 205202.06563840495 }, { "content": "pub fn insert_agents(conn: &PgConnection, agents: &[NewAgent]) -> QueryResult<()> {\n\n for agent in agents {\n\n update_agent_end_block_num(conn, &agent.public_key, agent.start_block_num)?;\n\n }\n\n\n\n 
insert_into(agent::table)\n\n .values(agents)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/agents.rs", "rank": 31, "score": 205202.06563840495 }, { "content": "pub fn insert_block(conn: &PgConnection, block: &NewBlock) -> QueryResult<()> {\n\n insert_into(block::table)\n\n .values(block)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/blocks.rs", "rank": 32, "score": 205202.06563840495 }, { "content": "fn create_db_block_from_commit_event(event: &CommitEvent) -> Result<NewBlock, EventError> {\n\n let block_id = event.id.clone();\n\n let block_num = commit_event_height_to_block_num(event.height)?;\n\n let state_root_hash = \"\".into();\n\n let source = Some(event.source.clone());\n\n Ok(NewBlock {\n\n block_id,\n\n block_num,\n\n state_root_hash,\n\n source,\n\n })\n\n}\n\n\n", "file_path": "daemon/src/event/db_handler.rs", "rank": 33, "score": 203714.33693460043 }, { "content": "pub fn insert_records(conn: &PgConnection, records: &[NewRecord]) -> QueryResult<()> {\n\n for record in records {\n\n update_record_end_block_num(conn, &record.record_id, record.start_block_num)?;\n\n }\n\n\n\n insert_into(record::table)\n\n .values(records)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 34, "score": 202372.9138379145 }, { "content": "pub fn insert_reporters(conn: &PgConnection, reporters: &[NewReporter]) -> QueryResult<()> {\n\n for reporter in reporters {\n\n update_reporter_end_block_num(\n\n conn,\n\n &reporter.property_name,\n\n &reporter.record_id,\n\n &reporter.public_key,\n\n reporter.start_block_num,\n\n )?;\n\n }\n\n\n\n insert_into(reporter::table)\n\n .values(reporters)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 35, "score": 202372.91383791447 }, { "content": "pub fn insert_proposals(conn: &PgConnection, proposals: &[NewProposal]) -> QueryResult<()> {\n\n for proposal in proposals {\n\n update_proposal_end_block_num(\n\n conn,\n\n &proposal.record_id,\n\n &proposal.receiving_agent,\n\n &proposal.role,\n\n proposal.start_block_num,\n\n )?;\n\n }\n\n\n\n insert_into(proposal::table)\n\n .values(proposals)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 36, "score": 202372.91383791447 }, { "content": "pub fn insert_properties(conn: &PgConnection, properties: &[NewProperty]) -> QueryResult<()> {\n\n for property in properties {\n\n update_property_end_block_num(\n\n conn,\n\n &property.name,\n\n &property.record_id,\n\n property.start_block_num,\n\n )?;\n\n }\n\n\n\n insert_into(property::table)\n\n .values(properties)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 37, "score": 202372.9138379145 }, { "content": "pub fn list_products(\n\n state: web::Data<AppState>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = RestApiResponseError>> {\n\n Box::new(\n\n state\n\n .database_connection\n\n .send(ListProducts)\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(products) => Ok(HttpResponse::Ok().json(products)),\n\n Err(err) => Err(err),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/products.rs", "rank": 38, "score": 200992.90703642162 }, { "content": "pub fn fetch_agent(\n\n state: web::Data<AppState>,\n\n public_key: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, 
Error = RestApiResponseError> {\n\n state\n\n .database_connection\n\n .send(FetchAgent {\n\n public_key: public_key.into_inner(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(agent) => Ok(HttpResponse::Ok().json(agent)),\n\n Err(err) => Err(err),\n\n })\n\n}\n", "file_path": "daemon/src/rest_api/routes/agents.rs", "rank": 39, "score": 200992.90703642162 }, { "content": "pub fn submit_batches(\n\n req: HttpRequest,\n\n body: web::Bytes,\n\n state: web::Data<AppState>,\n\n) -> impl Future<Item = HttpResponse, Error = ActixError> {\n\n let batch_list: BatchList = match protobuf::parse_from_bytes(&*body) {\n\n Ok(batch_list) => batch_list,\n\n Err(err) => {\n\n return RestApiResponseError::BadRequest(format!(\n\n \"Protobuf message was badly formatted. {}\",\n\n err.to_string()\n\n ))\n\n .future_box()\n\n }\n\n };\n\n let response_url = match req.url_for_static(\"batch_statuses\") {\n\n Ok(url) => url,\n\n Err(err) => return Box::new(future::err(err.into())),\n\n };\n\n\n\n match state.batch_submitter.submit_batches(SubmitBatches {\n\n batch_list,\n\n response_url,\n\n }) {\n\n Ok(link) => Box::new(HttpResponse::Ok().json(link).into_future()),\n\n Err(err) => err.future_box(),\n\n }\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/batches.rs", "rank": 40, "score": 200992.90703642162 }, { "content": "pub fn list_records(\n\n state: web::Data<AppState>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n Box::new(\n\n state\n\n .database_connection\n\n .send(ListRecords)\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(records) => Ok(HttpResponse::Ok().json(records)),\n\n Err(err) => Err(err),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/records.rs", "rank": 41, "score": 200992.90703642162 }, { "content": "pub fn fetch_organization(\n\n state: web::Data<AppState>,\n\n organization_id: web::Path<String>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n state\n\n .database_connection\n\n .send(FetchOrganization {\n\n organization_id: organization_id.into_inner(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(organization) => Ok(HttpResponse::Ok().json(organization)),\n\n Err(err) => Err(err),\n\n })\n\n}\n", "file_path": "daemon/src/rest_api/routes/organizations.rs", "rank": 42, "score": 200992.90703642162 }, { "content": "pub fn list_agents(\n\n state: web::Data<AppState>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = RestApiResponseError>> {\n\n Box::new(\n\n state\n\n .database_connection\n\n .send(ListAgents)\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(agents) => Ok(HttpResponse::Ok().json(agents)),\n\n Err(err) => Err(err),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/agents.rs", "rank": 43, "score": 200992.90703642162 }, { "content": "pub fn list_organizations(\n\n state: web::Data<AppState>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n state\n\n .database_connection\n\n .send(ListOrganizations)\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(organizations) => Ok(HttpResponse::Ok().json(organizations)),\n\n Err(err) => Err(err),\n\n })\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/organizations.rs", "rank": 44, "score": 200992.90703642162 }, { "content": "pub fn fetch_record_property(\n\n state: web::Data<AppState>,\n\n params: web::Path<(String, String)>,\n\n) -> impl Future<Item = HttpResponse, Error = RestApiResponseError> {\n\n state\n\n 
.database_connection\n\n .send(FetchRecordProperty {\n\n record_id: params.0.clone(),\n\n property_name: params.1.clone(),\n\n })\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(record) => Ok(HttpResponse::Ok().json(record)),\n\n Err(err) => Err(err),\n\n })\n\n}\n\n\n\nimpl Handler<FetchRecordProperty> for DbExecutor {\n\n type Result = Result<PropertySlice, RestApiResponseError>;\n\n\n", "file_path": "daemon/src/rest_api/routes/records.rs", "rank": 45, "score": 198173.91707055017 }, { "content": "pub fn get_batch_statuses(\n\n req: HttpRequest,\n\n state: web::Data<AppState>,\n\n query: web::Query<HashMap<String, String>>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = ActixError>> {\n\n let batch_ids = match query.get(\"id\") {\n\n Some(ids) => ids.split(',').map(ToString::to_string).collect(),\n\n None => {\n\n return RestApiResponseError::BadRequest(\n\n \"Request for statuses missing id query.\".to_string(),\n\n )\n\n .future_box();\n\n }\n\n };\n\n\n\n // Max wait time allowed is 95% of network's configured timeout\n\n let max_wait_time = (DEFAULT_TIME_OUT * 95) / 100;\n\n\n\n let wait = match query.get(\"wait\") {\n\n Some(wait_time) => {\n", "file_path": "daemon/src/rest_api/routes/batches.rs", "rank": 46, "score": 198173.91707055015 }, { "content": "pub fn list_grid_schemas(\n\n state: web::Data<AppState>,\n\n) -> Box<dyn Future<Item = HttpResponse, Error = RestApiResponseError>> {\n\n Box::new(\n\n state\n\n .database_connection\n\n .send(ListGridSchemas)\n\n .from_err()\n\n .and_then(move |res| match res {\n\n Ok(schemas) => Ok(HttpResponse::Ok().json(schemas)),\n\n Err(err) => Err(err),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "daemon/src/rest_api/routes/schemas.rs", "rank": 47, "score": 198173.91707055017 }, { "content": "pub fn insert_grid_schemas(conn: &PgConnection, schemas: &[NewGridSchema]) -> QueryResult<()> {\n\n for schema in schemas {\n\n update_grid_schema_end_block_num(conn, &schema.name, schema.start_block_num)?;\n\n }\n\n\n\n insert_into(grid_schema::table)\n\n .values(schemas)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/grid_schemas.rs", "rank": 48, "score": 197027.56736869423 }, { "content": "pub fn insert_reported_values(conn: &PgConnection, values: &[NewReportedValue]) -> QueryResult<()> {\n\n for value in values {\n\n update_reported_value_end_block_num(\n\n conn,\n\n &value.property_name,\n\n &value.record_id,\n\n value.start_block_num,\n\n )?;\n\n }\n\n\n\n insert_into(reported_value::table)\n\n .values(values)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 49, "score": 197027.56736869423 }, { "content": "pub fn fetch_product(conn: &PgConnection, product_id: &str) -> QueryResult<Option<Product>> {\n\n product::table\n\n .select(product::all_columns)\n\n .filter(\n\n product::product_id\n\n .eq(product_id)\n\n .and(product::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/products.rs", "rank": 50, "score": 187644.85709764116 }, { "content": "fn validate_timestamp(timestamp: u64) -> Result<(), ApplyError> {\n\n match timestamp {\n\n 0 => Err(ApplyError::InvalidTransaction(String::from(\n\n \"Timestamp is not set\",\n\n ))),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use grid_sdk::protos::product_payload::{\n\n ProductCreateAction as 
ProductCreateActionProto, ProductPayload as ProductPayloadProto,\n\n ProductPayload_Action as ActionProto,\n\n };\n\n use grid_sdk::protos::product_state::Product_ProductType;\n\n use grid_sdk::protos::IntoNative;\n\n\n", "file_path": "contracts/product/src/payload.rs", "rank": 51, "score": 186944.68742599373 }, { "content": "pub fn fetch_record(conn: &PgConnection, record_id: &str) -> QueryResult<Option<Record>> {\n\n record::table\n\n .select(record::all_columns)\n\n .filter(\n\n record::record_id\n\n .eq(record_id)\n\n .and(record::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 52, "score": 185683.17107757193 }, { "content": "fn get_unix_utc_timestamp() -> Result<u64, SystemTimeError> {\n\n match SystemTime::now().duration_since(UNIX_EPOCH) {\n\n Ok(duration) => Ok(duration.as_secs()),\n\n Err(err) => Err(err),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use grid_sdk::protocol::product::payload::{Action, ProductPayload};\n\n use grid_sdk::protocol::product::state::ProductType;\n\n use grid_sdk::protocol::schema::state::{DataType, PropertyValueBuilder};\n\n use std::fs::{remove_file, File};\n\n use std::io::Write;\n\n use std::{env, panic, thread};\n\n\n\n static EXAMPLE_PRODUCT_YAML: &[u8; 288] = br##\"- product_type: \"GS1\"\n\n product_id: \"723382885088\"\n\n owner: \"314156\"\n", "file_path": "cli/src/actions/products.rs", "rank": 53, "score": 185086.6137380399 }, { "content": "fn check_digit_validation(gtin: &str) -> Result<(), ApplyError> {\n\n let mut gtin_vec: Vec<char> = gtin.chars().collect();\n\n // Remove the check digit from the gtin_vec and store it for later\n\n let check_digit_char = gtin_vec\n\n .pop()\n\n .expect(\"No characters found, but string length was > 0\");\n\n let check_digit = convert_char_to_int(check_digit_char);\n\n let mut sum = 0;\n\n let mut index = 0;\n\n\n\n if is_even(gtin_vec.len()) {\n\n // For gtin-13\n\n for digit in &gtin_vec {\n\n if is_even(index) {\n\n sum += convert_char_to_int(*digit);\n\n index += 1;\n\n } else {\n\n sum += 3 * convert_char_to_int(*digit);\n\n index += 1;\n\n }\n", "file_path": "contracts/product/src/validation.rs", "rank": 54, "score": 184346.65522621165 }, { "content": "fn validate_timestamp(timestamp: u64) -> Result<(), ApplyError> {\n\n match timestamp {\n\n 0 => Err(ApplyError::InvalidTransaction(String::from(\n\n \"Timestamp is not set\",\n\n ))),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use grid_sdk::protos::track_and_trace_payload::{\n\n CreateRecordAction as CreateRecordActionProto,\n\n TrackAndTracePayload as TrackAndTracePayloadProto,\n\n TrackAndTracePayload_Action as ActionProto,\n\n };\n\n use grid_sdk::protos::IntoNative;\n\n\n", "file_path": "contracts/track_and_trace/src/payload.rs", "rank": 55, "score": 184346.65522621165 }, { "content": "pub fn fetch_grid_schema(conn: &PgConnection, name: &str) -> QueryResult<Option<GridSchema>> {\n\n grid_schema::table\n\n .select(grid_schema::all_columns)\n\n .filter(\n\n grid_schema::name\n\n .eq(name)\n\n .and(grid_schema::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/grid_schemas.rs", "rank": 56, "score": 183792.50869472328 }, { "content": "pub fn hash(to_hash: &str, num: 
usize) -> String {\n\n let mut sha = Sha512::new();\n\n sha.input_str(to_hash);\n\n let temp = sha.result_str();\n\n let hash = temp.get(..num).expect(\"PANIC! Hashing Out of Bounds Error\");\n\n hash.to_string()\n\n}\n\n\n", "file_path": "contracts/product/src/addressing.rs", "rank": 57, "score": 180122.79806843193 }, { "content": "/// Returns a state address for a given namespace registry\n\n///\n\n/// # Arguments\n\n///\n\n/// * `namespace` - the address prefix for this namespace\n\nfn compute_namespace_registry_address(namespace: &str) -> Result<String, CliError> {\n\n let prefix = match namespace.get(..6) {\n\n Some(x) => x,\n\n None => {\n\n return Err(CliError::UserError(format!(\n\n \"Namespace must be at least 6 characters long: {}\",\n\n namespace\n\n )));\n\n }\n\n };\n\n\n\n let hash: &mut [u8] = &mut [0; 64];\n\n\n\n let mut sha = Sha512::new();\n\n sha.input(prefix.as_bytes());\n\n sha.result(hash);\n\n\n\n Ok(String::from(SABRE_NAMESPACE_REGISTRY_PREFIX) + &bytes_to_hex_str(hash)[..64])\n\n}\n", "file_path": "cli/src/transaction.rs", "rank": 58, "score": 178921.35046604308 }, { "content": "pub fn hash(to_hash: &str, num: usize) -> String {\n\n let mut sha = Sha512::new();\n\n sha.input_str(to_hash);\n\n let temp = sha.result_str();\n\n let hash = match temp.get(..num) {\n\n Some(x) => x,\n\n None => \"\",\n\n };\n\n hash.to_string()\n\n}\n\n\n", "file_path": "contracts/track_and_trace/src/addressing.rs", "rank": 59, "score": 177525.09044233823 }, { "content": "fn parse_property_definition(property: &Mapping) -> Result<PropertyDefinition, CliError> {\n\n let data_type = parse_value_as_data_type(\n\n &parse_value_as_string(property, \"data_type\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Missing `data_type` field for property definition.\".to_string(),\n\n )\n\n })?,\n\n )?;\n\n\n\n let mut property_definition = PropertyDefinitionBuilder::new()\n\n .with_name(parse_value_as_string(property, \"name\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\"Missing `name` field for property definition.\".to_string())\n\n })?)\n\n .with_data_type(data_type.clone());\n\n\n\n property_definition = match parse_value_as_string(property, \"description\")? 
{\n\n Some(description) => property_definition.with_description(description),\n\n None => property_definition,\n\n };\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 60, "score": 176532.17974978097 }, { "content": "fn extract_event(msg: Message) -> Result<CommitEvent, EventIoError> {\n\n if msg.get_message_type() != Message_MessageType::CLIENT_EVENTS {\n\n return Err(EventIoError::InvalidMessage(format!(\n\n \"Received unexpected message: {:?}\",\n\n msg.get_message_type()\n\n )));\n\n }\n\n\n\n let sawtooth_events = protobuf::parse_from_bytes::<SawtoothEventList>(msg.get_content())\n\n .map_err(|err| {\n\n EventIoError::InvalidMessage(format!(\"Unable to parse event list: {}\", err))\n\n })?\n\n .take_events()\n\n .to_vec();\n\n\n\n CommitEvent::try_from(sawtooth_events.as_slice())\n\n}\n\n\n\nimpl TryFrom<&[SawtoothEvent]> for CommitEvent {\n\n type Error = EventIoError;\n", "file_path": "daemon/src/sawtooth/event.rs", "rank": 61, "score": 176532.17974978097 }, { "content": "fn validate_schema_update_action(update_action: &SchemaUpdateAction) -> Result<(), ApplyError> {\n\n if update_action.schema_name().is_empty() {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Schema name must be set\",\n\n )));\n\n }\n\n\n\n if update_action.properties().is_empty() {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Properties must not be empty\",\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use grid_sdk::protocol::schema::payload::{\n", "file_path": "contracts/schema/src/payload.rs", "rank": 62, "score": 174968.58318361273 }, { "content": "fn validate_schema_create_action(create_action: &SchemaCreateAction) -> Result<(), ApplyError> {\n\n if create_action.schema_name().is_empty() {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Schema name must be set\",\n\n )));\n\n }\n\n\n\n if create_action.properties().is_empty() {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n\n \"Properties must not be empty\",\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/schema/src/payload.rs", "rank": 63, "score": 174968.58318361273 }, { "content": "fn parse_properties(properties: &[Value]) -> Result<Vec<PropertyDefinition>, CliError> {\n\n properties\n\n .iter()\n\n .map(|value| {\n\n let property = value.as_mapping().ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Failed to parse schema property definition.\".to_string(),\n\n )\n\n })?;\n\n parse_property_definition(property)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 64, "score": 173853.76808307978 }, { "content": "fn get_block_event(events: &[SawtoothEvent]) -> Result<&SawtoothEvent, EventIoError> {\n\n events\n\n .iter()\n\n .find(|event| event.get_event_type() == BLOCK_COMMIT_EVENT_TYPE)\n\n .ok_or_else(|| EventIoError::InvalidMessage(\"no block event found\".into()))\n\n}\n\n\n", "file_path": "daemon/src/sawtooth/event.rs", "rank": 65, "score": 172033.5986729057 }, { "content": "fn parse_metadata(matches: &ArgMatches) -> Result<Vec<KeyValueEntry>, CliError> {\n\n let metadata = matches\n\n .values_of(\"metadata\")\n\n .unwrap_or_default()\n\n .map(String::from)\n\n .collect::<Vec<String>>();\n\n\n\n let mut key_value_entries = Vec::new();\n\n\n\n for data in metadata {\n\n let entries = data.split('=').map(String::from).collect::<Vec<String>>();\n\n\n\n let (key, value) = if entries.len() != 2 {\n\n return Err(CliError::UserError(format!(\"Metadata malformed: {}\", data)));\n\n 
} else {\n\n (entries[0].clone(), entries[1].clone())\n\n };\n\n\n\n key_value_entries.push(\n\n KeyValueEntryBuilder::new()\n\n .with_key(key)\n\n .with_value(value)\n\n .build()\n\n .map_err(|err| CliError::UserError(format!(\"Metadata malformed: {}\", err)))?,\n\n );\n\n }\n\n\n\n Ok(key_value_entries)\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 66, "score": 171559.70696448145 }, { "content": "pub fn get_current_block_id(conn: &PgConnection) -> QueryResult<String> {\n\n block::table\n\n .select(block::block_id)\n\n .order_by(block::block_num.desc())\n\n .limit(1)\n\n .first(conn)\n\n .or_else(|err| {\n\n if err == NotFound {\n\n Ok(NULL_BLOCK_ID.into())\n\n } else {\n\n Err(err)\n\n }\n\n })\n\n}\n", "file_path": "daemon/src/database/helpers/blocks.rs", "rank": 67, "score": 170585.65545166627 }, { "content": "pub fn list_products(conn: &PgConnection) -> QueryResult<Vec<Product>> {\n\n product::table\n\n .select(product::all_columns)\n\n .filter(product::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<Product>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/products.rs", "rank": 68, "score": 170162.31402433076 }, { "content": "pub fn get_agents(conn: &PgConnection) -> QueryResult<Vec<Agent>> {\n\n agent::table\n\n .select(agent::all_columns)\n\n .filter(agent::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<Agent>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/agents.rs", "rank": 69, "score": 170162.31402433076 }, { "content": "pub fn list_organizations(conn: &PgConnection) -> QueryResult<Vec<Organization>> {\n\n organization::table\n\n .select(organization::all_columns)\n\n .filter(organization::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<Organization>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/organizations.rs", "rank": 70, "score": 170162.31402433076 }, { "content": "pub fn resolve_fork(conn: &PgConnection, block_num: i64) -> QueryResult<()> {\n\n delete(chain_record::table)\n\n .filter(chain_record::start_block_num.ge(block_num))\n\n .execute(conn)?;\n\n\n\n update(chain_record::table)\n\n .filter(chain_record::end_block_num.ge(block_num))\n\n .set(chain_record::end_block_num.eq(MAX_BLOCK_NUM))\n\n .execute(conn)?;\n\n\n\n delete(block::table)\n\n .filter(block::block_num.ge(block_num))\n\n .execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/blocks.rs", "rank": 71, "score": 167961.77984404482 }, { "content": "pub fn list_records(conn: &PgConnection) -> QueryResult<Vec<Record>> {\n\n record::table\n\n .select(record::all_columns)\n\n .filter(record::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<Record>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 72, "score": 167961.77984404482 }, { "content": "fn get_state_changes(events: &[SawtoothEvent]) -> Result<Vec<StateChange>, EventIoError> {\n\n Ok(events\n\n .iter()\n\n .filter(|event| event.get_event_type() == STATE_CHANGE_EVENT_TYPE)\n\n .map(|event| {\n\n get_sawtooth_state_changes_from_sawtooth_event(&event)\n\n .and_then(sawtooth_state_changes_into_native_state_changes)\n\n })\n\n .collect::<Result<Vec<_>, _>>()?\n\n .into_iter()\n\n .flatten()\n\n .filter(|state_change| state_change.is_grid_state_change())\n\n .collect())\n\n}\n\n\n", "file_path": "daemon/src/sawtooth/event.rs", "rank": 73, "score": 167235.06614133826 }, { "content": "fn commit_event_height_to_block_num(height: Option<u64>) -> Result<i64, EventError> {\n\n height\n\n .ok_or_else(|| EventError(\"event height cannot be none\".into()))?\n\n 
.try_into()\n\n .map_err(|err| EventError(format!(\"failed to convert event height to i64: {}\", err)))\n\n}\n\n\n", "file_path": "daemon/src/event/db_handler.rs", "rank": 74, "score": 167235.06614133826 }, { "content": "fn parse_struct_values(\n\n conn: &ConnectionPool,\n\n property_name: &str,\n\n record_id: &str,\n\n reported_value_end_block_num: i64,\n\n struct_values: &[String],\n\n) -> Result<Vec<StructPropertyValue>, RestApiResponseError> {\n\n let mut inner_values = vec![];\n\n\n\n for value_name in struct_values {\n\n let struct_property_name = format!(\"{}_{}\", property_name, value_name);\n\n let struct_value = db::fetch_reported_value_reporter_to_agent_metadata(\n\n &*conn.get()?,\n\n &record_id,\n\n &struct_property_name,\n\n Some(reported_value_end_block_num),\n\n )?\n\n .ok_or_else(|| {\n\n RestApiResponseError::NotFoundError(format!(\n\n \"Could not find values for property {} for struct value {} in record {}\",\n", "file_path": "daemon/src/rest_api/routes/records.rs", "rank": 75, "score": 166074.81642290807 }, { "content": "fn get_id_and_height(events: &[SawtoothEvent]) -> Result<(String, Option<u64>), EventIoError> {\n\n let block_event = get_block_event(events)?;\n\n let block_id = get_required_attribute_from_event(block_event, BLOCK_ID_ATTR)?;\n\n let block_num = get_required_attribute_from_event(block_event, BLOCK_NUM_ATTR)?\n\n .parse::<u64>()\n\n .map_err(|err| {\n\n EventIoError::InvalidMessage(format!(\"block_num was not a valid u64: {}\", err))\n\n })?;\n\n Ok((block_id, Some(block_num)))\n\n}\n\n\n", "file_path": "daemon/src/sawtooth/event.rs", "rank": 76, "score": 164780.99235197136 }, { "content": "fn parse_yaml(path: &str, action: Action) -> Result<Vec<SchemaPayload>, CliError> {\n\n let file = std::fs::File::open(path)?;\n\n let schemas_yaml: Vec<Mapping> = serde_yaml::from_reader(file)?;\n\n\n\n match action {\n\n Action::SchemaCreate(_) => schemas_yaml\n\n .iter()\n\n .map(|schema_yaml| {\n\n let properties =\n\n parse_value_as_sequence(schema_yaml, \"properties\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Schema definition is missing `properties` field.\".to_string(),\n\n )\n\n })?;\n\n let property_definitions = parse_properties(&properties)?;\n\n let schema_name = parse_value_as_string(schema_yaml, \"name\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\"Missing `name` field for schema.\".to_string())\n\n })?;\n\n let schema_description = parse_value_as_string(schema_yaml, \"description\")?;\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 77, "score": 164644.32514775026 }, { "content": "pub fn list_grid_schemas(conn: &PgConnection) -> QueryResult<Vec<GridSchema>> {\n\n grid_schema::table\n\n .select(grid_schema::all_columns)\n\n .filter(grid_schema::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<GridSchema>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/grid_schemas.rs", "rank": 78, "score": 163808.7038422302 }, { "content": "/// Generates a public/private key pair that can be used to sign transactions.\n\n/// If no directory is provided, the keys are created in the default directory\n\n///\n\n/// $HOME/.grid/keys/\n\n///\n\n/// If no key_name is provided the key name is set to USER environment variable.\n\npub fn generate_keys(\n\n key_name: Option<&str>,\n\n force: bool,\n\n key_directory: Option<&str>,\n\n) -> Result<(), CliError> {\n\n let key_name = match key_name {\n\n Some(name) => name.to_string(),\n\n None => get_current_username()\n\n .ok_or(0)\n\n .and_then(|os_str| os_str.into_string().map_err(|_| 
0))\n\n .map_err(|_| {\n\n CliError::UserError(String::from(\n\n \"Could not determine key name, please provide one.\",\n\n ))\n\n })?,\n\n };\n\n\n\n let key_dir = match key_directory {\n\n Some(path) => {\n\n let dir = PathBuf::from(&path);\n", "file_path": "cli/src/actions/keygen.rs", "rank": 79, "score": 162603.10107535898 }, { "content": "pub fn do_create_schemas(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n path: &str,\n\n) -> Result<(), CliError> {\n\n let payloads = parse_yaml(path, Action::SchemaCreate(SchemaCreateAction::default()))?;\n\n let mut batch_list_builder = schema_batch_builder(key);\n\n for payload in payloads {\n\n batch_list_builder = batch_list_builder.add_transaction(\n\n &payload.into_proto()?,\n\n &[\n\n PIKE_NAMESPACE.to_string(),\n\n GRID_SCHEMA_NAMESPACE.to_string(),\n\n ],\n\n &[GRID_SCHEMA_NAMESPACE.to_string()],\n\n )?;\n\n }\n\n\n\n let batch_list = batch_list_builder.create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 80, "score": 162598.6812412866 }, { "content": "pub fn do_delete_products(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n product_id: &str,\n\n product_type: &str,\n\n) -> Result<(), CliError> {\n\n let parsed_product_type = parse_value_as_product_type(product_type)?;\n\n let payloads = vec![generate_delete_product_payload(\n\n parsed_product_type,\n\n product_id,\n\n )?];\n\n let batch_list = build_batches_from_payloads(payloads, key)?;\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n\n/**\n\n * Build a batch from our Product Payloads. The CLI is responsible for batch creation.\n\n *\n\n * payloads - Product payloads\n\n * key - Signing key of the agent\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 81, "score": 162598.6812412866 }, { "content": "pub fn do_update_agent(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n update_agent: UpdateAgentAction,\n\n) -> Result<(), CliError> {\n\n let payload = PikePayloadBuilder::new()\n\n .with_action(Action::UpdateAgent)\n\n .with_update_agent(update_agent)\n\n .build()\n\n .map_err(|err| CliError::UserError(format!(\"{}\", err)))?;\n\n\n\n let batch_list = pike_batch_builder(key)\n\n .add_transaction(\n\n &payload.into_proto()?,\n\n &[PIKE_NAMESPACE.to_string()],\n\n &[PIKE_NAMESPACE.to_string()],\n\n )?\n\n .create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n", "file_path": "cli/src/actions/agents.rs", "rank": 82, "score": 162598.6812412866 }, { "content": "pub fn do_create_organization(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n create_org: CreateOrganizationAction,\n\n) -> Result<(), CliError> {\n\n let payload = PikePayloadBuilder::new()\n\n .with_action(Action::CreateOrganization)\n\n .with_create_organization(create_org)\n\n .build()\n\n .map_err(|err| CliError::UserError(format!(\"{}\", err)))?;\n\n\n\n let batch_list = pike_batch_builder(key)\n\n .add_transaction(\n\n &payload.into_proto()?,\n\n &[PIKE_NAMESPACE.to_string()],\n\n &[PIKE_NAMESPACE.to_string()],\n\n )?\n\n .create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n", "file_path": "cli/src/actions/organizations.rs", "rank": 83, "score": 162598.6812412866 }, { "content": "pub fn do_create_agent(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n create_agent: CreateAgentAction,\n\n) -> Result<(), CliError> {\n\n let payload = PikePayloadBuilder::new()\n\n .with_action(Action::CreateAgent)\n\n .with_create_agent(create_agent)\n\n 
.build()\n\n .map_err(|err| CliError::UserError(format!(\"{}\", err)))?;\n\n\n\n let batch_list = pike_batch_builder(key)\n\n .add_transaction(\n\n &payload.into_proto()?,\n\n &[PIKE_NAMESPACE.to_string()],\n\n &[PIKE_NAMESPACE.to_string()],\n\n )?\n\n .create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n", "file_path": "cli/src/actions/agents.rs", "rank": 84, "score": 162598.6812412866 }, { "content": "pub fn do_update_products(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n path: &str,\n\n) -> Result<(), CliError> {\n\n let payloads = parse_product_yaml(path, Action::ProductUpdate(ProductUpdateAction::default()))?;\n\n let batch_list = build_batches_from_payloads(payloads, key)?;\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n\n/**\n\n * Delete an existing product\n\n *\n\n * url - Url for the REST API\n\n * key - Signing key of the agent\n\n * wait - Time in seconds to wait for commit\n\n * path - Path to the yaml file that contains the product descriptions\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 85, "score": 162598.6812412866 }, { "content": "pub fn do_update_schemas(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n path: &str,\n\n) -> Result<(), CliError> {\n\n let payloads = parse_yaml(path, Action::SchemaUpdate(SchemaUpdateAction::default()))?;\n\n let mut batch_list_builder = schema_batch_builder(key);\n\n for payload in payloads {\n\n batch_list_builder = batch_list_builder.add_transaction(\n\n &payload.into_proto()?,\n\n &[\n\n PIKE_NAMESPACE.to_string(),\n\n GRID_SCHEMA_NAMESPACE.to_string(),\n\n ],\n\n &[GRID_SCHEMA_NAMESPACE.to_string()],\n\n )?;\n\n }\n\n\n\n let batch_list = batch_list_builder.create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n", "file_path": "cli/src/actions/schemas.rs", "rank": 86, "score": 162598.6812412866 }, { "content": "pub fn do_update_organization(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n update_org: UpdateOrganizationAction,\n\n) -> Result<(), CliError> {\n\n let payload = PikePayloadBuilder::new()\n\n .with_action(Action::UpdateOrganization)\n\n .with_update_organization(update_org)\n\n .build()\n\n .map_err(|err| CliError::UserError(format!(\"{}\", err)))?;\n\n\n\n let batch_list = pike_batch_builder(key)\n\n .add_transaction(\n\n &payload.into_proto()?,\n\n &[PIKE_NAMESPACE.to_string()],\n\n &[PIKE_NAMESPACE.to_string()],\n\n )?\n\n .create_batch_list();\n\n\n\n submit_batches(url, wait, &batch_list)\n\n}\n", "file_path": "cli/src/actions/organizations.rs", "rank": 87, "score": 162598.6812412866 }, { "content": "pub fn do_create_products(\n\n url: &str,\n\n key: Option<String>,\n\n wait: u64,\n\n path: &str,\n\n) -> Result<(), CliError> {\n\n let payloads = parse_product_yaml(path, Action::ProductCreate(ProductCreateAction::default()))?;\n\n let batch_list = build_batches_from_payloads(payloads, key)?;\n\n submit_batches(url, wait, &batch_list)\n\n}\n\n\n\n/**\n\n * Update an existing product\n\n *\n\n * url - Url for the REST API\n\n * key - Signing key of the agent\n\n * wait - Time in seconds to wait for commit\n\n * path - Path to the yaml file that contains the product descriptions\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 88, "score": 162598.6812412866 }, { "content": "fn parse_product_yaml(path: &str, action: Action) -> Result<Vec<ProductPayload>, CliError> {\n\n let file = std::fs::File::open(path)?;\n\n let products_yaml: Vec<Mapping> = serde_yaml::from_reader(file)?;\n\n\n\n match action {\n\n 
Action::ProductCreate(_) => products_yaml\n\n .iter()\n\n .map(|product_yaml| {\n\n let product_id =\n\n parse_value_as_string(product_yaml, \"product_id\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Missing `product_id` field for Product.\".to_string(),\n\n )\n\n })?;\n\n\n\n let product_type = parse_value_as_product_type(\n\n &parse_value_as_string(product_yaml, \"product_type\")?.ok_or_else(|| {\n\n CliError::InvalidYamlError(\n\n \"Missing `product_type` field for property definition.\".to_string(),\n\n )\n", "file_path": "cli/src/actions/products.rs", "rank": 89, "score": 162524.20428288402 }, { "content": "pub fn list_product_property_values(conn: &PgConnection) -> QueryResult<Vec<ProductPropertyValue>> {\n\n product_property_value::table\n\n .select(product_property_value::all_columns)\n\n .filter(product_property_value::end_block_num.eq(MAX_BLOCK_NUM))\n\n .load::<ProductPropertyValue>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/products.rs", "rank": 90, "score": 161847.01782216097 }, { "content": "pub fn fetch_organization(\n\n conn: &PgConnection,\n\n organization_id: &str,\n\n) -> QueryResult<Option<Organization>> {\n\n organization::table\n\n .select(organization::all_columns)\n\n .filter(\n\n organization::org_id\n\n .eq(organization_id)\n\n .and(organization::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n", "file_path": "daemon/src/database/helpers/organizations.rs", "rank": 91, "score": 160537.22978451182 }, { "content": "pub fn delete_product(\n\n conn: &PgConnection,\n\n address: &str,\n\n current_block_num: i64,\n\n) -> QueryResult<()> {\n\n update(product::table)\n\n .filter(\n\n product::product_address\n\n .eq(address)\n\n .and(product::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .set(product::end_block_num.eq(current_block_num))\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/products.rs", "rank": 92, "score": 160537.22978451182 }, { "content": "pub fn insert_organizations(\n\n conn: &PgConnection,\n\n organizations: &[NewOrganization],\n\n) -> QueryResult<()> {\n\n for org in organizations {\n\n update_org_end_block_num(conn, &org.org_id, org.start_block_num)?;\n\n }\n\n\n\n insert_into(organization::table)\n\n .values(organizations)\n\n .execute(conn)\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/organizations.rs", "rank": 93, "score": 160537.22978451182 }, { "content": "pub fn build_batches_from_payloads(\n\n payloads: Vec<ProductPayload>,\n\n key: Option<String>,\n\n) -> Result<BatchList, CliError> {\n\n let mut batch_list_builder = product_batch_builder(key);\n\n for payload in payloads {\n\n batch_list_builder = batch_list_builder.add_transaction(\n\n &payload.into_proto()?,\n\n &[\n\n PIKE_NAMESPACE.to_string(),\n\n GRID_SCHEMA_NAMESPACE.to_string(),\n\n GRID_PRODUCT_NAMESPACE.to_string(),\n\n ],\n\n &[GRID_PRODUCT_NAMESPACE.to_string()],\n\n )?;\n\n }\n\n\n\n Ok(batch_list_builder.create_batch_list())\n\n}\n\n\n\n/**\n\n * Iterate through a list of products in a yaml file to build our payloads.\n\n *\n\n * path: Path to the yaml file\n\n * action: Determines the type of product payload to generate\n\n */\n", "file_path": "cli/src/actions/products.rs", "rank": 94, "score": 160537.22978451182 }, { "content": "pub fn parse_value_as_sequence(\n\n property: &Mapping,\n\n key: &str,\n\n) -> Result<Option<Sequence>, CliError> {\n\n match 
property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_sequence() {\n\n Some(value) => Ok(Some(value.to_vec())),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. Expected is a yaml list.\",\n\n key\n\n ))),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n/**\n\n * Given a yaml object, parse it as a string\n\n *\n\n * property - Yaml object we wish to parse in as a string\n\n */\n", "file_path": "cli/src/yaml_parser.rs", "rank": 95, "score": 160537.22978451182 }, { "content": "pub fn get_agent(conn: &PgConnection, public_key: &str) -> QueryResult<Option<Agent>> {\n\n agent::table\n\n .select(agent::all_columns)\n\n .filter(\n\n agent::public_key\n\n .eq(public_key)\n\n .and(agent::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .first(conn)\n\n .map(Some)\n\n .or_else(|err| if err == NotFound { Ok(None) } else { Err(err) })\n\n}\n", "file_path": "daemon/src/database/helpers/agents.rs", "rank": 96, "score": 159193.76145867343 }, { "content": "pub fn process_validator_response(\n\n status: ClientBatchSubmitResponse_Status,\n\n) -> Result<(), RestApiResponseError> {\n\n match status {\n\n ClientBatchSubmitResponse_Status::OK => Ok(()),\n\n ClientBatchSubmitResponse_Status::INVALID_BATCH => Err(RestApiResponseError::BadRequest(\n\n \"The submitted BatchList was rejected by the validator. It was '\n\n 'poorly formed, or has an invalid signature.\"\n\n .to_string(),\n\n )),\n\n _ => Err(RestApiResponseError::SawtoothValidatorResponseError(\n\n format!(\"Validator responded with error {:?}\", status),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "daemon/src/sawtooth/batch_submitter.rs", "rank": 97, "score": 158568.97155508393 }, { "content": "pub fn list_reporters(\n\n conn: &PgConnection,\n\n record_id: &str,\n\n property_name: &str,\n\n) -> QueryResult<Vec<Reporter>> {\n\n reporter::table\n\n .filter(\n\n reporter::property_name\n\n .eq(property_name)\n\n .and(reporter::record_id.eq(record_id))\n\n .and(reporter::end_block_num.eq(MAX_BLOCK_NUM)),\n\n )\n\n .load::<Reporter>(conn)\n\n}\n\n\n", "file_path": "daemon/src/database/helpers/track_and_trace.rs", "rank": 98, "score": 158568.97155508393 }, { "content": "pub fn parse_value_as_vec_string(\n\n property: &Mapping,\n\n key: &str,\n\n) -> Result<Option<Vec<String>>, CliError> {\n\n match property.get(&Value::String(key.to_string())) {\n\n Some(value) => match value.as_sequence() {\n\n Some(sequence) => Ok(Some(\n\n sequence\n\n .iter()\n\n .map(|value| match value.as_str() {\n\n Some(value) => Ok(value.to_string()),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Values in {} cannot be parsed to string.\",\n\n key\n\n ))),\n\n })\n\n .collect::<Result<Vec<String>, CliError>>()?,\n\n )),\n\n None => Err(CliError::InvalidYamlError(format!(\n\n \"Value of {} has an invalid format. Expected is a yaml list.\",\n", "file_path": "cli/src/yaml_parser.rs", "rank": 99, "score": 158568.97155508393 } ]
Rust
metrics/src/recorder.rs
kevyang/rpc-perf
30cad3701837cab25c156d7d76e8df10e10d23da
use crate::*; use datastructures::HistogramConfig; use datastructures::RwWrapper; use std::collections::HashSet; use std::sync::Arc; use datastructures::Wrapper; use evmap::{ReadHandle, WriteHandle}; use std::collections::HashMap; #[derive(Clone)] pub struct Recorder { data_read: ReadHandle<String, Arc<Channel>>, data_write: Wrapper<WriteHandle<String, Arc<Channel>>>, labels: RwWrapper<HashSet<String>>, } impl Recorder { pub fn new() -> Self { let (read, write) = evmap::new(); Self { data_read: read, data_write: Wrapper::new(write), labels: RwWrapper::new(HashSet::new()), } } pub fn record(&self, channel: String, measurement: Measurement) { self.data_read .get_and(&channel, |channel| (*channel)[0].record(measurement)); } pub fn counter(&self, channel: String) -> usize { self.data_read .get_and(&channel, |channel| (*channel)[0].counter()) .unwrap_or(0) } pub fn percentile(&self, channel: String, percentile: f64) -> Option<usize> { self.data_read .get_and(&channel, |channel| (*channel)[0].percentile(percentile)) .unwrap_or(None) } pub fn add_channel( &self, name: String, source: Source, histogram_config: Option<HistogramConfig>, ) { debug!("add channel: {} source: {:?}", name, source); let channel = Channel::new(name.clone(), source, histogram_config); if self .data_read .get_and(&name, |channel| channel.len()) .unwrap_or(0) == 0 { unsafe { (*self.data_write.get()).insert(name.clone(), Arc::new(channel)); (*self.data_write.get()).refresh(); (*self.labels.lock()).insert(name); } } } pub fn delete_channel(&self, name: String) { debug!("delete channel: {}", name); unsafe { (*self.data_write.get()).empty(name.clone()); (*self.data_write.get()).refresh(); (*self.labels.lock()).remove(&name); } } pub fn readings(&self) -> Vec<Reading> { let mut result = Vec::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].readings()); if let Some(readings) = readings { result.extend(readings); } } } result } pub fn hash_map(&self) -> HashMap<String, HashMap<Output, usize>> { let mut result = HashMap::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].hash_map()); if let Some(readings) = readings { result.insert(label.to_owned(), readings); } } } result } #[cfg(feature = "waterfall")] pub fn save_files(&self) { unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].save_files()); } } } pub fn add_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].add_output(output)); } pub fn delete_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].delete_output(output)); } pub fn latch(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].latch()); } } } pub fn clear(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].clear()); } } } } impl Default for Recorder { fn default() -> Self { Self::new() } }
use crate::*; use datastructures::HistogramConfig; use datastructures::RwWrapper; use std::collections::HashSet; use std::sync::Arc; use datastructures::Wrapper; use evmap::{ReadHandle, WriteHandle}; use std::collections::HashMap; #[derive(Clone)] pub struct Recorder { data_read: ReadHandle<String, Arc<Channel>>, data_write: Wrapper<WriteHandle<String, Arc<Channel>>>, labels: RwWrapper<HashSet<String>>, } impl Recorder { pub fn new() -> Self { let (read, write) = evmap::new(); Self { data_read: read, data_write: Wrapper::new(write), labels: RwWrapper::new(HashSet::new()), } } pub fn record(&self, channel: String, measurement: Measurement) { self.data_read .get_and(&channel, |channel| (*channel)[0].record(measurement)); } pub fn counter(&self, channel: String) -> usize { self.data_read .get_and(&channel, |channel| (*channel)[0].counter()) .unwrap_or(0) } pub fn percentile(&self, channel: String, percentile: f64) -> Option<usize> { self.data_read .get_and(&channel, |channel| (*channel)[0].percentile(pe
source: {:?}", name, source); let channel = Channel::new(name.clone(), source, histogram_config); if self .data_read .get_and(&name, |channel| channel.len()) .unwrap_or(0) == 0 { unsafe { (*self.data_write.get()).insert(name.clone(), Arc::new(channel)); (*self.data_write.get()).refresh(); (*self.labels.lock()).insert(name); } } } pub fn delete_channel(&self, name: String) { debug!("delete channel: {}", name); unsafe { (*self.data_write.get()).empty(name.clone()); (*self.data_write.get()).refresh(); (*self.labels.lock()).remove(&name); } } pub fn readings(&self) -> Vec<Reading> { let mut result = Vec::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].readings()); if let Some(readings) = readings { result.extend(readings); } } } result } pub fn hash_map(&self) -> HashMap<String, HashMap<Output, usize>> { let mut result = HashMap::new(); unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].hash_map()); if let Some(readings) = readings { result.insert(label.to_owned(), readings); } } } result } #[cfg(feature = "waterfall")] pub fn save_files(&self) { unsafe { for label in &*self.labels.get() { let readings = self .data_read .get_and(label, |channel| (*channel)[0].save_files()); } } } pub fn add_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].add_output(output)); } pub fn delete_output(&self, name: String, output: Output) { self.data_read .get_and(&name, |channel| (*channel)[0].delete_output(output)); } pub fn latch(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].latch()); } } } pub fn clear(&self) { unsafe { for label in &*self.labels.get() { self.data_read .get_and(label, |channel| (*channel)[0].clear()); } } } } impl Default for Recorder { fn default() -> Self { Self::new() } }
rcentile)) .unwrap_or(None) } pub fn add_channel( &self, name: String, source: Source, histogram_config: Option<HistogramConfig>, ) { debug!("add channel: {}
random
[ { "content": "pub fn runner(runtime: f64, source: Source, measurement_type: MeasurementType, label: String) {\n\n for single_channel in [true, false].iter() {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n\n runtime,\n\n source,\n\n measurement_type,\n\n *single_channel,\n\n format!(\"{} (threads: {})\", label, i),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 0, "score": 272716.10576157185 }, { "content": "pub fn runner(runtime: f64, structure: Structure, operation: Operation, label: String) {\n\n match operation {\n\n Operation::Increment => {\n\n for single_channel in [true, false].iter() {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n\n runtime,\n\n structure,\n\n operation,\n\n *single_channel,\n\n format!(\"{} (threads: {})\", label, i),\n\n );\n\n }\n\n }\n\n }\n\n Operation::Percentile => {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 1, "score": 235790.4494071119 }, { "content": "pub trait Session: Read + Write {\n\n // implementation specific\n\n\n\n /// Return a reference to the `session::Common` struct\n\n fn common(&self) -> &Common;\n\n /// Return a mutable reference to the `session::Common` struct\n\n fn common_mut(&mut self) -> &mut Common;\n\n /// Return a reference to the `session::Stream` struct\n\n fn stream(&self) -> &Stream;\n\n /// Return a mutable reference to the `session::Stream` struct\n\n fn stream_mut(&mut self) -> &mut Stream;\n\n /// Handle any reads necessary for session management\n\n fn session_read(&mut self) -> Result<(), Error>;\n\n /// Handle flushing any writes necessary for session management\n\n fn session_flush(&mut self) -> Result<(), Error>;\n\n /// Used to check if the `Session` has completed negotiation\n\n fn is_handshaking(&self) -> bool;\n\n /// Used to clear the contents of the session buffer\n\n fn clear_buffer(&mut self);\n\n /// Reset the session state so it can be reconnected\n", "file_path": "rpc-perf/src/session/mod.rs", "rank": 2, "score": 169154.4027501688 }, { "content": "pub fn register_stats(recorder: &Simple) {\n\n recorder.add_counter_channel(Stat::CommandsGet);\n\n recorder.add_counter_channel(Stat::CommandsSet);\n\n recorder.add_distribution_channel(Stat::KeySize, 0, 60_000_000_000, 3);\n\n recorder.add_distribution_channel(Stat::ValueSize, 0, 60_000_000_000, 3);\n\n recorder.add_counter_channel(Stat::Window);\n\n recorder.add_counter_channel(Stat::RequestsEnqueued);\n\n recorder.add_counter_channel(Stat::RequestsDequeued);\n\n recorder.add_counter_channel(Stat::RequestsError);\n\n recorder.add_counter_channel(Stat::RequestsTimeout);\n\n recorder.add_counter_channel(Stat::ConnectionsTotal);\n\n recorder.add_histogram_channel(Stat::ConnectionsOpened, 0, 60_000_000_000, 3);\n\n recorder.add_counter_channel(Stat::ConnectionsClosed);\n\n recorder.add_counter_channel(Stat::ConnectionsError);\n\n recorder.add_counter_channel(Stat::ConnectionsClientClosed);\n\n recorder.add_counter_channel(Stat::ConnectionsServerClosed);\n\n recorder.add_counter_channel(Stat::ConnectionsTimeout);\n\n recorder.add_histogram_channel(Stat::ResponsesTotal, 0, 60_000_000_000, 3);\n\n recorder.add_counter_channel(Stat::ResponsesOk);\n\n recorder.add_counter_channel(Stat::ResponsesError);\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 3, "score": 142963.88157733303 }, { "content": "pub fn main() {\n\n println!(\"A simple demo of the logger\");\n\n\n\n 
logger::Logger::new()\n\n .label(\"demo\")\n\n .level(logger::Level::Trace)\n\n .init()\n\n .expect(\"Failed to initialize logger\");\n\n trace!(\"Some tracing message\");\n\n debug!(\"Some debugging message\");\n\n info!(\"Just some general info\");\n\n warn!(\"You might want to know this\");\n\n error!(\"You need to know this\");\n\n fatal!(\"Something really bad happened! Terminating program\");\n\n // code below would be unreachable\n\n}\n", "file_path": "logger/examples/demo.rs", "rank": 4, "score": 120827.01931542862 }, { "content": "pub fn main() {\n\n let runtime = 10.0;\n\n\n\n runner(\n\n runtime,\n\n Source::Counter,\n\n MeasurementType::Counter,\n\n \"Counter\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Source::Distribution,\n\n MeasurementType::Distribution,\n\n \"Distribution\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Source::Gauge,\n\n MeasurementType::Gauge,\n\n \"Gauge\".to_string(),\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 5, "score": 120827.01931542862 }, { "content": "pub fn main() {\n\n let runtime = 2.0;\n\n\n\n runner(\n\n runtime,\n\n Structure::Counter,\n\n Operation::Increment,\n\n \"Counter Incr/s\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Structure::FixedHistogram,\n\n Operation::Increment,\n\n \"Fixed Histogram Incr/s\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Structure::FixedHistogram,\n\n Operation::Percentile,\n\n \"Fixed Histogram Percentile/s\".to_string(),\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 6, "score": 120827.01931542862 }, { "content": "pub fn sized_run(\n\n threads: usize,\n\n max: usize,\n\n source: Source,\n\n measurement_type: MeasurementType,\n\n single_channel: bool,\n\n) -> f64 {\n\n let recorder = Recorder::new();\n\n\n\n let mut thread_pool = Vec::new();\n\n let t0 = time::Instant::now();\n\n for tid in 0..threads {\n\n let recorder = recorder.clone();\n\n let label = if !single_channel {\n\n format!(\"test{}\", tid)\n\n } else {\n\n \"test\".to_string()\n\n };\n\n let histogram_config = HistogramConfig::new(0, 2_000_000_001, 3, None);\n\n recorder.add_channel(label.clone(), source, Some(histogram_config));\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 7, "score": 119087.81263657441 }, { "content": "pub fn timed_run(\n\n threads: usize,\n\n runtime: f64,\n\n structure: Structure,\n\n operation: Operation,\n\n single_channel: bool,\n\n label: String,\n\n) {\n\n let max = 100_000;\n\n let duration = sized_run(threads, max, structure, operation, single_channel);\n\n let rate = max as f64 / duration;\n\n let max = (runtime * rate) as usize;\n\n let duration = sized_run(threads, max, structure, operation, single_channel);\n\n let rate = max as f64 / duration;\n\n println!(\n\n \"{} (contended: {}): {:.2e} ops\",\n\n label, single_channel, rate\n\n );\n\n}\n\n\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 8, "score": 119087.81263657441 }, { "content": "pub fn sized_run(\n\n threads: usize,\n\n max: usize,\n\n structure: Structure,\n\n operation: Operation,\n\n contended: bool,\n\n) -> f64 {\n\n let mut thread_pool = Vec::new();\n\n let t0 = time::Instant::now();\n\n match structure {\n\n Structure::Counter => {\n\n if contended {\n\n let counter = Counter::default();\n\n for _ in 0..threads {\n\n let counter = counter.clone();\n\n match operation {\n\n Operation::Increment => {\n\n thread_pool.push(thread::spawn(move || {\n\n for _ in 0..(max / threads) {\n\n counter.incr(1);\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 
9, "score": 119087.81263657441 }, { "content": "pub fn main() {\n\n let config = config::Config::new();\n\n\n\n Logger::new()\n\n .label(\"rpc_perf\")\n\n .level(config.logging())\n\n .init()\n\n .expect(\"Failed to initialize logger\");\n\n\n\n let recorder = Simple::new(&config);\n\n stats::register_stats(&recorder);\n\n\n\n let mut stats_stdout = stats::StandardOut::new(&recorder, config.interval());\n\n\n\n let readings = Arc::new(Mutex::new(Vec::<Reading>::new()));\n\n if let Some(stats_listen) = config.listen() {\n\n let mut stats_http = stats::Http::new(stats_listen, &recorder);\n\n let _ = thread::Builder::new()\n\n .name(\"http\".to_string())\n\n .spawn(move || loop {\n", "file_path": "rpc-perf/src/main.rs", "rank": 10, "score": 119087.81263657441 }, { "content": "pub fn timed_run(\n\n threads: usize,\n\n runtime: f64,\n\n source: Source,\n\n measurement_type: MeasurementType,\n\n single_channel: bool,\n\n label: String,\n\n) {\n\n let max = 100_000;\n\n let duration = sized_run(threads, max, source, measurement_type, single_channel);\n\n let rate = max as f64 / duration;\n\n let max = (runtime * rate) as usize;\n\n let duration = sized_run(threads, max, source, measurement_type, single_channel);\n\n let rate = max as f64 / duration;\n\n println!(\n\n \"{} (single channel: {}): {:.2e} updates/s\",\n\n label, single_channel, rate\n\n );\n\n}\n\n\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 11, "score": 119087.81263657441 }, { "content": "fn string_buffer(string: &str, size: f32) -> ImageBuffer<ColorRgb> {\n\n // load font\n\n let font_data = dejavu::sans_mono::regular();\n\n let collection = FontCollection::from_bytes(font_data as &[u8]).unwrap();\n\n let font = collection.into_font().unwrap();\n\n\n\n // size and scaling\n\n let height: f32 = size;\n\n let pixel_height = height.ceil() as usize;\n\n let scale = Scale {\n\n x: height * 1.0,\n\n y: height,\n\n };\n\n\n\n let v_metrics = font.v_metrics(scale);\n\n let offset = point(0.0, v_metrics.ascent);\n\n\n\n let glyphs: Vec<PositionedGlyph> = font.layout(string, scale, offset).collect();\n\n\n\n let width = glyphs\n", "file_path": "waterfall/src/lib.rs", "rank": 12, "score": 107624.8163409561 }, { "content": "pub fn save_waterfall<S: ::std::hash::BuildHasher>(\n\n heatmap: &Heatmap,\n\n file: &str,\n\n labels: HashMap<usize, String, S>,\n\n interval: usize,\n\n) {\n\n debug!(\"saving waterfall\");\n\n let height = heatmap.slices();\n\n let width = heatmap.buckets();\n\n\n\n // create image buffer\n\n let mut buffer = ImageBuffer::<ColorRgb>::new(width, height);\n\n\n\n let mut y = 0;\n\n let histogram = Latched::new(0, heatmap.highest_count(), 3);\n\n for slice in heatmap {\n\n for b in slice.histogram() {\n\n let magnitude = (b.count() as f64 / b.width() as f64).ceil() as usize;\n\n if magnitude > 0 {\n\n histogram.incr(magnitude, 1);\n", "file_path": "waterfall/src/lib.rs", "rank": 13, "score": 100846.2760660417 }, { "content": "pub trait TryWrite {\n\n fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> io::Result<Option<usize>>\n\n where\n\n Self: Sized,\n\n {\n\n let res = self.try_write(buf.bytes());\n\n\n\n if let Ok(Some(cnt)) = res {\n\n buf.advance(cnt);\n\n }\n\n\n\n res\n\n }\n\n\n\n fn try_write(&mut self, buf: &[u8]) -> io::Result<Option<usize>>;\n\n}\n\n\n\nimpl<T: Read> TryRead for T {\n\n fn try_read(&mut self, dst: &mut [u8]) -> io::Result<Option<usize>> {\n\n self.read(dst).map_non_block()\n\n }\n\n}\n\n\n\nimpl<T: Write> TryWrite for T {\n\n fn try_write(&mut self, src: &[u8]) -> 
io::Result<Option<usize>> {\n\n self.write(src).map_non_block()\n\n }\n\n}\n\n\n", "file_path": "buffer/src/lib.rs", "rank": 14, "score": 100197.47025030309 }, { "content": "pub trait TryRead {\n\n fn try_read_buf<B: BufMut>(&mut self, buf: &mut B) -> io::Result<Option<usize>>\n\n where\n\n Self: Sized,\n\n {\n\n // Reads the length of the slice supplied by buf.mut_bytes into the buffer\n\n // This is not guaranteed to consume an entire datagram or segment.\n\n // If your protocol is msg based (instead of continuous stream) you should\n\n // ensure that your buffer is large enough to hold an entire segment\n\n // (1532 bytes if not jumbo frames)\n\n let res = self.try_read(unsafe { buf.bytes_mut() });\n\n\n\n if let Ok(Some(cnt)) = res {\n\n unsafe {\n\n buf.advance_mut(cnt);\n\n }\n\n }\n\n\n\n res\n\n }\n\n\n\n fn try_read(&mut self, buf: &mut [u8]) -> io::Result<Option<usize>>;\n\n}\n\n\n", "file_path": "buffer/src/lib.rs", "rank": 15, "score": 100197.47025030309 }, { "content": "/// a helper function to parse a floating point argument by name from `ArgMatches`\n\nfn parse_float_arg(matches: &ArgMatches, key: &str) -> Option<f64> {\n\n matches.value_of(key).map(|f| {\n\n f.parse().unwrap_or_else(|_| {\n\n println!(\"ERROR: could not parse {}\", key);\n\n process::exit(1);\n\n })\n\n })\n\n}\n", "file_path": "rpc-perf/src/config/mod.rs", "rank": 16, "score": 95279.9945109648 }, { "content": "/// maps a value to a color based on a low point, mid point, and high point\n\n/// values below low will clip to black\n\n/// mid point is the transition between luminosity (black-blue) and hue (blue->red) ramps\n\n/// values above high will clip to red\n\nfn color_from_value(value: usize, low: usize, mid: usize, high: usize) -> ColorRgb {\n\n let hsl = if value < low {\n\n HSL {\n\n h: 250.0,\n\n s: 1.0,\n\n l: 0.0,\n\n }\n\n } else if value < mid {\n\n HSL {\n\n h: 250.0,\n\n s: 1.0,\n\n l: (value as f64 / mid as f64) * 0.5,\n\n }\n\n } else if value < high {\n\n HSL {\n\n h: 250.0 - (250.0 * (value - mid) as f64 / high as f64),\n\n s: 1.0,\n\n l: 0.5,\n\n }\n\n } else {\n", "file_path": "waterfall/src/lib.rs", "rank": 17, "score": 94791.50596402731 }, { "content": "fn default_clients() -> usize {\n\n 1\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 18, "score": 94465.12632543241 }, { "content": "fn default_interval() -> usize {\n\n 60\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 19, "score": 94465.12632543241 }, { "content": "fn default_poolsize() -> usize {\n\n 1\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 20, "score": 94465.12632543241 }, { "content": "fn default_request_timeout() -> usize {\n\n 200 * MILLISECOND / MICROSECOND\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 21, "score": 92932.02458213636 }, { "content": "fn default_connect_timeout() -> usize {\n\n 200 * MILLISECOND / MICROSECOND\n\n}\n\n\n\n#[derive(Copy, Clone, Deserialize, Debug)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Protocol {\n\n Memcache,\n\n Ping,\n\n Echo,\n\n RedisResp,\n\n RedisInline,\n\n}\n\nimpl Default for Protocol {\n\n fn default() -> Protocol {\n\n Protocol::Memcache\n\n }\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 22, "score": 92932.02458213636 }, { "content": "fn delta_count<T: ToString>(\n\n a: &HashMap<String, HashMap<Output, usize>>,\n\n b: &HashMap<String, HashMap<Output, usize>>,\n\n label: T,\n\n) -> Option<usize> {\n\n let output = Output::Counter;\n\n let label = 
label.to_string();\n\n if let Some(a_outputs) = a.get(&label) {\n\n let a_value = a_outputs.get(&output).unwrap_or(&0);\n\n if let Some(b_outputs) = b.get(&label) {\n\n let b_value = b_outputs.get(&output).unwrap_or(&0);\n\n\n\n Some(b_value - a_value)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 23, "score": 91440.78709927273 }, { "content": "fn delta_percent<T: ToString>(\n\n a: &HashMap<String, HashMap<Output, usize>>,\n\n b: &HashMap<String, HashMap<Output, usize>>,\n\n label_a: T,\n\n label_b: T,\n\n) -> Option<f64> {\n\n let delta_a = delta_count(a, b, label_a);\n\n let delta_b = delta_count(a, b, label_b);\n\n\n\n if let Some(a) = delta_a {\n\n if let Some(b) = delta_b {\n\n if b == 0 {\n\n Some(100.0)\n\n } else {\n\n Some(100.0 * a as f64 / b as f64)\n\n }\n\n } else {\n\n Some(100.0)\n\n }\n\n } else {\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 24, "score": 91440.78709927273 }, { "content": "fn default_windows() -> Option<usize> {\n\n Some(5)\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 25, "score": 91379.69784573572 }, { "content": "fn do_warmup(config: &Config, recorder: &Simple) {\n\n if let Some(target) = config.warmup_hitrate() {\n\n info!(\"-----\");\n\n info!(\"Warming the cache...\");\n\n let control = Bool::new(true);\n\n launch_clients(&config, &recorder, control.clone());\n\n\n\n let mut warm = 0;\n\n\n\n loop {\n\n std::thread::sleep(std::time::Duration::new(config.interval() as u64, 0));\n\n recorder.increment(Stat::Window);\n\n\n\n let hit = recorder.counter(Stat::ResponsesHit) as f64;\n\n let miss = recorder.counter(Stat::ResponsesMiss) as f64;\n\n let hitrate = hit / (hit + miss);\n\n\n\n debug!(\"Hit-rate: {:.2}%\", hitrate * 100.0);\n\n if hitrate >= target {\n\n warm += 1;\n", "file_path": "rpc-perf/src/main.rs", "rank": 26, "score": 88908.09922668699 }, { "content": "fn launch_clients(config: &Config, recorder: &stats::Simple, control: Bool) {\n\n let request_ratelimiter = if let Some(limit) = config.request_ratelimit() {\n\n Some(Ratelimiter::new(config.clients(), 1, limit))\n\n } else {\n\n None\n\n };\n\n\n\n let connect_ratelimiter = if let Some(limit) = config.connect_ratelimit() {\n\n Some(Ratelimiter::new(config.clients(), 1, limit))\n\n } else {\n\n None\n\n };\n\n\n\n let close_rate = if let Some(rate) = config.close_rate() {\n\n Some(Ratelimiter::new(config.clients(), 1, rate))\n\n } else {\n\n None\n\n };\n\n\n\n for i in 0..config.clients() {\n", "file_path": "rpc-perf/src/main.rs", "rank": 27, "score": 80723.82538521755 }, { "content": "/// a helper function to parse a numeric argument by name from `ArgMatches`\n\nfn parse_numeric_arg(matches: &ArgMatches, key: &str) -> Option<usize> {\n\n matches.value_of(key).map(|f| {\n\n f.parse().unwrap_or_else(|_| {\n\n println!(\"ERROR: could not parse {}\", key);\n\n process::exit(1);\n\n })\n\n })\n\n}\n\n\n", "file_path": "rpc-perf/src/config/mod.rs", "rank": 28, "score": 79031.13024578732 }, { "content": "#[cfg(not(feature = \"tls\"))]\n\nfn make_client(id: usize, codec: Box<Codec>, _config: &Config) -> Box<Client> {\n\n Box::new(PlainClient::new(id, codec))\n\n}\n\n\n", "file_path": "rpc-perf/src/main.rs", "rank": 29, "score": 76958.12442303558 }, { "content": "#[cfg(feature = \"tls\")]\n\nfn make_client(id: usize, codec: Box<Codec>, config: &Config) -> Box<Client> {\n\n if config.tls_ca().is_some() && config.tls_key().is_some() && config.tls_cert().is_some() {\n\n let mut client = 
TLSClient::new(id, codec);\n\n if let Some(cafile) = config.tls_ca() {\n\n client.load_ca(&cafile);\n\n }\n\n\n\n if let Some(keyfile) = config.tls_key() {\n\n if let Some(certfile) = config.tls_cert() {\n\n client.load_key_and_cert(&keyfile, &certfile);\n\n }\n\n }\n\n Box::new(client)\n\n } else {\n\n Box::new(PlainClient::new(id, codec))\n\n }\n\n}\n\n\n", "file_path": "rpc-perf/src/main.rs", "rank": 30, "score": 76958.12442303558 }, { "content": "struct Sample {\n\n value: usize,\n\n count: usize,\n\n time: time::Instant,\n\n direction: Direction,\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Moving {\n\n type Item = &'a Bucket;\n\n type IntoIter = Iter<'a>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.data.into_iter()\n\n }\n\n}\n\n\n\nimpl Moving {\n\n /// Create a new `MovingHistogram` with the given min, max, precision, and window\n\n pub fn new(min: usize, max: usize, precision: usize, window: time::Duration) -> Self {\n\n Self {\n", "file_path": "datastructures/src/histogram/moving.rs", "rank": 31, "score": 65565.55772700979 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\nstruct ColorRgb {\n\n pub r: u8,\n\n pub g: u8,\n\n pub b: u8,\n\n}\n\n\n", "file_path": "waterfall/src/lib.rs", "rank": 32, "score": 65565.55772700979 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct Command {\n\n action: Action,\n\n weight: usize,\n\n}\n\n\n\nimpl Command {\n\n pub fn action(&self) -> Action {\n\n self.action\n\n }\n\n\n\n pub fn weight(&self) -> usize {\n\n self.weight\n\n }\n\n}\n\n\n", "file_path": "rpc-perf/src/config/mod.rs", "rank": 33, "score": 64689.8004715869 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct Value {\n\n length: usize,\n\n weight: usize,\n\n}\n\n\n\nimpl Value {\n\n pub fn length(&self) -> usize {\n\n self.length\n\n }\n\n\n\n pub fn weight(&self) -> usize {\n\n self.weight\n\n }\n\n}\n\n\n\nimpl Config {\n\n /// parse command line options and return `Config`\n\n pub fn new() -> Config {\n\n let app = App::new(NAME)\n\n .version(VERSION)\n", "file_path": "rpc-perf/src/config/mod.rs", "rank": 34, "score": 64689.8004715869 }, { "content": "struct ImageBuffer<T> {\n\n buffer: Vec<Vec<T>>,\n\n height: usize,\n\n width: usize,\n\n}\n\n\n", "file_path": "waterfall/src/lib.rs", "rank": 35, "score": 62480.825524536354 }, { "content": "fn main() {\n\n Logger::new()\n\n .label(\"simulator\")\n\n .level(Level::Debug)\n\n .init()\n\n .expect(\"Failed to initialize logger\");\n\n\n\n info!(\"Welcome to the simulator!\");\n\n\n\n let histogram = FixedHistogram::new(0, 1_000_000, 2);\n\n let heatmap = HeatmapBuilder::new(0, 1_000_000, 2, 1_000_000, 5_000_000_000).build();\n\n\n\n let distribution = Normal::new(500.0, 250.0);\n\n\n\n let start = std::time::Instant::now();\n\n\n\n loop {\n\n let now = std::time::Instant::now();\n\n if now - start >= std::time::Duration::new(5, 0) {\n\n break;\n", "file_path": "waterfall/examples/simulator.rs", "rank": 36, "score": 61987.51953367567 }, { "content": "fn main() {\n\n let limiter = Ratelimiter::new(1, 1, 1);\n\n for i in 0..10 {\n\n limiter.wait();\n\n println!(\"T -{}\", 10 - i);\n\n }\n\n println!(\"Ignition\");\n\n}\n", "file_path": "ratelimiter/examples/blastoff.rs", "rank": 37, "score": 61987.51953367567 }, { "content": "/// A set of common functions for all `Histogram` types\n\npub trait Histogram {\n\n /// Clear all samples from histogram\n\n fn clear(&self);\n\n /// Return the number of samples seen with the nominal value\n\n fn count(&self, value: usize) -> usize;\n\n /// Decrement 
the number of samples for the nominal value by count\n\n fn decr(&self, value: usize, count: usize);\n\n /// Increment the number of samples for the nominal value by count\n\n fn incr(&self, value: usize, count: usize);\n\n /// Return the maximum value that can be stored\n\n fn max(&self) -> usize;\n\n /// Return the minimum value that can be stored\n\n fn min(&self) -> usize;\n\n /// Calculate the percentile (0.0-1.0)\n\n fn percentile(&self, percentile: f64) -> Option<usize>;\n\n /// Return the precision in significant figures\n\n fn precision(&self) -> usize;\n\n /// Return the number of samples that were below the minimum storable value\n\n fn too_low(&self) -> usize;\n\n /// Return the number of samples that were above the maximum storable value\n", "file_path": "datastructures/src/histogram/mod.rs", "rank": 38, "score": 60724.62288693116 }, { "content": "#[allow(dead_code)]\n\nfn spare() {\n\n println!();\n\n}\n", "file_path": "datastructures/ffi/cdatastructures/src/histogram.rs", "rank": 39, "score": 60213.07688415851 }, { "content": "pub trait Decoder: Send {\n\n fn decode(&self, buf: &[u8]) -> Result<Response, Error>;\n\n}\n", "file_path": "codec/src/lib.rs", "rank": 40, "score": 58997.48288856172 }, { "content": "pub trait Client: Send {\n\n // configuration\n\n fn add_endpoint(&mut self, server: &SocketAddr);\n\n fn set_connect_ratelimit(&mut self, ratelimiter: Option<Ratelimiter>) {\n\n self.common_mut().set_connect_ratelimit(ratelimiter)\n\n }\n\n fn set_connect_timeout(&mut self, microseconds: usize) {\n\n self.common_mut().set_connect_timeout(microseconds)\n\n }\n\n fn set_poolsize(&mut self, connections: usize) {\n\n self.common_mut().set_poolsize(connections);\n\n }\n\n fn poolsize(&self) -> usize {\n\n self.common().poolsize()\n\n }\n\n fn set_tcp_nodelay(&mut self, nodelay: bool) {\n\n self.common_mut().set_tcp_nodelay(nodelay);\n\n }\n\n fn tcp_nodelay(&self) -> bool {\n\n self.common().tcp_nodelay()\n", "file_path": "rpc-perf/src/client/mod.rs", "rank": 41, "score": 57363.003143889946 }, { "content": "pub trait Codec: Send {\n\n fn common(&self) -> &Common;\n\n fn common_mut(&mut self) -> &mut Common;\n\n fn decode(&self, buf: &[u8]) -> Result<Response, Error>;\n\n fn encode(&mut self, buf: &mut BytesMut, rng: &mut ThreadRng);\n\n\n\n fn generate(&self, rng: &mut ThreadRng) -> Command {\n\n self.common().generator.generate(rng)\n\n }\n\n fn set_generator(&mut self, generator: Generator) {\n\n self.common_mut().set_generator(generator);\n\n }\n\n fn set_recorder(&mut self, recorder: Simple) {\n\n self.common_mut().set_recorder(recorder);\n\n }\n\n}\n\n\n\npub struct Common {\n\n generator: Generator,\n\n recorder: Option<Simple>,\n", "file_path": "rpc-perf/src/codec/mod.rs", "rank": 42, "score": 57363.003143889946 }, { "content": "fn default_tcp_nodelay() -> bool {\n\n false\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 43, "score": 55972.972417428915 }, { "content": "fn default_logging_level() -> Level {\n\n Level::Info\n\n}\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 44, "score": 55972.972417428915 }, { "content": "\n\nimpl Channel {\n\n pub fn new(name: String, source: Source, histogram_config: Option<HistogramConfig>) -> Self {\n\n let histogram = if let Some(config) = histogram_config {\n\n Some(config.build())\n\n } else {\n\n None\n\n };\n\n Self {\n\n name: RwWrapper::new(name),\n\n source,\n\n counter: Counter::default(),\n\n histogram,\n\n last_write: Counter::default(),\n\n latched: true,\n\n max: Point::new(0, 0),\n\n 
min: Point::new(0, 0),\n\n outputs: RwWrapper::new(HashSet::new()),\n\n }\n\n }\n", "file_path": "metrics/src/channel.rs", "rank": 53, "score": 42026.60314784991 }, { "content": " }\n\n Measurement::Increment { value, time } => self.record_increment(value, time),\n\n Measurement::TimeInterval { start, stop } => self.record_time_interval(start, stop),\n\n }\n\n }\n\n\n\n // for Counter measurements:\n\n // counter tracks value\n\n // histogram tracks rate of change\n\n fn record_counter(&self, value: usize, time: usize) {\n\n if self.source == Source::Counter {\n\n let previous = self.counter.get();\n\n if previous > 0 {\n\n // calculate the difference between consecutive readings and the rate\n\n let delta_value = value - previous;\n\n let delta_time = time - self.last_write.get();\n\n let rate = (delta_value as f64 * (1_000_000_000.0 / delta_time as f64)) as usize;\n\n trace!(\n\n \"delta value: {} time: {} rate: {}\",\n\n delta_value,\n", "file_path": "metrics/src/channel.rs", "rank": 54, "score": 42024.30613863223 }, { "content": " if let Some(value) = self.percentile(percentile.as_f64()) {\n\n result.push(Reading::new(self.name(), output.clone(), value));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n result\n\n }\n\n\n\n pub fn hash_map(&self) -> HashMap<Output, usize> {\n\n let mut result = HashMap::new();\n\n unsafe {\n\n for output in (*self.outputs.lock()).iter() {\n\n trace!(\"generate reading for: {} {:?}\", self.name(), *output);\n\n match output {\n\n Output::Counter => {\n\n result.insert(output.clone(), self.counter());\n\n }\n\n Output::MaxPointTime => {\n", "file_path": "metrics/src/channel.rs", "rank": 55, "score": 42023.46869806838 }, { "content": "// #[derive(Clone)]\n\npub struct Channel {\n\n name: RwWrapper<String>,\n\n source: Source,\n\n counter: Counter,\n\n histogram: Option<Box<Histogram>>,\n\n last_write: Counter,\n\n latched: bool,\n\n max: Point,\n\n min: Point,\n\n outputs: RwWrapper<HashSet<Output>>,\n\n}\n\n\n\nimpl PartialEq for Channel {\n\n fn eq(&self, other: &Channel) -> bool {\n\n self.name() == other.name()\n\n }\n\n}\n\n\n\nimpl Eq for Channel {}\n", "file_path": "metrics/src/channel.rs", "rank": 56, "score": 42020.44662706748 }, { "content": "\n\n pub fn name(&self) -> String {\n\n unsafe { (*self.name.get()).clone() }\n\n }\n\n\n\n pub fn source(&self) -> Source {\n\n self.source\n\n }\n\n\n\n pub fn record(&self, measurement: Measurement) {\n\n trace!(\"record: {} {:?}\", self.name(), measurement);\n\n match measurement {\n\n Measurement::Counter { value, time } => {\n\n self.record_counter(value, time);\n\n }\n\n Measurement::Distribution { value, count, time } => {\n\n self.record_distribution(value, count, time);\n\n }\n\n Measurement::Gauge { value, time } => {\n\n self.record_gauge(value, time);\n", "file_path": "metrics/src/channel.rs", "rank": 57, "score": 42019.29978346249 }, { "content": " pub fn readings(&self) -> Vec<Reading> {\n\n let mut result = Vec::new();\n\n unsafe {\n\n for output in (*self.outputs.lock()).iter() {\n\n trace!(\"generate reading for: {} {:?}\", self.name(), *output);\n\n match output {\n\n Output::Counter => {\n\n result.push(Reading::new(self.name(), output.clone(), self.counter()));\n\n }\n\n Output::MaxPointTime => {\n\n if self.max.time() > 0 {\n\n result.push(Reading::new(self.name(), output.clone(), self.max.time()));\n\n }\n\n }\n\n Output::MinPointTime => {\n\n if self.max.time() > 0 {\n\n result.push(Reading::new(self.name(), output.clone(), self.min.time()));\n\n }\n\n }\n\n Output::Percentile(percentile) => {\n", 
"file_path": "metrics/src/channel.rs", "rank": 58, "score": 42017.65188304258 }, { "content": " } else {\n\n self.max.set(duration, start);\n\n }\n\n // track point of smallest interval\n\n if self.min.time() > 0 {\n\n if duration < self.min.value() {\n\n self.min.set(duration, start);\n\n }\n\n } else {\n\n self.min.set(duration, start);\n\n }\n\n }\n\n }\n\n\n\n pub fn counter(&self) -> usize {\n\n self.counter.get()\n\n }\n\n\n\n pub fn percentile(&self, percentile: f64) -> Option<usize> {\n\n if let Some(ref histogram) = self.histogram {\n", "file_path": "metrics/src/channel.rs", "rank": 59, "score": 42017.58746551388 }, { "content": "use std::collections::HashSet;\n\n\n\n#[derive(Debug)]\n\npub enum Measurement {\n\n // taken from a counter eg: number of requests\n\n Counter {\n\n value: usize,\n\n time: usize,\n\n },\n\n // taken from a distribution eg: an external histogram\n\n Distribution {\n\n value: usize,\n\n count: usize,\n\n time: usize,\n\n },\n\n // taken from a gauge eg: bytes of memory used\n\n Gauge {\n\n value: usize,\n\n time: usize,\n\n },\n", "file_path": "metrics/src/channel.rs", "rank": 60, "score": 42017.14577826782 }, { "content": " }\n\n }\n\n\n\n // for Gauge measurements:\n\n // counter tracks latest reading\n\n // histogram tracks readings\n\n // max tracks largest reading\n\n // min tracks smallest reading\n\n fn record_gauge(&self, value: usize, time: usize) {\n\n if self.source == Source::Gauge {\n\n self.counter.set(value);\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.incr(value, 1);\n\n }\n\n // track the point of max gauge reading\n\n if self.max.time() > 0 {\n\n if value > self.max.value() {\n\n self.max.set(value, time);\n\n }\n\n } else {\n", "file_path": "metrics/src/channel.rs", "rank": 61, "score": 42016.85147829711 }, { "content": " } else {\n\n self.min.set(rate, time);\n\n }\n\n } else {\n\n self.counter.set(value);\n\n }\n\n self.last_write.set(time);\n\n }\n\n }\n\n\n\n // for Distribution measurements:\n\n // counter tracks sum of all counts\n\n // histogram tracks values\n\n fn record_distribution(&self, value: usize, count: usize, time: usize) {\n\n if self.source == Source::Distribution {\n\n self.counter.incr(count);\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.incr(value, count);\n\n }\n\n self.last_write.set(time);\n", "file_path": "metrics/src/channel.rs", "rank": 62, "score": 42016.60694771623 }, { "content": " self.max.set(value, time);\n\n }\n\n // track the point of min rate\n\n if self.min.time() > 0 {\n\n if value < self.min.value() {\n\n self.min.set(value, time);\n\n }\n\n } else {\n\n self.min.set(value, time);\n\n }\n\n self.last_write.set(time);\n\n }\n\n }\n\n\n\n // for Increment measurements:\n\n // counter tracks sum of all increments\n\n // histogram tracks magnitude of increments\n\n fn record_increment(&self, value: usize, time: usize) {\n\n if self.source == Source::Counter {\n\n self.counter.incr(value);\n", "file_path": "metrics/src/channel.rs", "rank": 63, "score": 42015.15561997525 }, { "content": " if let Some(ref histogram) = self.histogram {\n\n histogram.incr(value, 1);\n\n }\n\n self.last_write.set(time);\n\n }\n\n }\n\n\n\n // for TimeInterval measurements, we increment the histogram with duration of event\n\n fn record_time_interval(&self, start: usize, stop: usize) {\n\n if self.source == Source::TimeInterval {\n\n self.counter.incr(1);\n\n let duration = stop - start;\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.incr(duration, 1);\n\n }\n\n // 
track point of largest interval\n\n if self.max.time() > 0 {\n\n if duration > self.max.value() {\n\n self.max.set(duration, start);\n\n }\n", "file_path": "metrics/src/channel.rs", "rank": 64, "score": 42014.81572775398 }, { "content": "// Copyright 2019 Twitter, Inc\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::*;\n\nuse datastructures::HistogramConfig;\n\n\n\nuse datastructures::{Counter, Histogram, RwWrapper};\n\n\n\nuse std::collections::HashMap;\n", "file_path": "metrics/src/channel.rs", "rank": 65, "score": 42011.79065979565 }, { "content": " if self.max.time() > 0 {\n\n result.insert(output.clone(), self.max.time());\n\n }\n\n }\n\n Output::MinPointTime => {\n\n if self.max.time() > 0 {\n\n result.insert(output.clone(), self.min.time());\n\n }\n\n }\n\n Output::Percentile(percentile) => {\n\n if let Some(value) = self.percentile(percentile.as_f64()) {\n\n result.insert(output.clone(), value);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n result\n\n }\n\n}\n", "file_path": "metrics/src/channel.rs", "rank": 66, "score": 42009.63312590688 }, { "content": " histogram.percentile(percentile)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn add_output(&self, output: Output) {\n\n trace!(\"add output: {} {:?}\", self.name(), output);\n\n unsafe {\n\n (*self.outputs.lock()).insert(output);\n\n }\n\n }\n\n\n\n pub fn delete_output(&self, output: Output) {\n\n trace!(\"delete output: {} {:?}\", self.name(), output);\n\n unsafe {\n\n (*self.outputs.lock()).remove(&output);\n\n }\n\n }\n\n\n", "file_path": "metrics/src/channel.rs", "rank": 67, "score": 42009.538279382876 }, { "content": " // incremental count to sum into a counter\n\n Increment {\n\n value: usize,\n\n time: usize,\n\n },\n\n // the start and stop of an event\n\n TimeInterval {\n\n start: usize,\n\n stop: usize,\n\n },\n\n}\n\n\n\n#[derive(PartialEq, Debug, Copy, Clone)]\n\npub enum Source {\n\n Counter,\n\n Distribution,\n\n Gauge,\n\n TimeInterval,\n\n}\n\n\n", "file_path": "metrics/src/channel.rs", "rank": 68, "score": 42009.18458045675 }, { "content": " pub fn latch(&self) {\n\n if self.latched {\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.clear();\n\n }\n\n }\n\n self.max.set(0, 0);\n\n self.min.set(0, 0);\n\n }\n\n\n\n pub fn clear(&self) {\n\n self.last_write.set(0);\n\n self.counter.set(0);\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.clear();\n\n }\n\n self.max.set(0, 0);\n\n self.min.set(0, 0);\n\n }\n\n\n", "file_path": "metrics/src/channel.rs", "rank": 69, "score": 42008.79414463062 }, { "content": " delta_time,\n\n rate\n\n );\n\n self.counter.incr(delta_value);\n\n if let Some(ref histogram) = self.histogram {\n\n histogram.incr(rate, 1);\n\n }\n\n // track the point of max rate\n\n if self.max.time() > 0 {\n\n if rate > self.max.value() {\n\n self.max.set(rate, time);\n\n }\n\n } else {\n\n self.max.set(rate, time);\n\n }\n\n // track the point of min rate\n\n if self.min.time() > 0 {\n\n if rate < self.min.value() {\n\n 
self.min.set(rate, time);\n\n }\n", "file_path": "metrics/src/channel.rs", "rank": 70, "score": 42000.65641614366 }, { "content": "extern uintptr_t histogram_percentile(histogram_t *, float);\n", "file_path": "datastructures/ffi/cdatastructures/src/histogram.h", "rank": 71, "score": 36330.03471767803 }, { "content": "extern counter_t * counter_new(void);\n", "file_path": "datastructures/ffi/cdatastructures/src/counter.h", "rank": 72, "score": 36316.74836600414 }, { "content": "extern histogram_t * histogram_new(uintptr_t, uintptr_t, uintptr_t);\n", "file_path": "datastructures/ffi/cdatastructures/src/histogram.h", "rank": 73, "score": 36316.74836600414 }, { "content": " recorder.add_counter_channel(Stat::ResponsesHit);\n\n recorder.add_counter_channel(Stat::ResponsesMiss);\n\n}\n\n\n\npub struct StandardOut<'a> {\n\n previous: HashMap<String, HashMap<Output, usize>>,\n\n recorder: &'a Simple,\n\n interval: usize,\n\n}\n\n\n\nimpl<'a> StandardOut<'a> {\n\n pub fn new(recorder: &'a Simple, interval: usize) -> Self {\n\n Self {\n\n previous: recorder.hash_map(),\n\n recorder,\n\n interval,\n\n }\n\n }\n\n\n\n fn display_percentiles(&self, stat: Stat, label: &str, divisor: usize, unit: &str) {\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 74, "score": 39.096368108747924 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum Output {\n\n Counter,\n\n MaxPointTime,\n\n MinPointTime,\n\n Percentile(Percentile),\n\n}\n\n\n\npub struct Reading {\n\n label: String,\n\n output: Output,\n\n value: usize,\n\n}\n\n\n\nimpl Reading {\n\n pub fn new(label: String, output: Output, value: usize) -> Self {\n\n Self {\n\n label,\n", "file_path": "metrics/src/lib.rs", "rank": 75, "score": 38.780590754507585 }, { "content": "//!\n\n//! ## Reading\n\n//! A `Reading` represents the value of a metric at a point in time. The\n\n//! `Reading` stores information about the `Channel` label, the `Output` it\n\n//! 
corresponds to, and the value.\n\n\n\nmod channel;\n\nmod point;\n\nmod recorder;\n\n\n\npub use crate::channel::{Channel, Measurement, Source};\n\npub use crate::point::Point;\n\npub use crate::recorder::Recorder;\n\npub use datastructures::HistogramConfig;\n\npub(crate) use logger::*;\n\n\n\nuse std::fmt;\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n#[allow(non_camel_case_types)]\n", "file_path": "metrics/src/lib.rs", "rank": 76, "score": 35.40320952799711 }, { "content": " heatmap.incr(start, stop - start, 1);\n\n }\n\n }\n\n\n\n pub fn distribution<T: ToString>(&self, label: T, value: usize) {\n\n self.inner.record(\n\n label.to_string(),\n\n Measurement::Distribution {\n\n time: time::precise_time_ns() as usize,\n\n value,\n\n count: 1,\n\n },\n\n );\n\n }\n\n\n\n pub fn percentile<T: ToString>(&self, label: T, percentile: f64) -> Option<usize> {\n\n self.inner.percentile(label.to_string(), percentile)\n\n }\n\n\n\n pub fn latch(&self) {\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 77, "score": 35.029366739903104 }, { "content": " .add_output(label.to_string(), Output::Percentile(Percentile::p75));\n\n self.inner\n\n .add_output(label.to_string(), Output::Percentile(Percentile::p90));\n\n self.inner\n\n .add_output(label.to_string(), Output::Percentile(Percentile::p99));\n\n self.inner\n\n .add_output(label.to_string(), Output::Percentile(Percentile::p999));\n\n self.inner\n\n .add_output(label.to_string(), Output::Percentile(Percentile::p9999));\n\n }\n\n\n\n pub fn add_distribution_channel<T: ToString>(\n\n &self,\n\n label: T,\n\n min: usize,\n\n max: usize,\n\n precision: usize,\n\n ) {\n\n let histogram_config = HistogramConfig::new(min, max, precision, None);\n\n self.inner.add_channel(\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 78, "score": 30.070818986740957 }, { "content": " self.inner.add_output(label.to_string(), Output::Counter);\n\n }\n\n\n\n pub fn add_histogram_channel<T: ToString>(\n\n &self,\n\n label: T,\n\n min: usize,\n\n max: usize,\n\n precision: usize,\n\n ) {\n\n let histogram_config = HistogramConfig::new(min, max, precision, None);\n\n self.inner.add_channel(\n\n label.to_string(),\n\n Source::TimeInterval,\n\n Some(histogram_config),\n\n );\n\n self.inner.add_output(label.to_string(), Output::Counter);\n\n self.inner\n\n .add_output(label.to_string(), Output::Percentile(Percentile::p50));\n\n self.inner\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 79, "score": 29.459141919717425 }, { "content": " use super::*;\n\n\n\n fn approx_eq(a: usize, b: usize, precision: usize) -> bool {\n\n let power = 10_u32.pow(precision as u32) as f64;\n\n let a = (a as f64).log(power) as usize;\n\n let b = (b as f64).log(power) as usize;\n\n if a >= (b - 1) && a <= (b + 1) {\n\n println!(\"{} ~= {}\", a, b);\n\n true\n\n } else {\n\n println!(\"{} !~= {}\", a, b);\n\n false\n\n }\n\n }\n\n\n\n #[test]\n\n fn counter_channel() {\n\n let recorder = Recorder::new();\n\n let name = \"test\".to_string();\n\n let histogram_config = HistogramConfig::new(0, 2_000_000_001, 3, None);\n", "file_path": "metrics/src/lib.rs", "rank": 80, "score": 29.228423902528835 }, { "content": " self.inner.counter(label.to_string())\n\n }\n\n\n\n pub fn increment<T: ToString>(&self, label: T) {\n\n self.inner.record(\n\n label.to_string(),\n\n Measurement::Increment {\n\n time: time::precise_time_ns() as usize,\n\n value: 1,\n\n },\n\n )\n\n }\n\n\n\n pub fn time_interval<T: ToString>(&self, label: T, start: usize, stop: usize) {\n\n self.inner\n\n 
.record(label.to_string(), Measurement::TimeInterval { start, stop });\n\n }\n\n\n\n pub fn heatmap_increment(&self, start: usize, stop: usize) {\n\n if let Some(ref heatmap) = self.heatmap {\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 81, "score": 28.35012279719817 }, { "content": " windows * config.interval() * SECOND,\n\n )\n\n .build(),\n\n )\n\n } else {\n\n warn!(\"Unable to initialize waterfall output without fixed duration\");\n\n None\n\n }\n\n } else {\n\n None\n\n };\n\n Self {\n\n inner: Recorder::new(),\n\n heatmap,\n\n }\n\n }\n\n\n\n pub fn add_counter_channel<T: ToString>(&self, label: T) {\n\n self.inner\n\n .add_channel(label.to_string(), Source::Counter, None);\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 82, "score": 27.943869423906143 }, { "content": " self.inner.latch();\n\n }\n\n\n\n pub fn hash_map(&self) -> HashMap<String, HashMap<Output, usize>> {\n\n self.inner.hash_map()\n\n }\n\n\n\n pub fn clear(&self) {\n\n self.inner.clear();\n\n }\n\n\n\n pub fn readings(&self) -> Vec<Reading> {\n\n self.inner.readings()\n\n }\n\n\n\n pub fn save_waterfall(&self, file: String) {\n\n if let Some(ref heatmap) = self.heatmap {\n\n let mut labels = HashMap::new();\n\n labels.insert(100, \"100ns\".to_string());\n\n labels.insert(200, \"200ns\".to_string());\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 83, "score": 26.029725839786433 }, { "content": "//! * rich telemetry derived from simple measurements\n\n//!\n\n//! # Overview\n\n//!\n\n//! ## Recorder\n\n//! This library is based on having a `Recorder` which stores the shared\n\n//! state representing all measurements. The `Recorder` can have multiple\n\n//! `Channel`s which each track a specific source of measusurements.\n\n//!\n\n//! ## Channel\n\n//! A `Channel` aggregates data from a specific source. A `Channel` can be\n\n//! configured to track measurements taken from counters, distributions,\n\n//! gauges, or time-intervals. The `Channel` allows for registering interest\n\n//! in one or more `Output`s which are used to produce `Reading`s.\n\n//!\n\n//! ## Output\n\n//! An `Output` is registered with a `Channel` to signal that a type of\n\n//! `Reading` should be produced from the measurements recorded into that\n\n//! `Channel`. Outputs can be counter readings, percentiles, or the time\n\n//! 
offset of the min or max measurement.\n", "file_path": "metrics/src/lib.rs", "rank": 84, "score": 25.947321540514057 }, { "content": "\n\nuse std::fmt::Display;\n\nuse std::io::Error;\n\nuse std::io::ErrorKind;\n\nuse std::io::Read;\n\nuse std::io::Write;\n\nuse std::net::SocketAddr;\n\nuse std::net::ToSocketAddrs;\n\n\n\n/// Holds the `Stream`'s address and underlying stream\n\npub struct Stream {\n\n address: SocketAddr,\n\n stream: Option<TcpStream>,\n\n nodelay: bool,\n\n}\n\n\n\nimpl Stream {\n\n /// Create a new `Stream` which will be connected to the given address\n\n pub fn new<T: ToSocketAddrs + Display>(address: T) -> Self {\n\n let address = address\n", "file_path": "rpc-perf/src/session/stream.rs", "rank": 85, "score": 25.27170330694858 }, { "content": " }\n\n assert_eq!(recorder.counter(\"test\".to_string()), 100);\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.0), Some(1));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.50), Some(50));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.90), Some(90));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.95), Some(95));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.99), Some(99));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 0.999), Some(100));\n\n assert_eq!(recorder.percentile(\"test\".to_string(), 1.00), Some(100));\n\n }\n\n\n\n #[test]\n\n fn gauge_channel() {\n\n let recorder = Recorder::new();\n\n let name = \"test\".to_string();\n\n let histogram_config = HistogramConfig::new(1, 100, 3, None);\n\n recorder.add_channel(name.clone(), Source::Gauge, Some(histogram_config));\n\n // let channel = Channel::latched(\"test\".to_string(), Source::Gauge, 1, 100, 3);\n\n // recorder.add_channel(channel);\n\n // let channel = recorder.get_channel(\"test\".to_string()).unwrap();\n", "file_path": "metrics/src/lib.rs", "rank": 86, "score": 25.02601577114553 }, { "content": " pub fn human(&self) -> String {\n\n let mut data = Vec::new();\n\n for reading in &self.snapshot {\n\n let label = reading.label();\n\n let output = reading.output();\n\n let value = reading.value();\n\n match output {\n\n Output::Counter => {\n\n data.push(format!(\"{}/count: {}\", label, value));\n\n }\n\n Output::Percentile(percentile) => match percentile {\n\n Percentile::Minimum => {\n\n data.push(format!(\"{}/minimum/value: {}\", label, value));\n\n }\n\n Percentile::Maximum => {\n\n data.push(format!(\"{}/maximum/value: {}\", label, value));\n\n }\n\n _ => {\n\n data.push(format!(\"{}/histogram/{}: {}\", label, percentile, value));\n\n }\n", "file_path": "rpc-perf/src/stats/http.rs", "rank": 87, "score": 24.85145888250895 }, { "content": "use std::{\n\n fmt::Display, io::Error, io::ErrorKind, io::Read, io::Write, net::ToSocketAddrs, sync::Arc,\n\n};\n\n\n\npub struct TLSSession {\n\n common: Common,\n\n config: ClientConfig,\n\n stream: Stream,\n\n session: ClientSession,\n\n buffer: Buffer,\n\n}\n\n\n\nimpl TLSSession {\n\n /// Create a new `TLSSession` which uses the configured `ClientSession` for TLS\n\n pub fn new<T: ToSocketAddrs + Display>(address: T, config: ClientConfig) -> Self {\n\n let hostname =\n\n webpki::DNSNameRef::try_from_ascii_str(\"localhost\").expect(\"invalid dns name\");\n\n let session = ClientSession::new(&Arc::new(config.clone()), hostname);\n\n Self {\n\n common: Common::new(),\n", "file_path": "rpc-perf/src/session/tls_session.rs", "rank": 88, "score": 24.71612432271445 }, { "content": "pub struct Http {\n\n recorder: Simple,\n\n server: Server,\n\n snapshot: 
Vec<Reading>,\n\n refreshed: u64,\n\n}\n\n\n\nimpl Http {\n\n pub fn new(address: SocketAddr, recorder: &Simple) -> Self {\n\n let server = tiny_http::Server::http(address);\n\n if server.is_err() {\n\n fatal!(\"Failed to open {} for HTTP Stats listener\", address);\n\n }\n\n Self {\n\n recorder: recorder.clone(),\n\n server: server.unwrap(),\n\n snapshot: Vec::new(),\n\n refreshed: 0,\n\n }\n\n }\n", "file_path": "rpc-perf/src/stats/http.rs", "rank": 89, "score": 24.555766227820605 }, { "content": "}\n\n\n\nimpl Common {\n\n pub fn new() -> Self {\n\n Self {\n\n generator: Config::default().generator(),\n\n recorder: None,\n\n }\n\n }\n\n\n\n pub fn set_generator(&mut self, generator: Generator) {\n\n self.generator = generator;\n\n }\n\n\n\n pub fn set_recorder(&mut self, recorder: Simple) {\n\n self.recorder = Some(recorder);\n\n }\n\n\n\n pub fn recorder(&self) -> &Option<Simple> {\n\n &self.recorder\n\n }\n\n}\n\n\n\nimpl Default for Common {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n", "file_path": "rpc-perf/src/codec/mod.rs", "rank": 90, "score": 24.12940317747762 }, { "content": " assert!(approx_eq(\n\n recorder.percentile(\"test\".to_string(), 0.5).unwrap(),\n\n 1_000_000_000,\n\n 3\n\n ));\n\n assert!(approx_eq(\n\n recorder.percentile(\"test\".to_string(), 1.0).unwrap(),\n\n 2_000_000_000,\n\n 3\n\n ));\n\n }\n\n\n\n #[test]\n\n fn distribution_channel() {\n\n let recorder = Recorder::new();\n\n let name = \"test\".to_string();\n\n let histogram_config = HistogramConfig::new(1, 101, 3, None);\n\n recorder.add_channel(name.clone(), Source::Distribution, Some(histogram_config));\n\n // let channel = Channel::latched(\"test\".to_string(), Source::Distribution, 1, 101, 3);\n\n // recorder.add_channel(channel);\n", "file_path": "metrics/src/lib.rs", "rank": 91, "score": 24.11498970067639 }, { "content": " pub fn new(value: T) -> Self {\n\n Self {\n\n value: Arc::new(RwLock::new(Wrapper::new(value))),\n\n }\n\n }\n\n\n\n /// Get a mutable pointer to the inner value without taking a lock. This\n\n /// should only be used for thread-safe actions on atomics. This will block\n\n /// if there is a locked write\n\n pub fn get(&self) -> *mut T {\n\n self.value.read().get()\n\n }\n\n\n\n /// Get a mutable pointer to the inner value by taking a lock. This can\n\n /// be used when a non-thread-safe action must be taken on the inner type,\n\n /// such as resizing the inner datastructure. 
Taking a lock will cause all\n\n /// `get()` to block until the lock is released\n\n pub fn lock(&self) -> *mut T {\n\n self.value.write().get()\n\n }\n", "file_path": "datastructures/src/wrapper.rs", "rank": 92, "score": 23.726665767344514 }, { "content": "pub struct Counter {\n\n inner: Wrapper<AtomicUsize>,\n\n}\n\n\n\nimpl Default for Counter {\n\n fn default() -> Self {\n\n Counter::new(0)\n\n }\n\n}\n\n\n\nimpl Counter {\n\n /// Create a new zeroed counter\n\n pub fn new(value: usize) -> Self {\n\n Self {\n\n inner: Wrapper::new(AtomicUsize::new(value)),\n\n }\n\n }\n\n\n\n /// Clear the counter by reseting the value to zero\n\n pub fn clear(&self) {\n", "file_path": "datastructures/src/counter.rs", "rank": 93, "score": 23.6999646272607 }, { "content": " recorder.add_channel(name.clone(), Source::Counter, Some(histogram_config));\n\n // let channel = Channel::latched(\"test\".to_string(), Source::Counter, 0, 2_000_000_001, 3);\n\n // recorder.add_channel(channel);\n\n // let channel = recorder.get_channel(\"test\".to_string()).unwrap();\n\n assert_eq!(recorder.counter(\"test\".to_string()), 0);\n\n recorder.record(\n\n \"test\".to_string(),\n\n Measurement::Counter { time: 0, value: 1 },\n\n );\n\n assert_eq!(recorder.counter(\"test\".to_string()), 1);\n\n recorder.record(\n\n \"test\".to_string(),\n\n Measurement::Counter { time: 1, value: 2 },\n\n );\n\n assert_eq!(recorder.counter(\"test\".to_string()), 2);\n\n assert!(approx_eq(\n\n recorder.percentile(\"test\".to_string(), 0.0).unwrap(),\n\n 0,\n\n 3\n\n ));\n", "file_path": "metrics/src/lib.rs", "rank": 94, "score": 23.406281552416722 }, { "content": " inner: Bucket,\n\n buckets: Vec<Bucket>,\n\n step: RwWrapper<f64>,\n\n}\n\n\n\nimpl OuterBucket {\n\n pub fn new(min: usize, max: usize, buckets: usize) -> Self {\n\n trace!(\"outer bucket: {} -> {} with {} buckets\", min, max, buckets);\n\n let inner = Bucket::new(min, max);\n\n let count = buckets;\n\n let mut buckets = Vec::with_capacity(count);\n\n let range = max - min;\n\n let step = range as f64 / count as f64;\n\n\n\n for i in 0..count {\n\n let bucket_min = (i as f64 * step) as usize + min;\n\n let bucket_max = ((i + 1) as f64 * step) as usize + min;\n\n buckets.push(Bucket::new(bucket_min, bucket_max));\n\n }\n\n\n", "file_path": "datastructures/src/histogram/bucket.rs", "rank": 95, "score": 23.27651151195745 }, { "content": " #[test]\n\n fn time_interval_channel() {\n\n let recorder = Recorder::new();\n\n let name = \"test\".to_string();\n\n let histogram_config = HistogramConfig::new(1, 100, 3, None);\n\n recorder.add_channel(name.clone(), Source::TimeInterval, Some(histogram_config));\n\n assert_eq!(recorder.counter(\"test\".to_string()), 0);\n\n recorder.record(\n\n \"test\".to_string(),\n\n Measurement::TimeInterval { start: 0, stop: 1 },\n\n );\n\n assert_eq!(recorder.counter(\"test\".to_string()), 1);\n\n for i in 1..100 {\n\n recorder.record(\n\n \"test\".to_string(),\n\n Measurement::TimeInterval {\n\n start: i,\n\n stop: i + 1,\n\n },\n\n );\n", "file_path": "metrics/src/lib.rs", "rank": 96, "score": 23.25189860219828 }, { "content": "\n\nuse logger::*;\n\nuse mio::Token;\n\nuse slab::Slab;\n\n\n\nuse std::net::SocketAddr;\n\n\n\n/// A structure which represents a `Client` which uses plain `Session`s\n\npub struct PlainClient {\n\n common: Common,\n\n sessions: Slab<PlainSession>,\n\n}\n\n\n\nimpl PlainClient {\n\n /// Create a new `PlainClient` which will send requests from the queue and parse the responses\n\n pub fn new(id: usize, codec: Box<Codec>) -> 
PlainClient {\n\n Self {\n\n common: Common::new(id, codec),\n\n sessions: Slab::new(),\n\n }\n", "file_path": "rpc-perf/src/client/plain_client.rs", "rank": 97, "score": 23.220597439434794 }, { "content": " as i64,\n\n ),\n\n histogram: self.inner.slices[index].clone(),\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Heatmap {\n\n type Item = Slice;\n\n type IntoIter = Iter<'a>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n Iter::new(self)\n\n }\n\n}\n\n\n\nimpl Config {\n\n pub fn new(min: usize, max: usize, precision: usize, resolution: usize, span: usize) -> Self {\n\n Self {\n", "file_path": "datastructures/src/heatmap.rs", "rank": 98, "score": 22.448453350214344 }, { "content": " max: Counter,\n\n}\n\n\n\nimpl Bucket {\n\n pub fn new(min: usize, max: usize) -> Self {\n\n Self {\n\n count: Counter::default(),\n\n min: Counter::new(min),\n\n max: Counter::new(max),\n\n }\n\n }\n\n\n\n pub fn incr(&self, count: usize) {\n\n self.count.incr(count)\n\n }\n\n\n\n pub fn decr(&self, count: usize) {\n\n self.count.decr(count)\n\n }\n\n\n", "file_path": "datastructures/src/histogram/bucket.rs", "rank": 99, "score": 22.399788992577164 } ]
Rust
tests/serde_tests.rs
wrobstory/brickline
16b7281242744398cfaaaae87dda3b5c3c76ba4e
extern crate brickline;

use std::convert::TryFrom;

use brickline::wanted::{
    Color, Condition, Item, ItemID, ItemType, MaxPrice, MinQty, Notify, QtyFilled, Remarks,
    WantedList,
};

mod common;

#[cfg(test)]
mod tests {

    use super::*;

    #[test]
    fn test_xml_to_wanted_list() {
        let bricklink_wanted_list: WantedList =
            common::resource_name_to_wanted_list("bricklink_example.xml");

        let item_1 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3622")),
            color: Some(Color(11)),
            max_price: None,
            min_qty: None,
            qty_filled: Some(QtyFilled(4)),
            condition: None,
            remarks: None,
            notify: None,
            wanted_show: None,
            wanted_list_id: None,
        };

        let item_2 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3039")),
            color: None,
            max_price: None,
            min_qty: None,
            qty_filled: None,
            condition: None,
            remarks: None,
            notify: None,
            wanted_show: None,
            wanted_list_id: None,
        };

        let item_3 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3001")),
            color: Some(Color(5)),
            max_price: Some(MaxPrice(1.00)),
            min_qty: Some(MinQty(100)),
            qty_filled: None,
            condition: Some(Condition::New),
            remarks: Some(Remarks(String::from("for MOC AB154A"))),
            notify: Some(Notify::N),
            wanted_show: None,
            wanted_list_id: None,
        };

        let items = vec![item_1, item_2, item_3];
        let expected_wanted_list = WantedList { items: items };
        assert_eq!(bricklink_wanted_list, expected_wanted_list);
    }

    #[test]
    fn test_wanted_list_to_string_1() {
        let item_1 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3622")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let items = vec![item_1];
        let wanted_list = WantedList { items: items };
        let stringified = String::try_from(wanted_list).unwrap();
        let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
                        <INVENTORY>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3622</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        </INVENTORY>\
                        ";
        assert_eq!(String::from(expected), stringified);
    }

    #[test]
    fn test_wanted_list_to_string_2() {
        let item_1 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3622")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let item_2 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3623")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let item_3 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3624")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let items = vec![item_1, item_2, item_3];
        let wanted_list = WantedList { items: items };
        let stringified = String::try_from(wanted_list).unwrap();
        let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
                        <INVENTORY>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3622</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3623</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3624</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        </INVENTORY>\
                        ";
        assert_eq!(String::from(expected), stringified);
    }

    #[test]
    fn test_roundtrips() {
        for resource_name in vec![
            "bricklink_example.xml",
            "test_wanted_list_1.xml",
            "test_wanted_list_2.xml",
            "test_wanted_list_3.xml",
        ]
        .iter()
        {
            let wanted_list = common::resource_name_to_wanted_list(resource_name);
            let stringified = String::try_from(wanted_list).unwrap();
            let expected_string = common::resource_name_to_string(resource_name);
            assert_eq!(expected_string, stringified);
        }
    }
}
extern crate brickline;

use std::convert::TryFrom;

use brickline::wanted::{
    Color, Condition, Item, ItemID, ItemType, MaxPrice, MinQty, Notify, QtyFilled, Remarks,
    WantedList,
};

mod common;

#[cfg(test)]
mod tests {

    use super::*;

    #[test]
    #[test]
    fn test_wanted_list_to_string_1() {
        let item_1 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3622")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let items = vec![item_1];
        let wanted_list = WantedList { items: items };
        let stringified = String::try_from(wanted_list).unwrap();
        let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
                        <INVENTORY>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3622</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        </INVENTORY>\
                        ";
        assert_eq!(String::from(expected), stringified);
    }

    #[test]
    fn test_wanted_list_to_string_2() {
        let item_1 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3622")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let item_2 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3623")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let item_3 = Item::build_test_item(
            ItemType::Part,
            ItemID(String::from("3624")),
            Some(Color(11)),
            Some(MinQty(4)),
        );
        let items = vec![item_1, item_2, item_3];
        let wanted_list = WantedList { items: items };
        let stringified = String::try_from(wanted_list).unwrap();
        let expected = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
                        <INVENTORY>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3622</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3623</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        <ITEM>\
                        <ITEMTYPE>P</ITEMTYPE>\
                        <ITEMID>3624</ITEMID>\
                        <COLOR>11</COLOR>\
                        <MINQTY>4</MINQTY>\
                        </ITEM>\
                        </INVENTORY>\
                        ";
        assert_eq!(String::from(expected), stringified);
    }

    #[test]
    fn test_roundtrips() {
        for resource_name in vec![
            "bricklink_example.xml",
            "test_wanted_list_1.xml",
            "test_wanted_list_2.xml",
            "test_wanted_list_3.xml",
        ]
        .iter()
        {
            let wanted_list = common::resource_name_to_wanted_list(resource_name);
            let stringified = String::try_from(wanted_list).unwrap();
            let expected_string = common::resource_name_to_string(resource_name);
            assert_eq!(expected_string, stringified);
        }
    }
}
    fn test_xml_to_wanted_list() {
        let bricklink_wanted_list: WantedList =
            common::resource_name_to_wanted_list("bricklink_example.xml");

        let item_1 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3622")),
            color: Some(Color(11)),
            max_price: None,
            min_qty: None,
            qty_filled: Some(QtyFilled(4)),
            condition: None,
            remarks: None,
            notify: None,
            wanted_show: None,
            wanted_list_id: None,
        };

        let item_2 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3039")),
            color: None,
            max_price: None,
            min_qty: None,
            qty_filled: None,
            condition: None,
            remarks: None,
            notify: None,
            wanted_show: None,
            wanted_list_id: None,
        };

        let item_3 = Item {
            item_type: ItemType::Part,
            item_id: ItemID(String::from("3001")),
            color: Some(Color(5)),
            max_price: Some(MaxPrice(1.00)),
            min_qty: Some(MinQty(100)),
            qty_filled: None,
            condition: Some(Condition::New),
            remarks: Some(Remarks(String::from("for MOC AB154A"))),
            notify: Some(Notify::N),
            wanted_show: None,
            wanted_list_id: None,
        };

        let items = vec![item_1, item_2, item_3];
        let expected_wanted_list = WantedList { items: items };
        assert_eq!(bricklink_wanted_list, expected_wanted_list);
    }
function_block-full_function
[ { "content": "use brickline::wanted::{SerdeWantedList, WantedList};\n\nuse brickline::xml_to_string;\n\n\n\nuse quick_xml::de::from_str;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "tests/common/mod.rs", "rank": 0, "score": 64840.782517102314 }, { "content": "/// Given an WantedList, build a HashMap of each WantedList Item where\n\n/// the hash key is the ItemID and Color combination for the Item.\n\n/// Note: we explicitly .clone the Item for this map, as we're going to\n\n/// use it as the base case for our joined list.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `inventory`: Bricklink inventory as deserialized from XML\n\n///\n\n/// Example\n\n///\n\n/// ```no_run\n\n/// use brickline::{xml_to_string, build_item_color_hashmap};\n\n/// use brickline::wanted::{WantedList, SerdeWantedList};\n\n/// use quick_xml::de::from_str;\n\n/// use std::path::PathBuf;\n\n///\n\n/// let path = PathBuf::from(\"/home/user/path/to/file.xml\");\n\n/// let xml_string = xml_to_string(&path).unwrap();\n\n/// let wanted_list = WantedList::from(from_str::<SerdeWantedList>(&xml_string).unwrap());\n\n/// let hm = build_item_color_hashmap(&wanted_list);\n\n/// ```\n\npub fn build_item_color_hashmap(inventory: &WantedList) -> BTreeMap<ItemColorHashKey, Item> {\n\n inventory\n\n .items\n\n .iter()\n\n .fold(BTreeMap::new(), |mut acc, item| {\n\n let item_color_key = ItemColorHashKey {\n\n item_id: &item.item_id,\n\n color: &item.color,\n\n };\n\n // Cloning here as we're going to mutate these\n\n // Items to combine them with other lists\n\n acc.insert(item_color_key, item.clone());\n\n acc\n\n })\n\n}\n\n\n\n/// Given two items, add the MinQty of the righthand (incrementing) Item to the\n\n/// lefthand (to-be-incremented) Item. 
The lefthand item_to_increment *will*\n\n/// be mutated.\n\n///\n", "file_path": "src/lib.rs", "rank": 1, "score": 63026.57637306994 }, { "content": "pub fn resource_name_to_wanted_list(resource_name: &str) -> WantedList {\n\n let resource_path = get_resource_path(resource_name);\n\n let resource_str = xml_to_string(&resource_path).unwrap();\n\n WantedList::from(from_str::<SerdeWantedList>(&resource_str).unwrap())\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 2, "score": 52363.199433918686 }, { "content": "fn load_resource_directory() -> PathBuf {\n\n let mut resource_dir = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n resource_dir.push(\"resources/test\");\n\n resource_dir\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 3, "score": 49266.73212414912 }, { "content": "/// increment_item(&mut left_item, &right_item);\n\n///\n\nfn increment_item(item_to_increment: &mut Item, incrementing_item: &Item) -> () {\n\n let incrementing_min_qty = match &incrementing_item.min_qty {\n\n Some(qty) => qty.0,\n\n None => 1,\n\n };\n\n\n\n match &item_to_increment.min_qty {\n\n Some(qty) => item_to_increment.min_qty = Some(MinQty(qty.0 + incrementing_min_qty)),\n\n None => item_to_increment.min_qty = Some(MinQty(1 + incrementing_min_qty)),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 47935.68258053949 }, { "content": "fn get_resource_path(resource_name: &str) -> PathBuf {\n\n let mut resource_path = load_resource_directory();\n\n resource_path.push(resource_name);\n\n resource_path\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 5, "score": 43062.60058789863 }, { "content": "pub fn resource_name_to_string(resource_name: &str) -> String {\n\n let resource_path = get_resource_path(resource_name);\n\n let mut file = File::open(resource_path).unwrap();\n\n let mut xml_string = String::new();\n\n file.read_to_string(&mut xml_string).unwrap();\n\n xml_string.replace(\"\\n\", \"\")\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 6, "score": 43062.60058789863 }, { "content": "pub fn update_wanted_list_statistic(item: &Item, aggregate: &mut WantedListStatistics) -> () {\n\n aggregate.total_items += 1;\n\n\n\n match &item.min_qty {\n\n Some(min_qty) => aggregate.total_parts += min_qty.0,\n\n None => aggregate.total_parts += 1,\n\n };\n\n\n\n let ic_hk = OwnedItemColorHashKey {\n\n item_id: item.item_id.clone(),\n\n color: item.color.clone(),\n\n };\n\n\n\n if !aggregate.item_color_set.contains(&ic_hk) {\n\n aggregate.unique_item_color_count += 1;\n\n aggregate.item_color_set.insert(ic_hk);\n\n }\n\n\n\n item.color.as_ref().map(|color| {\n\n if !aggregate.color_set.contains(color) {\n\n aggregate.unique_color_count += 1;\n\n aggregate.color_set.insert(color.clone());\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/wanted.rs", "rank": 7, "score": 36494.97342166041 }, { "content": "extern crate brickline;\n\n\n\nuse brickline::wanted::{ItemID, MinQty, Remarks};\n\n\n\nmod common;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_join_inventories_1() {\n\n let wanted_list_1 = common::resource_name_to_wanted_list(\"test_wanted_list_1.xml\");\n\n let wanted_list_2 = common::resource_name_to_wanted_list(\"test_wanted_list_2.xml\");\n\n\n\n let joined_wanted_list_1 = brickline::join_inventories(&wanted_list_1, &wanted_list_2);\n\n let joined_wanted_list_2 = brickline::join_inventories(&wanted_list_2, &wanted_list_1);\n\n\n\n // These end up being ordered by ItemID\n", "file_path": "tests/lib_tests.rs", "rank": 9, "score": 
27398.191551096912 }, { "content": "\n\n #[test]\n\n fn test_join_inventories_2() {\n\n let wanted_list_1 = common::resource_name_to_wanted_list(\"test_wanted_list_1.xml\");\n\n let wanted_list_2 = common::resource_name_to_wanted_list(\"bricklink_example.xml\");\n\n\n\n let joined_wanted_list_1 = brickline::join_inventories(&wanted_list_1, &wanted_list_2);\n\n let joined_wanted_list_2 = brickline::join_inventories(&wanted_list_2, &wanted_list_1);\n\n\n\n // These end up being ordered by ItemID\n\n let expected_qty = vec![\n\n (ItemID(String::from(\"3001\")), Some(MinQty(200))),\n\n (ItemID(String::from(\"3039\")), None),\n\n (ItemID(String::from(\"3622\")), Some(MinQty(5))),\n\n (ItemID(String::from(\"3623\")), None),\n\n ];\n\n for (i, expected) in expected_qty.iter().enumerate() {\n\n let item = &joined_wanted_list_1.items[i];\n\n assert_eq!(expected.0, item.item_id);\n\n assert_eq!(expected.1, item.min_qty)\n", "file_path": "tests/lib_tests.rs", "rank": 16, "score": 27387.34387916595 }, { "content": " let expected_qty = vec![\n\n (ItemID(String::from(\"3000\")), Some(MinQty(4))),\n\n (ItemID(String::from(\"3001\")), Some(MinQty(200))),\n\n (ItemID(String::from(\"3622\")), Some(MinQty(14))),\n\n (ItemID(String::from(\"3623\")), None),\n\n ];\n\n for (i, expected) in expected_qty.iter().enumerate() {\n\n let item = &joined_wanted_list_1.items[i];\n\n assert_eq!(expected.0, item.item_id);\n\n assert_eq!(expected.1, item.min_qty)\n\n }\n\n\n\n // The first join should retain the remarks of wanted_list_1\n\n assert_eq!(\n\n joined_wanted_list_1.items[1].remarks,\n\n Some(Remarks(\"Testing\".to_string()))\n\n );\n\n // The second join should use wanted_list_2, so no remarks\n\n assert_eq!(joined_wanted_list_2.items[1].remarks, None);\n\n }\n", "file_path": "tests/lib_tests.rs", "rank": 17, "score": 27387.12120922926 }, { "content": " }\n\n\n\n // The first join should retain the remarks of wanted_list_1\n\n assert_eq!(\n\n joined_wanted_list_1.items[0].remarks,\n\n Some(Remarks(\"Testing\".to_string()))\n\n );\n\n // The second join should retain the remarks of bricklink_example\n\n assert_eq!(\n\n joined_wanted_list_2.items[0].remarks,\n\n Some(Remarks(\"for MOC AB154A\".to_string()))\n\n );\n\n }\n\n}\n", "file_path": "tests/lib_tests.rs", "rank": 18, "score": 27386.479181350067 }, { "content": "/// Given two Inventories, join the right inventory into the left one.\n\n/// Here's how the join happens:\n\n/// 1. Build hash table from left inventory\n\n/// 2. Iterate through right inventory and probe table for ItemId/Color keys\n\n/// 3. If a key is found, add the MinQty of the right inventory to the left.\n\n/// NOTE: The metadata from the *left* inventory is retained. There is no\n\n/// other metadata joining other than MinQty.\n\n/// 4. If no key is found, add the Item from the right inventory to the hash table\n\n/// 5. 
Convert the .values() of the hash table into .items of a new WantedList\n\n///\n\n/// # Arguments\n\n///\n\n/// * `left_inventory`: WantedList to be joined into\n\n/// * `right_inventory`: WantedList to join into left inventory\n\n///\n\n/// Example\n\n///\n\n/// ```\n\n/// use brickline::join_inventories;\n\n/// use brickline::wanted::{WantedList, Item, ItemID, ItemType, Color, MinQty};\n\n///\n\n/// let item = Item::build_test_item(\n\n/// ItemType::Part,\n\n/// ItemID(String::from(\"3039\")),\n\n/// Some(Color(5)),\n\n/// Some(MinQty(20)),\n\n/// );\n\n/// let item_1 = item.clone();\n\n///\n\n/// let left_inventory = WantedList { items: vec![item] };\n\n/// let right_inventory = WantedList { items: vec![item_1] };\n\n///\n\n/// let joined_inventory = join_inventories(&left_inventory, &right_inventory);\n\n/// ```\n\npub fn join_inventories(left_inventory: &WantedList, right_inventory: &WantedList) -> WantedList {\n\n let mut left_inv_map = build_item_color_hashmap(left_inventory);\n\n right_inventory\n\n .items\n\n .iter()\n\n .fold(&mut left_inv_map, |acc, right_item| {\n\n let item_color_key = ItemColorHashKey {\n\n item_id: &right_item.item_id,\n\n color: &right_item.color,\n\n };\n\n if let Some(left_item) = acc.get_mut(&item_color_key) {\n\n increment_item(left_item, right_item);\n\n } else {\n\n acc.insert(item_color_key, right_item.clone());\n\n }\n\n acc\n\n });\n\n WantedList {\n\n items: left_inv_map.values().cloned().collect(),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 18384.99658816313 }, { "content": "```\n\n| ◯ ◯ ● |\n\n| ◯ ◯ ● |\n\n | ◯ ● ◯ |\n\n | ◯ ● ◯ | \n\n | ● ◯ ◯ |\n\n | ● ◯ ◯ |\n\n```\n\n\n\n# brickline\n\n\n\nBrickline is a set of command line utilities for manipulating, modifying, and analyzing BrickLink LEGO wanted lists. \n\n\n\nThis README contains information on how to\n\n[install `brickline`](https://github.com/wrobstory/brickline#installation) as well as a guide to the available commands. \n\n\n\n## Commands\n\n\n\n### Join\n\n\n\nJoin two Bricklink Wanted List on ItemID and Color, summing the minimum quantity (MinQty) values of the two lists; it will keep the remaining metadata from the lefthand list. 
This is something you can't do on Bricklink right now: if you try to copy a wanted list to another wanted list with duplicate ItemID/Color combinations you will get a \"Warning: Item color combination already exists\".\n\n\n\nExample: \n\n```\n\n$ ./target/release/brickline join -l ./resources/test/test_wanted_list_3.xml \\\n\n -r ./resources/test/test_wanted_list_4.xml \\\n\n -o /tmp/joined_wanted_list.xml\n\nLeft Wanted list Statistics for ./resources/test/test_wanted_list_3.xml\n\n\n\nTotal Items: 45, \n\nTotal Parts: 196, \n\nUnique Item/Color Count: 45, \n\nUnique Color Count: 4\n\n\n\nRight Wanted List Statistics for ./resources/test/test_wanted_list_4.xml\n\n\n\nTotal Items: 151, \n\nTotal Parts: 848, \n\nUnique Item/Color Count: 151, \n\nUnique Color Count: 14\n\n\n\nMerging wanted lists...\n\n\n\nMerged Wanted List Statistics \n\nTotal Items: 195, \n\nTotal Parts: 1044, \n\nUnique Item/Color Count: 195, \n\nUnique Color Count: 15\n\n\n\nWriting joined wanted list to /tmp/joined_wanted_list.xml\n\n```\n\n\n\n\n\n## Installation \n\n\n\nYou can compile from source by [installing Cargo](https://crates.io/install), ([Rust's](https://www.rust-lang.org/) package manager)\n\n\n\n\n\n```bash\n\ngit clone git://github.com/wrobstory/brickline\n\ncd brickline\n\ncargo build --release\n\n```\n\n\n\nThe resulting binary will be at `target/release/brickline`. \n\n\n\nI'm hoping to have Github binaries and a homebrew package available soon.\n", "file_path": "README.md", "rank": 21, "score": 14225.853370749399 }, { "content": "// TODO: Unify the above and below\n\npub fn gen_statistics(wanted_list: &WantedList) -> WantedListStatistics {\n\n let mut statistics = WantedListStatistics::init();\n\n wanted_list\n\n .items\n\n .iter()\n\n .for_each(|item| update_wanted_list_statistic(item, &mut statistics));\n\n statistics\n\n}\n\n\n\nimpl std::convert::From<WantedList> for SerdeWantedList {\n\n fn from(wanted_list: WantedList) -> SerdeWantedList {\n\n SerdeWantedList {\n\n items: wanted_list\n\n .items\n\n .into_iter()\n\n .map(|i| SerdeItem::from(i))\n\n .collect(),\n\n }\n\n }\n\n}\n", "file_path": "src/wanted.rs", "rank": 22, "score": 12635.828948924158 }, { "content": "/// Given a path to a file, read the file and deserialize it to an WantedList\n\n///\n\n/// # Arguments\n\n///\n\n/// * `file_path`: String path to file\n\n///\n\n/// Example\n\n///\n\n/// ```no_run\n\n/// use brickline::file_to_wanted_list;\n\n///\n\n/// let inventory = file_to_wanted_list(\"/path/to/wanted_list.xml\");\n\npub fn file_to_wanted_list(file_path: &str) -> Result<(WantedList, WantedListStatistics), IOError> {\n\n let resource_path = PathBuf::from(file_path);\n\n let resource_str = xml_to_string(&resource_path)?;\n\n match from_str::<SerdeWantedList>(&resource_str) {\n\n Ok(serde_inventory) => Ok(type_and_gen_statistics(serde_inventory)),\n\n Err(e) => Err(IOError::new(ErrorKind::InvalidInput, e)),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 10556.644972064216 }, { "content": "}\n\n\n\nimpl Item {\n\n /// Build a test Item with item_type, item_id, color, and all other fields set to\n\n /// None. 
Only used as a test data generator.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `item_type` - ItemType\n\n /// * `item_id` - ItemID\n\n /// * `color` - Color\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use brickline::wanted::{Item, ItemType, ItemID, Color};\n\n ///\n\n /// let test_item = Item::build_test_item(\n\n /// ItemType::Part,\n\n /// ItemID(String::from(\"3622\")),\n", "file_path": "src/wanted.rs", "rank": 24, "score": 11.546085570012801 }, { "content": " /// Some(Color(11)),\n\n /// None\n\n /// );\n\n /// ```\n\n pub fn build_test_item(\n\n item_type: ItemType,\n\n item_id: ItemID,\n\n color: Option<Color>,\n\n min_qty: Option<MinQty>,\n\n ) -> Item {\n\n Item {\n\n item_type,\n\n item_id: item_id.into(),\n\n color: color.map(|c| c.into()),\n\n min_qty: min_qty.map(|m| m.into()),\n\n max_price: None,\n\n qty_filled: None,\n\n condition: None,\n\n remarks: None,\n\n notify: None,\n", "file_path": "src/wanted.rs", "rank": 25, "score": 11.284105447956723 }, { "content": "/// # Arguments\n\n///\n\n/// * `item_to_increment`: Item to be incremented\n\n/// * `incrementing_item`: Item to increment from\n\n///\n\n/// Example\n\n///\n\n/// use brickline::increment_item;\n\n/// use brickline::wanted::Item;\n\n///\n\n/// let mut left_item = Item::build_test_item(ItemType::Part, ItemID(String::from(\"3039\")), Some(Color(5)), Some(MinQty(20)));\n\n/// let right_item = Item::build_test_item(ItemType::Part, ItemID(String::from(\"3039\")), Some(Color(5)), Some(MinQty(10)));\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 11.13682471185571 }, { "content": " pub item_id: String,\n\n #[serde(rename = \"COLOR\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub color: Option<i8>,\n\n #[serde(rename = \"MAXPRICE\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub max_price: Option<String>,\n\n #[serde(rename = \"MINQTY\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub min_qty: Option<i32>,\n\n #[serde(rename = \"QTYFILLED\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub qty_filled: Option<i32>,\n\n #[serde(rename = \"CONDITION\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub condition: Option<String>,\n\n #[serde(rename = \"REMARKS\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub remarks: Option<String>,\n\n #[serde(rename = \"NOTIFY\")]\n", "file_path": "src/wanted.rs", "rank": 27, "score": 10.940560712111049 }, { "content": " pub remarks: Option<Remarks>,\n\n pub notify: Option<Notify>,\n\n pub wanted_show: Option<WantedShow>,\n\n pub wanted_list_id: Option<WantedListID>,\n\n}\n\n\n\nimpl std::convert::From<SerdeItem> for Item {\n\n fn from(serde_item: SerdeItem) -> Item {\n\n Item {\n\n item_type: ItemType::from(serde_item.item_type),\n\n item_id: ItemID::from(serde_item.item_id),\n\n color: serde_item.color.map(|c| Color::from(c)),\n\n max_price: serde_item.max_price.map(|m| MaxPrice::from(m)),\n\n min_qty: serde_item.min_qty.map(|m| MinQty::from(m)),\n\n qty_filled: serde_item.qty_filled.map(|q| QtyFilled::from(q)),\n\n condition: serde_item.condition.map(|c| Condition::from(c)),\n\n remarks: serde_item.remarks.map(|r| Remarks::from(r)),\n\n notify: serde_item.notify.map(|n| Notify::from(n)),\n\n wanted_show: serde_item.wanted_show.map(|w| WantedShow::from(w)),\n\n wanted_list_id: serde_item.wanted_list_id.map(|w| WantedListID::from(w)),\n", "file_path": "src/wanted.rs", "rank": 28, "score": 10.773311032451828 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl 
std::convert::From<Item> for SerdeItem {\n\n fn from(item: Item) -> SerdeItem {\n\n SerdeItem {\n\n item_type: String::from(item.item_type),\n\n item_id: String::from(item.item_id),\n\n color: item.color.map(|c| i8::from(c)),\n\n max_price: item.max_price.map(|m| String::from(m)),\n\n min_qty: item.min_qty.map(|m| i32::from(m)),\n\n qty_filled: item.qty_filled.map(|q| i32::from(q)),\n\n condition: item.condition.map(|c| String::from(c)),\n\n remarks: item.remarks.map(|r| String::from(r)),\n\n notify: item.notify.map(|n| String::from(n)),\n\n wanted_show: item.wanted_show.map(|ws| String::from(ws)),\n\n wanted_list_id: item.wanted_list_id.map(|w| String::from(w)),\n\n }\n\n }\n", "file_path": "src/wanted.rs", "rank": 29, "score": 9.609366362492882 }, { "content": " let joined_inventory = join_inventories(&left_wanted_list, &right_wanted_list);\n\n let joined_statistics = gen_statistics(&joined_inventory);\n\n println!(\"Merged Wanted List Statistics {}\\n\", joined_statistics);\n\n let xml_string = String::try_from(joined_inventory)?;\n\n\n\n let out_path_str = join_args\n\n .value_of(\"output\")\n\n .ok_or(IOError::new(ErrorKind::InvalidInput, \"Empty output path\"))?;\n\n let out_path = PathBuf::from(out_path_str);\n\n write_file_with_overwrite_prompt(&out_path, &xml_string)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use crate::wanted::ItemType;\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 30, "score": 9.041970181063954 }, { "content": " /// ```\n\n /// use quick_xml::se::to_string;\n\n /// use brickline::wanted::{WantedList, SerdeWantedList, Item, ItemType,\n\n /// ItemID, Color};\n\n ///\n\n /// let test_item = Item::build_test_item(\n\n /// ItemType::Part,\n\n /// ItemID(String::from(\"3622\")),\n\n /// Some(Color(11)),\n\n /// None\n\n /// );\n\n /// let wanted_list = WantedList { items: vec![test_item]};\n\n /// let serde_wanted_list = SerdeWantedList::from(wanted_list);\n\n /// let stringified = to_string(&serde_wanted_list).unwrap();\n\n /// let repaired = SerdeWantedList::amend_serialized_string(stringified);\n\n /// ```\n\n pub fn amend_serialized_string(mut serde_string: String) -> String {\n\n serde_string.replace_range(11..17, \"\");\n\n let end_bound_1 = serde_string.len() - 19;\n\n let end_bound_2 = serde_string.len() - 12;\n", "file_path": "src/wanted.rs", "rank": 31, "score": 8.872996577929245 }, { "content": "pub mod wanted;\n\n\n\nuse crate::wanted::{\n\n gen_statistics, type_and_gen_statistics, Color, Item, ItemID, MinQty, SerdeWantedList,\n\n WantedList, WantedListStatistics,\n\n};\n\n\n\nuse std::collections::BTreeMap;\n\nuse std::convert::TryFrom;\n\nuse std::error;\n\nuse std::fs::File;\n\nuse std::io::{Error as IOError, ErrorKind, Read, Write};\n\nuse std::path::PathBuf;\n\n\n\nuse clap::ArgMatches;\n\nuse quick_xml::de::from_str;\n\n\n\n/// The primary key of an WantedList Item\n\n#[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct ItemColorHashKey<'a> {\n", "file_path": "src/lib.rs", "rank": 32, "score": 8.810712727315805 }, { "content": "}\n\n\n\nimpl std::convert::From<Condition> for String {\n\n fn from(condition: Condition) -> String {\n\n match condition {\n\n Condition::New => \"N\".to_string(),\n\n Condition::Used => \"U\".to_string(),\n\n Condition::Complete => \"C\".to_string(),\n\n Condition::Incomplete => \"I\".to_string(),\n\n Condition::Sealed => \"S\".to_string(),\n\n Condition::NotProvided => \"X\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n/// Notes on the item\n\n#[derive(Clone, Debug, 
PartialEq)]\n\npub struct Remarks(pub String);\n\n\n\nimpl std::convert::From<String> for Remarks {\n", "file_path": "src/wanted.rs", "rank": 33, "score": 8.326459340664101 }, { "content": " fn from(input_str: String) -> Remarks {\n\n Self(input_str)\n\n }\n\n}\n\n\n\nimpl std::convert::From<Remarks> for String {\n\n fn from(remarks: Remarks) -> String {\n\n remarks.0\n\n }\n\n}\n\n\n\n/// Be notified when these items are listed for sale\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Notify {\n\n Y,\n\n N,\n\n}\n\n\n\nimpl std::convert::From<String> for Notify {\n\n fn from(notify_str: String) -> Notify {\n", "file_path": "src/wanted.rs", "rank": 34, "score": 7.61890542061227 }, { "content": " let hm = build_item_color_hashmap(&inventory);\n\n assert_eq!(hm.len(), 3);\n\n let key_1 = ItemColorHashKey {\n\n item_id: &ItemID(String::from(\"3622\")),\n\n color: &Some(Color(11)),\n\n };\n\n let key_2 = ItemColorHashKey {\n\n item_id: &ItemID(String::from(\"3039\")),\n\n color: &None,\n\n };\n\n assert_eq!(hm.get(&key_1), Some(&item_1a));\n\n assert_eq!(hm.get(&key_2), Some(&item_2a));\n\n }\n\n\n\n #[test]\n\n fn test_increment_item_with_righthand_min_qty() {\n\n let mut left_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n", "file_path": "src/lib.rs", "rank": 35, "score": 7.539861182163943 }, { "content": " fn test_build_item_color_hashmap() {\n\n let item_1 = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3622\")),\n\n Some(Color(11)),\n\n None,\n\n );\n\n let item_1a = item_1.clone();\n\n let item_2 =\n\n Item::build_test_item(ItemType::Part, ItemID(String::from(\"3039\")), None, None);\n\n let item_2a = item_2.clone();\n\n let item_3 = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3001\")),\n\n Some(Color(5)),\n\n None,\n\n );\n\n let inventory = WantedList {\n\n items: vec![item_1, item_2, item_3],\n\n };\n", "file_path": "src/lib.rs", "rank": 36, "score": 7.503258548618698 }, { "content": " );\n\n let right_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n\n None,\n\n );\n\n\n\n increment_item(&mut left_item, &right_item);\n\n assert_eq!(left_item.min_qty.unwrap().0, 21);\n\n }\n\n\n\n #[test]\n\n fn test_increment_item_with_no_min_qty() {\n\n let mut left_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n\n None,\n\n );\n", "file_path": "src/lib.rs", "rank": 37, "score": 7.312377496237103 }, { "content": " Some(MinQty(20)),\n\n );\n\n let right_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n\n Some(MinQty(10)),\n\n );\n\n\n\n increment_item(&mut left_item, &right_item);\n\n assert_eq!(left_item.min_qty.unwrap().0, 30);\n\n }\n\n\n\n #[test]\n\n fn test_increment_item_with_no_righthand_min_qty() {\n\n let mut left_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n\n Some(MinQty(20)),\n", "file_path": "src/lib.rs", "rank": 38, "score": 7.163394219022888 }, { "content": " #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub notify: Option<String>,\n\n #[serde(rename = \"WANTEDSHOW\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub wanted_show: Option<String>,\n\n #[serde(rename = \"WANTEDLISTID\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub wanted_list_id: Option<String>,\n\n}\n\n\n\n/// A single Lego 
Item\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Item {\n\n pub item_type: ItemType,\n\n pub item_id: ItemID,\n\n pub color: Option<Color>,\n\n pub max_price: Option<MaxPrice>,\n\n pub min_qty: Option<MinQty>,\n\n pub qty_filled: Option<QtyFilled>,\n\n pub condition: Option<Condition>,\n", "file_path": "src/wanted.rs", "rank": 39, "score": 6.678817980710741 }, { "content": " let right_item = Item::build_test_item(\n\n ItemType::Part,\n\n ItemID(String::from(\"3039\")),\n\n Some(Color(5)),\n\n None,\n\n );\n\n\n\n increment_item(&mut left_item, &right_item);\n\n assert_eq!(left_item.min_qty.unwrap().0, 2);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 40, "score": 6.215642870076979 }, { "content": " New,\n\n Used,\n\n Complete,\n\n Incomplete,\n\n Sealed,\n\n NotProvided,\n\n}\n\n\n\nimpl std::convert::From<String> for Condition {\n\n fn from(condition_str: String) -> Condition {\n\n match condition_str.as_str() {\n\n \"N\" => Self::New,\n\n \"U\" => Self::Used,\n\n \"C\" => Self::Complete,\n\n \"I\" => Self::Incomplete,\n\n \"S\" => Self::Sealed,\n\n \"X\" => Self::NotProvided,\n\n unsupported => panic!(format!(\"{} is not a supported Condition!\", unsupported)),\n\n }\n\n }\n", "file_path": "src/wanted.rs", "rank": 41, "score": 5.409840457260156 }, { "content": "\n\nimpl std::convert::From<SerdeWantedList> for WantedList {\n\n fn from(serde_wanted_list: SerdeWantedList) -> WantedList {\n\n WantedList {\n\n items: serde_wanted_list\n\n .items\n\n .into_iter()\n\n .map(|i| Item::from(i))\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\n/// A serde representation of an Item\n\n#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]\n\n#[serde(rename(serialize = \"ITEM\"))]\n\npub struct SerdeItem {\n\n #[serde(rename = \"ITEMTYPE\")]\n\n pub item_type: String,\n\n #[serde(rename = \"ITEMID\")]\n", "file_path": "src/wanted.rs", "rank": 42, "score": 5.3995786534693195 }, { "content": " f,\n\n \"\n\nTotal Items: {}, \n\nTotal Parts: {}, \n\nUnique Item/Color Count: {}, \n\nUnique Color Count: {}\",\n\n self.total_items,\n\n self.total_parts,\n\n self.unique_item_color_count,\n\n self.unique_color_count\n\n )\n\n }\n\n}\n\n\n\n/// The primary key of an WantedList Item\n\n#[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct OwnedItemColorHashKey {\n\n item_id: ItemID,\n\n color: Option<Color>,\n\n}\n\n\n", "file_path": "src/wanted.rs", "rank": 43, "score": 5.314633749634034 }, { "content": "//! Bricktools\n\n//!\n\n//! A small set of tools to manipulate Bricklink wanted lists and perform\n\n//! 
price analysis\n\nuse std::error;\n\nuse std::io::{Error as IOError, ErrorKind};\n\n\n\nuse brickline::join;\n\n\n\nuse clap::{App, Arg};\n\n\n\n/// CLI Tooling\n\n\n", "file_path": "src/main.rs", "rank": 44, "score": 4.91516830851849 }, { "content": " }\n\n}\n\n\n\nimpl std::convert::From<ItemID> for String {\n\n fn from(item_id: ItemID) -> String {\n\n item_id.0\n\n }\n\n}\n\n\n\n/// Color ID according to the Bricklink color catalog\n\n/// https://www.bricklink.com/catalogColors.asp\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]\n\npub struct Color(pub i8);\n\n\n\nimpl std::convert::From<i8> for Color {\n\n fn from(input_i8: i8) -> Color {\n\n Self(input_i8)\n\n }\n\n}\n\n\n", "file_path": "src/wanted.rs", "rank": 45, "score": 4.836981815253206 }, { "content": " item_id: &'a ItemID,\n\n color: &'a Option<Color>,\n\n}\n\n\n\n/// Get user input from stdout\n\n///\n\n/// # Arguments\n\n///\n\n/// * `message`: What message do you want to prompt the user with?\n\n///\n", "file_path": "src/lib.rs", "rank": 46, "score": 4.700824389027275 }, { "content": " match notify_str.as_str() {\n\n \"Y\" => Self::Y,\n\n \"N\" => Self::N,\n\n unsupported => panic!(format!(\"{} is not a supported Notify!\", unsupported)),\n\n }\n\n }\n\n}\n\n\n\nimpl std::convert::From<Notify> for String {\n\n fn from(notify: Notify) -> String {\n\n match notify {\n\n Notify::Y => \"Y\".to_string(),\n\n Notify::N => \"N\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n/// Show in items for sale queries?\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum WantedShow {\n", "file_path": "src/wanted.rs", "rank": 47, "score": 4.545205246060869 }, { "content": " pub color_set: HashSet<Color>,\n\n}\n\n\n\nimpl WantedListStatistics {\n\n pub fn init() -> WantedListStatistics {\n\n WantedListStatistics {\n\n total_items: 0,\n\n total_parts: 0,\n\n unique_item_color_count: 0,\n\n unique_color_count: 0,\n\n item_color_set: HashSet::new(),\n\n color_set: HashSet::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for WantedListStatistics {\n\n // This trait requires `fmt` with this exact signature.\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(\n", "file_path": "src/wanted.rs", "rank": 48, "score": 4.487644407966279 }, { "content": "impl std::convert::From<String> for ItemType {\n\n fn from(itemtype_str: String) -> ItemType {\n\n match itemtype_str.as_str() {\n\n \"S\" => Self::Set,\n\n \"P\" => Self::Part,\n\n \"M\" => Self::Minifig,\n\n \"B\" => Self::Book,\n\n \"G\" => Self::Gear,\n\n \"C\" => Self::Catalog,\n\n \"I\" => Self::Instruction,\n\n \"O\" => Self::OriginalBox,\n\n \"U\" => Self::UnsortedLot,\n\n unsupported => panic!(format!(\"{} is not a supported ItemType!\", unsupported)),\n\n }\n\n }\n\n}\n\n\n\nimpl std::convert::From<ItemType> for String {\n\n fn from(item_type: ItemType) -> String {\n\n match item_type {\n", "file_path": "src/wanted.rs", "rank": 49, "score": 4.349291318415983 }, { "content": " ///\n\n /// # Arguments\n\n ///\n\n /// * `wanted_list`: Bricklink WantedList\n\n /// ```\n\n fn try_from(wanted_list: WantedList) -> Result<Self, Self::Error> {\n\n let serde_wanted_list = SerdeWantedList::from(wanted_list);\n\n let stringified = to_string(&serde_wanted_list)?;\n\n Ok(SerdeWantedList::amend_serialized_string(stringified))\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct WantedListStatistics {\n\n pub total_items: i32,\n\n pub total_parts: i32,\n\n pub unique_item_color_count: i32,\n\n pub unique_color_count: i32,\n\n\n\n pub item_color_set: 
HashSet<OwnedItemColorHashKey>,\n", "file_path": "src/wanted.rs", "rank": 50, "score": 4.175111134723705 }, { "content": "//! A Bricklink Wanted List\n\n//!\n\n//! These types are all based on the Bricklink\n\n//! XML schema as described here: https://www.bricklink.com/help.asp?helpID=207\n\n//!\n\n//! So what's going on here? The quick_xml library has relatively limited support\n\n//! for complex types, and Rust doesn't have good support for a serializable decimel\n\n//! type. So between those two things we've ended up with SerdeIn and SerdeItem,\n\n//! top level structs that only use primitive types. We then iterate over the entire\n\n//! list of items and do a bunch of From/Into transformations to go from our primitive\n\n//! types to more complex ones. It's a bummer, but I don't expect to ever have Bricklink\n\n//! wanted lists longer than O(thousands) of Items, so I'm willing to take perf hit\n\n//! to do the full scan for deserialization/serialization.\n\nuse quick_xml::se::to_string;\n\nuse quick_xml::DeError;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashSet;\n\n\n\n/// The serde wanted_list of SerdeItems\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n", "file_path": "src/wanted.rs", "rank": 51, "score": 4.1650087338779285 }, { "content": "/// Given a path to an XML file, load that file to a String\n\n///\n\n/// # Arguments\n\n///\n\n/// * `file_path`: path to an XML file\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use std::path::PathBuf;\n\n/// use brickline::xml_to_string;\n\n///\n\n/// let path = PathBuf::from(\"/home/user/path/to/file.xml\");\n\n/// let xml_string = xml_to_string(&path);\n\n/// ```\n\npub fn xml_to_string(file_path: &PathBuf) -> Result<String, IOError> {\n\n let mut file = File::open(file_path)?;\n\n let mut xml_string = String::new();\n\n file.read_to_string(&mut xml_string)?;\n\n Ok(xml_string)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 52, "score": 4.099052154158472 }, { "content": "\n\n/// Quantity of the item you already have\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct QtyFilled(pub i32);\n\n\n\nimpl std::convert::From<i32> for QtyFilled {\n\n fn from(input_i32: i32) -> QtyFilled {\n\n Self(input_i32)\n\n }\n\n}\n\n\n\nimpl std::convert::From<QtyFilled> for i32 {\n\n fn from(qty_filled: QtyFilled) -> i32 {\n\n qty_filled.0\n\n }\n\n}\n\n\n\n/// Item condition\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Condition {\n", "file_path": "src/wanted.rs", "rank": 53, "score": 3.8759331671120583 }, { "content": "impl std::convert::From<Color> for i8 {\n\n fn from(color: Color) -> i8 {\n\n color.0\n\n }\n\n}\n\n\n\n/// Maximum Desired Price\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct MaxPrice(pub f32);\n\n\n\nimpl std::convert::From<String> for MaxPrice {\n\n fn from(input_string: String) -> MaxPrice {\n\n match input_string.parse::<f32>() {\n\n Ok(max_price) => return Self(max_price),\n\n Err(_e) => panic!(\"Could not parse MaxPrice {}\", input_string),\n\n };\n\n }\n\n}\n\n\n\nimpl std::convert::From<MaxPrice> for String {\n", "file_path": "src/wanted.rs", "rank": 54, "score": 2.456250742123376 }, { "content": " ItemType::Set => \"S\".to_string(),\n\n ItemType::Part => \"P\".to_string(),\n\n ItemType::Minifig => \"M\".to_string(),\n\n ItemType::Book => \"B\".to_string(),\n\n ItemType::Gear => \"G\".to_string(),\n\n ItemType::Catalog => \"C\".to_string(),\n\n ItemType::Instruction => \"I\".to_string(),\n\n ItemType::OriginalBox => \"O\".to_string(),\n\n ItemType::UnsortedLot => \"U\".to_string(),\n\n 
}\n\n }\n\n}\n\n\n\n/// The canonical Lego catalog item number\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]\n\npub struct ItemID(pub String);\n\n\n\nimpl std::convert::From<String> for ItemID {\n\n fn from(input_str: String) -> ItemID {\n\n Self(input_str)\n", "file_path": "src/wanted.rs", "rank": 55, "score": 2.452226983183293 }, { "content": "#[serde(rename(serialize = \"INVENTORY\"))]\n\npub struct SerdeWantedList {\n\n #[serde(rename = \"ITEM\")]\n\n pub items: Vec<SerdeItem>,\n\n}\n\n\n\nimpl SerdeWantedList {\n\n /// Dirty fix for a serialization issue with the quick_xml library.\n\n /// When we try to serialize a Vec<SerdeItem>, we end up with\n\n /// <ITEM><ITEM>...</ITEM></ITEM> at the beginning and end of the\n\n /// vectors. So...we're going to straight up remove the redundant\n\n /// Items by replacing those ranges in the String.\n\n ///\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `serde_string`: Serialized String of a SerdeWantedList\n\n ///\n\n /// # Example\n\n ///\n", "file_path": "src/wanted.rs", "rank": 56, "score": 2.43906781945342 }, { "content": " wanted_show: None,\n\n wanted_list_id: None,\n\n }\n\n }\n\n}\n\n\n\n/// The type of the Lego Item\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ItemType {\n\n Set,\n\n Part,\n\n Minifig,\n\n Book,\n\n Gear,\n\n Catalog,\n\n Instruction,\n\n OriginalBox,\n\n UnsortedLot,\n\n}\n\n\n", "file_path": "src/wanted.rs", "rank": 57, "score": 1.9215054937515426 }, { "content": " serde_string.replace_range(end_bound_1..end_bound_2, \"\");\n\n serde_string.insert_str(0, \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\"?>\");\n\n serde_string\n\n }\n\n}\n\n\n\n/// A Bricklink WantedList\n\n#[derive(Debug, PartialEq)]\n\npub struct WantedList {\n\n pub items: Vec<Item>,\n\n}\n\n\n\n/// Serialize an WantedList to an XML String\n\nimpl std::convert::TryFrom<WantedList> for String {\n\n type Error = DeError;\n\n\n\n /// Given an WantedList, convert it to an XML string.\n\n /// This will go through the SerdeWantedList type as well as\n\n /// apply some of the ad-hoc fixes needed to make it a valid\n\n /// XML string.\n", "file_path": "src/wanted.rs", "rank": 58, "score": 1.4652479811495234 } ]
Rust
pingr/src/main.rs
13ABEL/internship-application-systems
1865456b46d621637147b194322dfc5d5791a126
mod main_clap; extern crate pnet; extern crate regex; use pnet::packet::icmp::{IcmpCode, IcmpTypes, MutableIcmpPacket}; use pnet::packet::icmpv6::{Icmpv6Types, MutableIcmpv6Packet}; use pnet::packet::ip::IpNextHeaderProtocols::{Icmp, Icmpv6}; use pnet::packet::ipv4::MutableIpv4Packet; use pnet::packet::ipv6::MutableIpv6Packet; use pnet::packet::ip::IpNextHeaderProtocols; use pnet::packet::MutablePacket; use pnet::transport::{icmp_packet_iter, transport_channel, TransportChannelType}; use pnet::util::checksum; use dns_lookup::lookup_host; use regex::Regex; use signal_hook::{register, SIGINT}; use std::error::Error; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::time::Instant; use std::{process, thread, time}; const IPV4_HEADER_LEN: usize = 20; const IPV4_HEADER_WORD_LEN: u8 = 5; const IPV6_HEADER_LEN: usize = 40; const ICMP_HEADER_LEN: usize = 8; const ICMP_PAYLOAD_LEN_DEFAULT: u16 = 56; const ICMP_CHECKSUM_POS: usize = 1; const ICMP_CODE: u8 = 0; const DEFAULT_BUFF_SIZE: usize = 4096; const DEFAULT_SLEEP_TIME: u64 = 1000; const DEFAULT_TIMEOUT: usize = 1; const DEFAULT_TTL: u8 = 64; const MAX_IPV4_PACKET_LEN: usize = 1044; const MAX_ICMP_PACKET_LEN: usize = MAX_IPV4_PACKET_LEN - IPV4_HEADER_LEN; const MAX_TTL: u64 = 255; const MAX_TIMEOUT: usize = 20; enum SupportedPacketType<'a> { V4(MutableIpv4Packet<'a>), V6(MutableIpv6Packet<'a>), } static mut SENT: usize = 0; static mut RECEIVED: usize = 0; fn main() { let arg_matches = main_clap::clap_desc(); let arg_ping_dest = match arg_matches.value_of(main_clap::ARG_ADDRESS) { Some(input) => String::from(input), None => panic!("Please supply the address to ping"), }; let ttl = match arg_matches.value_of(main_clap::ARG_TTL) { Some(input) => { let full_ttl = input.parse::<u64>().expect("the ttl must be an integer"); match full_ttl { 1..=MAX_TTL => full_ttl as u8, _ => panic!("the ttl is 1 to {}", MAX_TTL), } } None => DEFAULT_TTL, }; let icmp_packet_len: usize = match arg_matches.value_of(main_clap::ARG_PACKET_SIZE) { Some(input) => { let full_payload_len = input .parse::<usize>() .expect("the packet size must be an integer"); match full_payload_len { 8..=MAX_ICMP_PACKET_LEN => full_payload_len, _ => panic!( "the icmp packet length must be between {} and {} bytes", ICMP_HEADER_LEN, MAX_ICMP_PACKET_LEN ), } } None => ICMP_PAYLOAD_LEN_DEFAULT as usize, }; let timeout_length = match arg_matches.value_of(main_clap::ARG_TIMEOUT) { Some(input) => { let full_timeout_len = input .parse::<usize>() .expect("the timeout must be an integer"); match full_timeout_len { 1..=MAX_TIMEOUT => full_timeout_len, _ => panic!( "the timeout must be between {} and {} seconds", 1, MAX_TIMEOUT ), } } None => DEFAULT_TIMEOUT, }; unsafe { register(SIGINT, || finish()).unwrap(); } let address = resolve_ip_address(&arg_ping_dest).unwrap(); let (ip_packet_size, protocol) = match address { IpAddr::V4(_) => { let size = IPV4_HEADER_LEN + icmp_packet_len; let protocol = Icmp; (size, protocol) } IpAddr::V6(_) => { let size = IPV6_HEADER_LEN + icmp_packet_len; let protocol = Icmpv6; (size, protocol) } }; let duration = time::Duration::from_millis(DEFAULT_SLEEP_TIME); let channel_type = TransportChannelType::Layer3(protocol); let (mut sender, mut receiver) = match transport_channel(DEFAULT_BUFF_SIZE, channel_type) { Ok((sender, receiver)) => (sender, receiver), Err(e) => panic!("Error initializing the channel {}", e), }; let mut receiver_iter = icmp_packet_iter(&mut receiver); println!( "PINGER: {}({}) with {} bytes of data", arg_ping_dest, address, icmp_packet_len ); loop { let 
mut ip_packet_buf = vec![0u8; ip_packet_size]; let mut icmp_packet_buf = vec![0u8; icmp_packet_len]; let packet = create_packet( address, ttl, &mut ip_packet_buf, &mut icmp_packet_buf, icmp_packet_len, ); let time_sent = Instant::now(); let send_result = match packet { SupportedPacketType::V4(packet) => sender.send_to(packet, address), SupportedPacketType::V6(packet) => sender.send_to(packet, address), }; match send_result { Ok(_) => { unsafe { SENT += 1 } } Err(e) => panic!("Error sending packet {}", e), }; match receiver_iter.next_with_timeout(time::Duration::from_secs(timeout_length as u64)) { Ok(Some((_, ip_addr))) => { println!( "{} bytes from {}: ttl={} time={} ms", icmp_packet_len, ip_addr, ttl, (time_sent.elapsed().as_micros() as f64) / 1000.0 ); unsafe { RECEIVED += 1 } } Ok(None) => { println!( "packet timed out: time={} ms", time_sent.elapsed().as_millis() ); } Err(e) => println!("Error receiving packet {}", e), } thread::sleep(duration); } } /* parse input as IP address */ fn resolve_ip_address(input: &String) -> Result<IpAddr, Box<dyn Error>> { let reg_ipv4 = Regex::new(r#"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$"#)?; let reg_ipv6 = Regex::new("^(?:[a-fA-F0-9]{1,4}:){7}[a-fA-F0-9]{1,4}$")?; let reg_hostname = Regex::new( r#"^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.){0,2}[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"#, )?; let ip_addr: IpAddr; if reg_hostname.is_match(input) { let lookup_results = lookup_host(input)?; match lookup_results.len() { 2 => ip_addr = lookup_results[1], 1 => ip_addr = lookup_results[0], _ => panic!("host name lookup returned with not results"), } } else if reg_ipv4.is_match(input) || reg_ipv6.is_match(input) { ip_addr = input.parse()?; } else { panic!("Please enter a valid domain or IP address"); } return Ok(ip_addr); } /* creates a packet for a given address based on address spec ie. 
Ipv4Packet for Ipv4Addr and Ipv6Packet for Ipv6Addr */ fn create_packet<'a>( address: IpAddr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> SupportedPacketType<'a> { return match address { IpAddr::V4(ip_addr) => SupportedPacketType::V4(create_ipv4_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), IpAddr::V6(ip_addr) => SupportedPacketType::V6(create_ipv6_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), }; } /* I referenced this: https://codereview.stackexchange.com/questions/208875/traceroute-implementation-in-rust which helped me understand I had to wrap my ICMP packet within a IP[v4/v6] packet */ fn create_ipv4_packet<'a>( dest: Ipv4Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv4Packet<'a> { let mut ipv4_packet = MutableIpv4Packet::new(ip_packet_buf).expect("unable to create ipv4 packet"); ipv4_packet.set_version(4); ipv4_packet.set_header_length(IPV4_HEADER_WORD_LEN); ipv4_packet.set_total_length((IPV4_HEADER_LEN + icmp_packet_len) as u16); ipv4_packet.set_ttl(ttl); ipv4_packet.set_next_level_protocol(IpNextHeaderProtocols::Icmp); ipv4_packet.set_destination(dest); let mut icmp_packet = MutableIcmpPacket::new(icmp_packet_buf).expect("unable to create icmp packet"); icmp_packet.set_icmp_type(IcmpTypes::EchoRequest); icmp_packet.set_icmp_code(IcmpCode::new(ICMP_CODE)); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); ipv4_packet.set_payload(icmp_packet.packet_mut()); return ipv4_packet; } fn create_ipv6_packet<'a>( dest: Ipv6Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv6Packet<'a> { let mut ipv6_packet = MutableIpv6Packet::new(ip_packet_buf).expect("invalid packet buffer size"); ipv6_packet.set_version(6); ipv6_packet.set_destination(dest); ipv6_packet.set_hop_limit(ttl); let mut icmp_packet = MutableIcmpv6Packet::new(icmp_packet_buf).unwrap(); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); icmp_packet.set_icmpv6_type(Icmpv6Types::EchoRequest); ipv6_packet.set_payload_length((ICMP_HEADER_LEN + icmp_packet_len) as u16); ipv6_packet.set_payload(icmp_packet.packet_mut()); return ipv6_packet; } unsafe fn finish() { println!("\n--- ping statistics ---"); let packet_loss = (SENT - RECEIVED) / (SENT + RECEIVED) * 100; println!( "{} packets transmitted, {} received, {}% packet loss", SENT, RECEIVED, packet_loss, ); process::exit(0); }
mod main_clap; extern crate pnet; extern crate regex; use pnet::packet::icmp::{IcmpCode, IcmpTypes, MutableIcmpPacket}; use pnet::packet::icmpv6::{Icmpv6Types, MutableIcmpv6Packet}; use pnet::packet::ip::IpNextHeaderProtocols::{Icmp, Icmpv6}; use pnet::packet::ipv4::MutableIpv4Packet; use pnet::packet::ipv6::MutableIpv6Packet; use pnet::packet::ip::IpNextHeaderProtocols; use pnet::packet::MutablePacket; use pnet::transport::{icmp_packet_iter, transport_channel, TransportChannelType}; use pnet::util::checksum; use dns_lookup::lookup_host; use regex::Regex; use signal_hook::{register, SIGINT}; use std::error::Error; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use std::time::Instant; use std::{process, thread, time}; const IPV4_HEADER_LEN: usize = 20; const IPV4_HEADER_WORD_LEN: u8 = 5; const IPV6_HEADER_LEN: usize = 40; const ICMP_HEADER_LEN: usize = 8; const ICMP_PAYLOAD_LEN_DEFAULT: u16 = 56; const ICMP_CHECKSUM_POS: usize = 1; const ICMP_CODE: u8 = 0; const DEFAULT_BUFF_SIZE: usize = 4096; const DEFAULT_SLEEP_TIME: u64 = 1000; const DEFAULT_TIMEOUT: usize = 1; const DEFAULT_TTL: u8 = 64; const MAX_IPV4_PACKET_LEN: usize = 1044; const MAX_ICMP_PACKET_LEN: usize = MAX_IPV4_PACKET_LEN - IPV4_HEADER_LEN; const MAX_TTL: u64 = 255; const MAX_TIMEOUT: usize = 20; enum SupportedPacketType<'a> { V4(MutableIpv4Packet<'a>), V6(MutableIpv6Packet<'a>), } static mut SENT: usize = 0; static mut RECEIVED: usize = 0; fn main() { let arg_matches = main_clap::clap_desc(); let arg_ping_dest = match arg_matches.value_of(main_clap::ARG_ADDRESS) { Some(input) => String::from(input), None => panic!("Please supply the address to ping"), }; let ttl = match arg_matches.value_of(main_clap::ARG_TTL) { Some(input) => { let full_ttl = input.parse::<u64>().expect("the ttl must be an integer"); match full_ttl { 1..=MAX_TTL => full_ttl as u8, _ => panic!("the ttl is 1 to {}", MAX_TTL), } } None => DEFAULT_TTL, }; let icmp_packet_len: usize = match arg_matches.value_of(main_clap::ARG_PACKET_SIZE) { Some(input) => { let full_payload_len = input .parse::<usize>() .expect("the packet size must be an integer"); match full_payload_len { 8..=MAX_ICMP_PACKET_LEN => full_payload_len, _ => panic!( "the icmp packet length must be between {} and {} bytes", ICMP_HEADER_LEN, MAX_ICMP_PACKET_LEN ), } } None => ICMP_PAYLOAD_LEN_DEFAULT as usize, }; let timeout_length = match arg_matches.value_of(main_clap::ARG_TIMEOUT) { Some(input) => { let full_timeout_len = input .parse::<usize>() .expect("the timeout must be an integer"); match full_timeout_len { 1..=MAX_TIMEOUT => full_timeout_len, _ => panic!( "the timeout must be between {} and {} seconds", 1, MAX_TIMEOUT ), } } None => DEFAULT_TIMEOUT, }; unsafe { register(SIGINT, || finish()).unwrap(); } let address = resolve_ip_address(&arg_ping_dest).unwrap(); let (ip_packet_size, protocol) = match address { IpAddr::V4(_) => { let size = IPV4_HEADER_LEN + icmp_packet_len; let protocol = Icmp; (size, protocol) } IpAddr::V6(_) => { let size = IPV6_HEADER_LEN + icmp_packet_len; let protocol = Icmpv6; (size, protocol) } }; let duration = time::Duration::from_millis(DEFAULT_SLEEP_TIME); let channel_type = TransportChannelType::Layer3(protocol); let (mut sender, mut receiver) = match transport_channel(DEFAULT_BUFF_SIZE, channel_type) { Ok((sender, receiver)) => (sender, receiver), Err(e) => panic!("Error initializing the channel {}", e), }; let mut receiver_iter = icmp_packet_iter(&mut receiver); println!( "PINGER: {}({}) with {} bytes of data", arg_ping_dest, address, icmp_packet_len ); loop { let 
mut ip_packet_buf = vec![0u8; ip_packet_size]; let mut icmp_packet_buf = vec![0u8; icmp_packet_len]; let packet = create_packet( address, ttl, &mut ip_packet_buf, &mut icmp_packet_buf, icmp_packet_len, ); let time_sent = Instant::now(); let send_result = match packet { SupportedPacketType::V4(packet) => sender.send_to(packet, address), SupportedPacketType::V6(packet) => sender.send_to(packet, address), }; match send_result { Ok(_) => { unsafe { SENT += 1 } } Err(e) => panic!("Error sending packet {}", e), }; match receiver_iter.next_with_timeout(time::Duration::from_secs(timeout_length as u64)) { Ok(Some((_, ip_addr))) => { println!( "{} bytes from {}: ttl={} time={} ms", icmp_packet_len, ip_addr, ttl, (time_sent.elapsed().as_micros() as f64) / 1000.0 ); unsafe { RECEIVED += 1 } } Ok(None) => { println!( "packet timed out: time={} ms", time_sent.elapsed().as_millis() ); } Err(e) => println!("Error receiving packet {}", e), } thread::sleep(duration); } } /* parse input as IP address */ fn resolve_ip_address(input: &String) -> Result<IpAddr, Box<dyn Error>> { let reg_ipv4 = Regex::new(r#"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$"#)?; let reg_ipv6 = Regex::new("^(?:[a-fA-F0-9]{1,4}:){7}[a-fA-F0-9]{1,4}$")?; let reg_hostname = Regex::new( r#"^(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.){0,2}[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$"#, )?; let ip_addr: IpAddr; if reg_hostname.is_match(input) { let lookup_results = lookup_host(input)?; match lookup_results.len() { 2 => ip_addr = lookup_results[1], 1 => ip_addr = lookup_results[0], _ => panic!("host name lookup returned with not results"), } } else if reg_ipv4.is_match(input) || reg_ipv6.is_match(input) { ip_addr = input.parse()?; } else { panic!("Please enter a valid domain or IP address"); } return Ok(ip_addr); } /* creates a packet for a given address based on address spec ie. Ipv4Packet for Ipv4Addr and Ipv6Packet for Ipv6Addr */ fn create_packet<'a>( address: IpAddr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> SupportedPacketType<'a> { return match address { IpAddr::V4(ip_addr) => SupportedPacketType::V4(create_ipv4_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), IpAddr::V6(ip_addr) => SupportedPacketType::V6(create_ipv6_packet( ip_addr, ttl, ip_packet_buf, icmp_packet_buf, icmp_packet_len, )), }; } /* I referenced this: https://codereview.stackexchange.com/que
ER_WORD_LEN); ipv4_packet.set_total_length((IPV4_HEADER_LEN + icmp_packet_len) as u16); ipv4_packet.set_ttl(ttl); ipv4_packet.set_next_level_protocol(IpNextHeaderProtocols::Icmp); ipv4_packet.set_destination(dest); let mut icmp_packet = MutableIcmpPacket::new(icmp_packet_buf).expect("unable to create icmp packet"); icmp_packet.set_icmp_type(IcmpTypes::EchoRequest); icmp_packet.set_icmp_code(IcmpCode::new(ICMP_CODE)); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); ipv4_packet.set_payload(icmp_packet.packet_mut()); return ipv4_packet; } fn create_ipv6_packet<'a>( dest: Ipv6Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv6Packet<'a> { let mut ipv6_packet = MutableIpv6Packet::new(ip_packet_buf).expect("invalid packet buffer size"); ipv6_packet.set_version(6); ipv6_packet.set_destination(dest); ipv6_packet.set_hop_limit(ttl); let mut icmp_packet = MutableIcmpv6Packet::new(icmp_packet_buf).unwrap(); let checksum = checksum(&icmp_packet.packet_mut(), ICMP_CHECKSUM_POS); icmp_packet.set_checksum(checksum); icmp_packet.set_icmpv6_type(Icmpv6Types::EchoRequest); ipv6_packet.set_payload_length((ICMP_HEADER_LEN + icmp_packet_len) as u16); ipv6_packet.set_payload(icmp_packet.packet_mut()); return ipv6_packet; } unsafe fn finish() { println!("\n--- ping statistics ---"); let packet_loss = (SENT - RECEIVED) / (SENT + RECEIVED) * 100; println!( "{} packets transmitted, {} received, {}% packet loss", SENT, RECEIVED, packet_loss, ); process::exit(0); }
stions/208875/traceroute-implementation-in-rust which helped me understand I had to wrap my ICMP packet within a IP[v4/v6] packet */ fn create_ipv4_packet<'a>( dest: Ipv4Addr, ttl: u8, ip_packet_buf: &'a mut [u8], icmp_packet_buf: &'a mut [u8], icmp_packet_len: usize, ) -> MutableIpv4Packet<'a> { let mut ipv4_packet = MutableIpv4Packet::new(ip_packet_buf).expect("unable to create ipv4 packet"); ipv4_packet.set_version(4); ipv4_packet.set_header_length(IPV4_HEAD
random
[ { "content": "pub fn clap_desc() -> ArgMatches<'static> {\n\n return App::new(\"pingr\")\n\n .version(\"0.1.0\")\n\n .author(\"Richard Wei <[email protected]>\")\n\n .arg(\n\n Arg::with_name(ARG_ADDRESS)\n\n .index(1)\n\n .takes_value(true)\n\n .help(\"The address or hostname to ping\"),\n\n )\n\n .arg(\n\n Arg::with_name(ARG_TTL)\n\n .short(\"t\")\n\n .takes_value(true)\n\n .help(\"How long the packet is allowed to be passed along before it 'dies' (1 - 255, default = 64)\"),\n\n )\n\n .arg(\n\n Arg::with_name(ARG_PACKET_SIZE)\n\n .short(\"s\")\n\n .takes_value(true)\n", "file_path": "pingr/src/main_clap.rs", "rank": 4, "score": 60958.84527194662 }, { "content": "extern crate clap;\n\n\n\nuse clap::{App, Arg, ArgMatches};\n\n\n\npub const ARG_ADDRESS : &'static str = \"ping-address\";\n\npub const ARG_TTL : &'static str = \"ttl\";\n\npub const ARG_PACKET_SIZE : &'static str = \"packet-size\";\n\npub const ARG_TIMEOUT : &'static str= \"timeout\";\n\n\n", "file_path": "pingr/src/main_clap.rs", "rank": 19, "score": 9674.91557825637 }, { "content": " \n\n .help(\"Number of bytes to be sent in the ICMP packet (default is 8 for header + 56 for body). (8 - 1024, default = 64)\"),\n\n )\n\n .arg(\n\n Arg::with_name(ARG_TIMEOUT)\n\n .short(\"W\")\n\n .takes_value(true)\n\n .help(\"Seconds to wait to receive a sent response (1 - 20, default = 1)\"),\n\n )\n\n .get_matches();\n\n}\n", "file_path": "pingr/src/main_clap.rs", "rank": 20, "score": 9670.308898319669 }, { "content": "# PINGR - a rust ping clone\n\nThis is a (basic) commandline ping cline written in Rust\n\n\n\n## Build instructions\n\nFrom the project root:\n\n```bash\n\n# the executable can be found in [./target/debug/ping]\n\ncargo build\n\n```\n\n\n\n## Usage\n\nRun the executable with either the hostname or IP destination address you want to ping.\n\nPINGER will send icmp echo requests to the destination in a loop and display response info until you terminate the program. \n\n\n\nTermination with SIGINT (ctrl + c) will display the total number of packets transmitted and lost, as well as packet loss\n\n\n\n```bash\n\nsudo ./target/debug/ping 127.0.0.1\n\n```\n\nNote: this need to run this as su because we need to use raw sockets to send icmp packets over ipv4/ipv6\n\n\n\n\n\n## Features:\n\n- setting custom **ttl**: the number of times a packet can be passed along before it is discarded - between 0 and 255 (default = 64)\n\n- set custom **icmp packet size**(including header): between 8 and 1024 (default = 64). The size constraint is explained in the see more section\n\n- set custom **time out**: time to wait for a response - between 1 and 20 seconds (default = 1) \n\n\n\nfor more information, you can run the compiled executable with the help flag\n\n```bash\n\n./target/debug/ping 127.0.0.1 --help\n\n```\n\n\n\nWhat doesn't work:\n\n- sending large packets using IPV4: the max packet size should be 65535, but it yields an error when I try to send it so I've set a hard limit for now\n\n```\n\nError sending packet Message too long (os error 90)\n\n```\n\n- IPV6: The ipv6 logic has been implemented, but I got blocked an error. 
I spent a while trying to debug this but haven't been able to find a solution (ipv6 is enabled on my device).\n\n```\n\nError sending packet Address family not supported by protocol (os error 97)\n", "file_path": "pingr/README.md", "rank": 21, "score": 16.151257842080692 }, { "content": "# Cloudflare Internship Application: Systems\n\n\n\n## What is it?\n\n\n\nPlease write a small Ping CLI application for MacOS or Linux.\n\nThe CLI app should accept a hostname or an IP address as its argument, then send ICMP \"echo requests\" in a loop to the target while receiving \"echo reply\" messages.\n\nIt should report loss and RTT times for each sent message.\n\n\n\nPlease choose from among these languages: C/C++/Go/Rust\n\n\n\n## Useful Links\n\n\n\n- [A Tour of Go](https://tour.golang.org/welcome/1)\n\n- [The Rust Programming Language](https://doc.rust-lang.org/book/index.html)\n\n\n\n## Requirements\n\n\n\n### 1. Use one of the specified languages\n\n\n\nPlease choose from among C/C++/Go/Rust. If you aren't familiar with these languages, you're not alone! Many engineers join Cloudflare without\n\nspecific langauge experience. Please consult [A Tour of Go](https://tour.golang.org/welcome/1) or [The Rust Programming Language](https://doc.rust-lang.org/book/index.html).\n\n\n\n### 2. Build a tool with a CLI interface\n\n\n\nThe tool should accept as a positional terminal argument a hostname or IP address.\n\n\n\n### 3. Send ICMP \"echo requests\" in an infinite loop\n\n\n\nAs long as the program is running it should continue to emit requests with a periodic delay.\n\n\n\n### 4. Report loss and RTT times for each message\n\n\n\nPacket loss and latency should be reported as each message received.\n\n\n\n## Submitting your project\n\n\n\nWhen submitting your project, you should prepare your code for upload to Greenhouse. The preferred method for doing this is to create a \"ZIP archive\" of your project folder: for more instructions on how to do this on Windows and Mac, see [this guide](https://www.sweetwater.com/sweetcare/articles/how-to-zip-and-unzip-files/).\n\n\n\nPlease provide the source code only, a compiled binary is not necessary.\n\n\n\n## Using Libraries\n\n\n\nYou may use libraries (both built-in and installed via package managers) and system calls as necessary. Please don't use the ping built-in application or a full library implementation of ping.\n\n\n\n## Extra Credit\n\n\n\n1. Add support for both IPv4 and IPv6\n\n2. Allow to set TTL as an argument and report the corresponding \"time exceeded” ICMP messages\n\n3. 
Any additional features listed in the ping man page or which you think would be valuable\n", "file_path": "README.md", "rank": 22, "score": 10.549676191706192 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n- Using welcoming and inclusive language\n\n- Being respectful of differing viewpoints and experiences\n\n- Gracefully accepting constructive criticism\n\n- Focusing on what is best for the community\n\n- Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n- The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n- Trolling, insulting/derogatory comments, and personal or political attacks\n\n- Public or private harassment\n\n- Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n- Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 23, "score": 2.8723198738220854 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at [email protected]. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. 
The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 24, "score": 2.0617505247184322 } ]
Rust
hive-core/src/lua/http/request.rs
hackerer1c/hive
a98ab9a97836f208646df252175283067a398b7b
use super::body::LuaBody; use super::header_map::LuaHeaderMap; use super::uri::LuaUri; use crate::path::Params; use hyper::header::{HeaderName, HeaderValue}; use hyper::http::request::Parts; use hyper::{Body, HeaderMap, Method, Request}; use mlua::{ExternalError, ExternalResult, FromLua, Lua, Table, UserData}; use std::cell::RefCell; use std::rc::Rc; pub struct LuaRequest { pub(crate) method: Method, pub(crate) uri: hyper::Uri, pub(crate) headers: Rc<RefCell<HeaderMap>>, pub(crate) body: Option<LuaBody>, params: Option<Params>, } impl LuaRequest { #[rustfmt::skip] pub fn new(req: Request<Body>, params: Params) -> Self { let (Parts { method, uri, headers, .. }, body) = req.into_parts(); let headers = Rc::new(RefCell::new(headers)); let body = Some(body.into()); let params = Some(params); Self { method, uri, headers, body, params } } } impl Default for LuaRequest { fn default() -> Self { Self { method: Method::GET, uri: Default::default(), headers: Default::default(), body: Some(LuaBody::Empty), params: None, } } } impl UserData for LuaRequest { fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(fields: &mut F) { fields.add_field_function_get("params", |lua, this| { this .get_named_user_value::<_, Table>("params") .or_else(|_err| { let mut this_ref = this.borrow_mut::<Self>()?; let params = this_ref .params .take() .map(|x| { let iter = x .into_iter() .map(|(k, v)| (k.into_string(), v.into_string())); lua.create_table_from(iter) }) .unwrap_or_else(|| lua.create_table())?; this.set_named_user_value("params", params.clone())?; Ok(params) }) }); fields.add_field_method_get("method", |lua, this| lua.pack(this.method.as_str())); fields.add_field_method_get("uri", |_lua, this| Ok(LuaUri(this.uri.clone()))); fields.add_field_function_get("body", |lua, this| { let mut this_ = this.borrow_mut::<Self>()?; let body = this_.body.take(); if let Some(body) = body { let x = lua.pack(body)?; this.set_named_user_value("body", x.clone())?; Ok(x) } else { this.get_named_user_value("body") } }); fields.add_field_method_get("headers", |_lua, this| { Ok(LuaHeaderMap(this.headers.clone())) }); } } impl<'lua> FromLua<'lua> for LuaRequest { fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> { match lua_value { mlua::Value::String(uri) => Ok(Self { uri: hyper::Uri::try_from(uri.as_bytes()).to_lua_err()?, ..Default::default() }), mlua::Value::Table(table) => { let method = table .raw_get::<_, Option<mlua::String>>("method")? .map(|x| Method::from_bytes(x.as_bytes())) .transpose() .to_lua_err()? .unwrap_or(Method::GET); let uri: hyper::Uri = table .raw_get::<_, mlua::String>("uri")? 
.as_bytes() .try_into() .to_lua_err()?; let headers_table: Option<Table> = table.raw_get("headers")?; let mut headers = HeaderMap::new(); if let Some(headers_table) = headers_table { for entry in headers_table.pairs::<mlua::String, mlua::Value>() { let (k, v) = entry?; let k = HeaderName::from_bytes(k.as_bytes()).to_lua_err()?; match v { mlua::Value::String(v) => { headers.append(k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } mlua::Value::Table(vs) => { for v in vs.sequence_values::<mlua::String>() { let v = v?; headers.append(&k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } } _ => return Err("expected string or table".to_lua_err()), } } } Ok(Self { method, uri, headers: Rc::new(RefCell::new(headers)), body: Some(table.raw_get("body")?), ..Default::default() }) } mlua::Value::UserData(x) => { let mut u = x.take::<Self>()?; if u.body.is_none() { let t = x.get_named_user_value::<_, LuaBody>("body")?; u.body = Some(t); } Ok(u) } _ => Err("expected string or table".to_lua_err()), } } } impl From<LuaRequest> for Request<Body> { fn from(x: LuaRequest) -> Self { let headers = Rc::try_unwrap(x.headers) .map(RefCell::into_inner) .unwrap_or_else(|x| x.borrow().clone()); let mut builder = Request::builder().method(x.method).uri(x.uri); *builder.headers_mut().unwrap() = headers; builder.body(x.body.unwrap().into()).unwrap() } }
use super::body::LuaBody; use super::header_map::LuaHeaderMap; use super::uri::LuaUri; use crate::path::Params; use hyper::header::{HeaderName, HeaderValue}; use hyper::http::request::Parts; use hyper::{Body, HeaderMap, Method, Request}; use mlua::{ExternalError, ExternalResult, FromLua, Lua, Table, UserData}; use std::cell::RefCell; use std::rc::Rc; pub struct LuaRequest { pub(crate) method: Method, pub(crate) uri: hyper::Uri, pub(crate) headers: Rc<RefCell<HeaderMap>>, pub(crate) body: Option<LuaBody>, params: Option<Params>, } impl LuaRequest { #[rustfmt::skip] pub fn new(req: Request<Body>, params: Params) -> Self { let (Parts { method, uri, headers, .. }, body) = req.into_parts(); let headers = Rc::new(RefCell::new(headers)); let body = Some(body.into()); let params = Some(params); Self { method, uri, headers, body, params } } } impl Default for LuaRequest { fn default() -> Self { Self { method: Method::GET, uri: Default::default(), headers: Default::default(), body: Some(LuaBody::Empty), params: None, } } } impl UserData for LuaRequest { fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(fields: &mut F) { fields.add_field_function_get("params", |lua, this| { this .get_named_user_value::<_, Table>("params") .or_else(|_err| { let mut this_ref = this.borrow_mut::<Self>()?; let params = this_ref .params .take() .map(|x| { let iter = x .into_iter() .map(|(k, v)| (k.into_string(), v.into_string())); lua.create_table_from(iter) }) .unwrap_or_else(|| lua.create_table())?; this.set_named_user_value("params", params.clone())?; Ok(params) }) }); fields.add_field_method_get("method", |lua, this| lua.pack(this.method.as_str())); fields.add_field_method_get("uri", |_lua, this| Ok(LuaUri(this.uri.clone()))); fields.add_field_function_get("body", |lua, this| { let mut this_ = this.borrow_mut::<Self>()?; let body = this_.body.take(); if let Some(body) = body { let x = lua.pack(body)?; this.set_named_user_value("body", x.clone())?; Ok(x) } else { this.get_named_user_value("body") } }); fields.add_field_method_get("headers", |_lua, this| { Ok(LuaHeaderMap(this.headers.clone())) }); } } impl<'lua> FromLua<'lua> for LuaRequest { fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> { match lua_value { mlua::Value::String(uri) => Ok(Self { uri: hyper::Uri::try_from(uri.as_bytes()).to_lua_err()?, ..Default::default() }), mlua::Value::Table(table) => { let method = table .raw_get::<_, Option<mlua::String>>("method")? .map(|x| Method::from_bytes(x.as_bytes())) .transpose() .to_lua_err()? .unwrap_or(Method::GET); let uri: hyper::Uri = table .raw_get::<_, mlua::String>("uri")? .as_bytes() .
} impl From<LuaRequest> for Request<Body> { fn from(x: LuaRequest) -> Self { let headers = Rc::try_unwrap(x.headers) .map(RefCell::into_inner) .unwrap_or_else(|x| x.borrow().clone()); let mut builder = Request::builder().method(x.method).uri(x.uri); *builder.headers_mut().unwrap() = headers; builder.body(x.body.unwrap().into()).unwrap() } }
try_into() .to_lua_err()?; let headers_table: Option<Table> = table.raw_get("headers")?; let mut headers = HeaderMap::new(); if let Some(headers_table) = headers_table { for entry in headers_table.pairs::<mlua::String, mlua::Value>() { let (k, v) = entry?; let k = HeaderName::from_bytes(k.as_bytes()).to_lua_err()?; match v { mlua::Value::String(v) => { headers.append(k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } mlua::Value::Table(vs) => { for v in vs.sequence_values::<mlua::String>() { let v = v?; headers.append(&k, HeaderValue::from_bytes(v.as_bytes()).to_lua_err()?); } } _ => return Err("expected string or table".to_lua_err()), } } } Ok(Self { method, uri, headers: Rc::new(RefCell::new(headers)), body: Some(table.raw_get("body")?), ..Default::default() }) } mlua::Value::UserData(x) => { let mut u = x.take::<Self>()?; if u.body.is_none() { let t = x.get_named_user_value::<_, LuaBody>("body")?; u.body = Some(t); } Ok(u) } _ => Err("expected string or table".to_lua_err()), } }
function_block-function_prefix_line
[ { "content": "pub fn create_fn_create_uri(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|_lua, s: mlua::String| {\n\n Ok(LuaUri(hyper::Uri::try_from(s.as_bytes()).to_lua_err()?))\n\n })\n\n}\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 0, "score": 196827.64431821514 }, { "content": "pub fn register<'lua>(lua: &'lua Lua, value: impl ToLua<'lua>) -> mlua::Result<()> {\n\n let context: Table = lua.named_registry_value(\"_hive_current_context\")?;\n\n context.raw_insert(context.raw_len() + 1, value)\n\n}\n", "file_path": "hive-core/src/lua/context.rs", "rank": 1, "score": 196395.49341417628 }, { "content": "pub fn apply_table_module_patch(lua: &Lua, table_module: Table) -> mlua::Result<()> {\n\n table_module.raw_set(\"dump\", create_fn_table_dump(lua)?)?;\n\n table_module.raw_set(\"insert\", create_fn_table_insert(lua)?)?;\n\n table_module.raw_set(\"scope\", create_fn_table_scope(lua)?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 2, "score": 185802.4690357889 }, { "content": "pub fn destroy(lua: &Lua, context: Table) -> mlua::Result<()> {\n\n let code = mlua::chunk! {\n\n for _, v in ipairs($context) do\n\n pcall(function() local v2 <close> = v end)\n\n end\n\n };\n\n lua.load(code).set_name(\"_hive_destroy_context\")?.call(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/context.rs", "rank": 3, "score": 181802.1556441838 }, { "content": "pub fn set_current(lua: &Lua, context: Option<Table>) -> mlua::Result<()> {\n\n lua.set_named_registry_value(\"_hive_current_context\", context)\n\n}\n\n\n", "file_path": "hive-core/src/lua/context.rs", "rank": 4, "score": 175026.2922289023 }, { "content": "pub fn json_response_raw(status: StatusCode, body: impl Serialize) -> Response<Body> {\n\n Response::builder()\n\n .status(status)\n\n .header(\"content-type\", \"application/json\")\n\n .body(serde_json::to_string(&body).unwrap().into())\n\n .unwrap()\n\n}\n\n\n\n/// Taken from `tokio::fs`\n\npub async fn asyncify<F, T, E>(f: F) -> Result<T>\n\nwhere\n\n F: FnOnce() -> Result<T, E> + Send + 'static,\n\n T: Send + 'static,\n\n E: Send + 'static,\n\n crate::Error: From<E>,\n\n{\n\n match spawn_blocking(f).await {\n\n Ok(res) => res.map_err(From::from),\n\n Err(_) => Err(io::Error::new(io::ErrorKind::Other, \"background task failed\").into()),\n\n }\n", "file_path": "hive-server/src/util.rs", "rank": 5, "score": 167575.70093097788 }, { "content": "pub fn create_module_permission(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Table> {\n\n let permission_table = lua.create_table()?;\n\n permission_table.raw_set(\n\n \"check\",\n\n lua.create_function(move |_lua, perm: Permission| Ok(permissions.clone().check_ok(&perm)))?,\n\n )?;\n\n Ok(permission_table)\n\n}\n", "file_path": "hive-core/src/lua/permission.rs", "rank": 6, "score": 166761.7909322516 }, { "content": "pub fn json_response(status: StatusCode, body: impl Serialize) -> Result<Response<Body>> {\n\n Ok(json_response_raw(status, body))\n\n}\n\n\n", "file_path": "hive-server/src/util.rs", "rank": 7, "score": 165485.03313424537 }, { "content": "#[self_referencing]\n\nstruct LuaHeaderMapIter {\n\n inner: Rc<RefCell<HeaderMap>>,\n\n\n\n #[borrows(inner)]\n\n #[not_covariant]\n\n borrow: RefMut<'this, HeaderMap>,\n\n\n\n #[borrows(borrow)]\n\n #[covariant]\n\n iter: hyper::header::Iter<'this, HeaderValue>,\n\n}\n\n\n\nimpl UserData for LuaHeaderMapIter {}\n", "file_path": "hive-core/src/lua/http/header_map.rs", "rank": 8, "score": 164429.92230903098 }, { "content": "pub fn 
create_fn_create_response(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|_lua, params: Table| {\n\n let body = params.raw_get::<_, LuaBody>(\"body\")?;\n\n let mut response = body.into_default_response();\n\n\n\n let status = params.raw_get::<_, Option<u16>>(\"status\")?;\n\n if let Some(x) = status {\n\n response.status = StatusCode::from_u16(x)\n\n .map_err(|_| format!(\"invalid status code: {x}\"))\n\n .to_lua_err()?;\n\n }\n\n\n\n let headers = params.raw_get::<_, Option<Table>>(\"headers\")?;\n\n if let Some(x) = headers {\n\n let mut headers = response.headers.borrow_mut();\n\n for f in x.pairs::<String, String>() {\n\n let (k, v) = f?;\n\n headers.insert(\n\n HeaderName::from_bytes(k.as_bytes())\n\n .map_err(|_| format!(\"invalid header value: {}\", k))\n", "file_path": "hive-core/src/lua/http/response.rs", "rank": 9, "score": 159733.62809436867 }, { "content": "pub fn create_preload_crypto(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| {\n\n let crypto_table = lua.create_table()?;\n\n crypto_table.raw_set(\"thread_rng\", LuaRng(Box::new(thread_rng())))?;\n\n Ok(crypto_table)\n\n })\n\n}\n", "file_path": "hive-core/src/lua/crypto.rs", "rank": 10, "score": 153491.14332556404 }, { "content": "pub fn create_preload_json(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| {\n\n let json_table = lua.create_table()?;\n\n json_table.raw_set(\"parse\", create_fn_json_parse(lua)?)?;\n\n json_table.raw_set(\"stringify\", create_fn_json_stringify(lua)?)?;\n\n json_table.raw_set(\"array\", create_fn_json_array(lua)?)?;\n\n json_table.raw_set(\"undo_array\", create_fn_json_undo_array(lua)?)?;\n\n json_table.raw_set(\"array_metatable\", lua.array_metatable())?;\n\n Ok(json_table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 11, "score": 153491.14332556404 }, { "content": "pub fn create_fn_os_getenv(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_function(move |_lua, name: mlua::String| {\n\n let name = std::str::from_utf8(name.as_bytes()).to_lua_err()?;\n\n permissions.check(&Permission::Env {\n\n name: Cow::Borrowed(name),\n\n })?;\n\n std::env::var(name).to_lua_err()\n\n })\n\n}\n", "file_path": "hive-core/src/lua/env.rs", "rank": 12, "score": 148863.60683120397 }, { "content": "pub fn method_not_allowed(expected: &[&'static str], got: &Method) -> Error {\n\n From::from((\n\n 405,\n\n \"method not allowed\",\n\n json!({ \"expected\": expected, \"got\": got.as_str() }),\n\n ))\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct ErrorAuthWrapper {\n\n inner: Error,\n\n uuid: Option<Uuid>,\n\n}\n\n\n\nimpl ErrorAuthWrapper {\n\n pub fn new(auth: bool, error: impl Into<Error>) -> Self {\n\n let inner = error.into();\n\n let uuid = if !auth && inner.kind.internal() {\n\n Some(Uuid::new_v4())\n\n } else {\n", "file_path": "hive-server/src/error.rs", "rank": 13, "score": 148442.95821393223 }, { "content": "pub fn create_fn_print<'a>(lua: &'a Lua, service_name: &str) -> mlua::Result<Function<'a>> {\n\n let tostring: Function = lua.globals().raw_get(\"tostring\")?;\n\n let target = format!(\"service '{service_name}'\");\n\n let f = lua.create_function(move |_lua, (tostring, args): (Function, MultiValue)| {\n\n let s = args\n\n .into_iter()\n\n .try_fold(String::new(), |mut init, x| -> mlua::Result<_> {\n\n let string = tostring.call::<_, mlua::String>(x)?;\n\n let string = std::str::from_utf8(string.as_bytes()).to_lua_err()?;\n\n init.push_str(string);\n\n (0..8 - 
string.as_bytes().len() % 8).for_each(|_| init.push(' '));\n\n Ok(init)\n\n })?;\n\n info!(target: &target, \"{s}\");\n\n Ok(())\n\n })?;\n\n f.bind(tostring)\n\n}\n", "file_path": "hive-core/src/lua/print.rs", "rank": 14, "score": 148053.95381678324 }, { "content": "fn create_fn_table_scope(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_async_function(|lua, (table, f): (mlua::Value, Function)| async move {\n\n match table {\n\n mlua::Value::Table(table) => f.call_async(table).await,\n\n mlua::Value::UserData(x) => {\n\n if let Ok(x) = x.borrow::<SharedTable>() {\n\n let x = lua.create_ser_userdata(SharedTableScope::new(x.0.clone()))?;\n\n let result = f.call_async::<_, mlua::Value>(x.clone()).await;\n\n x.take::<SharedTableScope>()?;\n\n return result;\n\n }\n\n if x.borrow::<SharedTableScope>().is_ok() {\n\n f.call_async::<_, mlua::Value>(x).await\n\n } else {\n\n Err(userdata_not_shared_table(\"scope\", 1))\n\n }\n\n }\n\n _ => Err(expected_table(\"scope\", 1, table.type_name())),\n\n }\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 15, "score": 143866.37030134202 }, { "content": "fn create_fn_table_dump(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| match table {\n\n mlua::Value::Table(table) => Ok(table),\n\n mlua::Value::UserData(x) => {\n\n if let Ok(x) = x.borrow::<SharedTable>() {\n\n x.deep_dump(lua)\n\n } else if let Ok(x) = x.borrow::<SharedTableScope>() {\n\n x.deep_dump(lua)\n\n } else {\n\n Err(userdata_not_shared_table(\"dump\", 1))\n\n }\n\n }\n\n _ => Err(expected_table(\"dump\", 1, table.type_name())),\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 16, "score": 143866.37030134202 }, { "content": "fn create_fn_table_insert(lua: &Lua) -> mlua::Result<Function> {\n\n let old: Function = lua\n\n .globals()\n\n .raw_get_path(\"<global>\", &[\"table\", \"insert\"])?;\n\n let f = lua.create_function(\n\n |lua, (old, table, args): (Function, mlua::Value, MultiValue)| match table {\n\n mlua::Value::Table(table) => old.call::<_, ()>((table, args)),\n\n mlua::Value::UserData(table) => {\n\n let mut args = args.into_iter();\n\n match args.len() {\n\n 1 => table_insert_shared_2(lua, table, args.next().unwrap()),\n\n 2 => table_insert_shared_3(\n\n lua,\n\n table,\n\n lua.unpack(args.next().unwrap())?,\n\n args.next().unwrap(),\n\n ),\n\n _ => Err(\"wrong number of arguments\".to_lua_err()),\n\n }\n\n }\n\n _ => Err(format!(\"expected table or shared table, got {}\", table.type_name()).to_lua_err()),\n\n },\n\n )?;\n\n f.bind(old)\n\n}\n\n\n\n// Exceptions\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 17, "score": 143866.37030134202 }, { "content": "/// Taken from [Cargo](https://github.com/rust-lang/cargo/blob/af307a38c20a753ec60f0ad18be5abed3db3c9ac/src/cargo/util/paths.rs#L60-L85),\n\n/// and modified to force absolute path.\n\npub fn normalize_path(path: impl AsRef<Path>) -> PathBuf {\n\n let mut components = path.as_ref().components().peekable();\n\n let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {\n\n components.next();\n\n PathBuf::from(c.as_os_str())\n\n } else {\n\n PathBuf::new()\n\n };\n\n ret.push(\"/\");\n\n\n\n for component in components {\n\n match component {\n\n Component::Prefix(..) 
=> unreachable!(),\n\n Component::RootDir => {\n\n ret.push(component.as_os_str());\n\n }\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n ret.pop();\n\n }\n\n Component::Normal(c) => {\n\n ret.push(c);\n\n }\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "hive-core/src/path.rs", "rank": 18, "score": 140871.36381504056 }, { "content": "pub fn create_preload_http(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_function(move |lua, ()| {\n\n let http = lua.create_table()?;\n\n\n\n http.raw_set(\"request\", create_fn_request(lua, permissions.clone())?)?;\n\n http.raw_set(\"Response\", create_fn_create_response(lua)?)?;\n\n http.raw_set(\"Uri\", create_fn_create_uri(lua)?)?;\n\n\n\n Ok(http)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/http/mod.rs", "rank": 19, "score": 139317.7172226205 }, { "content": "pub fn create_module_shared(lua: &Lua, service_name: Box<str>) -> mlua::Result<AnyUserData> {\n\n let shared = SHARED_STORE\n\n .entry(service_name)\n\n .or_insert(SharedTable::new())\n\n .clone();\n\n lua.create_ser_userdata(shared)\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 20, "score": 137956.6497984865 }, { "content": "fn parse_multipart(headers: &HeaderMap, body: Body) -> Result<Multipart<'static>> {\n\n let allowed_fields = vec![\"single\", \"multi\", \"config\"];\n\n let size_limit = SizeLimit::new()\n\n .for_field(\"single\", 1024u64.pow(2) * 5)\n\n .for_field(\"multi\", 1024u64.pow(2) * 100)\n\n .for_field(\"config\", 1024u64.pow(2) * 5);\n\n\n\n let content_type = headers\n\n .get(\"content-type\")\n\n .ok_or(\"no Content-Type given\")?\n\n .to_str()\n\n .or(Err(\"Content-Type is not valid UTF-8\"))?;\n\n let boundary = multer::parse_boundary(content_type)?;\n\n let constraints = Constraints::new()\n\n .allowed_fields(allowed_fields)\n\n .size_limit(size_limit);\n\n Ok(Multipart::with_constraints(body, boundary, constraints))\n\n}\n\n\n\nasync fn read_single<'a>(\n", "file_path": "hive-server/src/handle/upload.rs", "rank": 21, "score": 136393.50243497253 }, { "content": "fn create_fn_request(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, req: LuaRequest| {\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n if let Some(auth) = req.uri.authority() {\n\n let host = auth.host();\n\n let port = (auth.port())\n\n .map(|x| NonZeroU16::new(x.as_u16()).ok_or(\"port is zero\"))\n\n .unwrap_or_else(|| {\n\n (req.uri.scheme())\n\n .map(|x| match x.as_str() {\n\n \"https\" => nonzero!(443u16),\n\n _ => nonzero!(80u16),\n\n })\n\n .ok_or(\"no URI scheme specified\")\n\n })\n\n .to_lua_err()?;\n\n permissions.check(&Permission::Net {\n\n host: host.into(),\n\n port,\n", "file_path": "hive-core/src/lua/http/mod.rs", "rank": 22, "score": 135043.39883603214 }, { "content": "fn get_context_table<'lua>(\n\n sandbox: &'lua Rc<Sandbox>,\n\n context: &Option<RegistryKey>,\n\n) -> mlua::Result<Option<Table<'lua>>> {\n\n context\n\n .as_ref()\n\n .map(|x| sandbox.lua.registry_value(x))\n\n .transpose()\n\n}\n\n\n\nimpl Future for TaskFuture {\n\n type Output = mlua::Result<()>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {\n\n let this = self.project();\n\n let tx = if let Some(tx) = this.tx.take() {\n\n tx\n\n } else {\n\n return Poll::Ready(Ok(()));\n\n };\n", "file_path": "hive-core/src/task/task_future.rs", "rank": 23, "score": 133746.6630494193 }, { "content": "fn 
table_insert_shared_2(lua: &Lua, table: AnyUserData, value: mlua::Value) -> mlua::Result<()> {\n\n let (borrowed, owned);\n\n let table = if let Ok(table) = table.borrow::<SharedTable>() {\n\n owned = SharedTableScope::new(table.0.clone());\n\n &owned\n\n } else if let Ok(table) = table.borrow::<SharedTableScope>() {\n\n borrowed = table;\n\n &borrowed\n\n } else {\n\n return Err(userdata_not_shared_table(\"insert\", 1));\n\n };\n\n\n\n table.push(lua.unpack(value)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 24, "score": 132463.5513643503 }, { "content": "fn len(x: &SharedTableRepr) -> i64 {\n\n x.int.iter().last().map(|x| 0.max(*x.0)).unwrap_or(0)\n\n}\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 25, "score": 128732.19351568309 }, { "content": "fn userdata_not_shared_table(fn_name: &'static str, pos: u8) -> mlua::Error {\n\n BadArgument::new(fn_name, pos, \"failed to borrow userdata as shared table\").into()\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 26, "score": 126949.38055003334 }, { "content": "pub trait LuaTableExt<'a> {\n\n fn raw_get_path<T: FromLua<'a>>(&self, base: &str, path: &[&str]) -> Result<T>;\n\n}\n\n\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 27, "score": 123529.20763189746 }, { "content": "fn bind_local_env_to_shared(lua: &Lua, local_env: Table, shared: mlua::Value) -> Result<()> {\n\n let index = lua\n\n .create_function(\n\n |lua, (shared, _this, key): (SharedTable, Table, mlua::Value)| {\n\n if let Ok(key) = lua.unpack::<SharedTableKey>(key) {\n\n lua.pack(&*shared.get(key))\n\n } else {\n\n Ok(mlua::Value::Nil)\n\n }\n\n },\n\n )?\n\n .bind(shared.clone())?;\n\n\n\n let newindex = lua\n\n .create_function(\n\n |lua, (shared, this, key, value): (SharedTable, Table, mlua::Value, mlua::Value)| {\n\n if let (Ok(key), Ok(value)) = (\n\n lua.unpack::<SharedTableKey>(key.clone()),\n\n lua.unpack::<SharedTableValue>(value.clone()),\n\n ) {\n", "file_path": "hive-core/src/lua/sandbox/local_env.rs", "rank": 28, "score": 122631.85743876267 }, { "content": "pub fn remove_service_shared_stores(service_name: &str) {\n\n SHARED_STORE.retain(|k, _| k.as_ref() != service_name);\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct SharedTable(Arc<RwLock<SharedTableRepr>>);\n\n\n\nimpl SharedTable {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n pub fn from_lua_table(lua: &Lua, table: Table) -> mlua::Result<Self> {\n\n let mut int = BTreeMap::new();\n\n let mut hash = HashMap::new();\n\n for kv in table.clone().pairs::<SharedTableKey, SharedTableValue>() {\n\n let (k, v) = kv?;\n\n if let Some(i) = k.to_i64() {\n\n int.insert(i, v);\n\n } else {\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 29, "score": 121183.30943861161 }, { "content": "#[derive(Default)]\n\nstruct SharedTableRepr {\n\n int: BTreeMap<i64, SharedTableValue>,\n\n hash: HashMap<SharedTableKey, SharedTableValue>,\n\n array: bool,\n\n}\n\n\n\nimpl SharedTableRepr {\n\n fn get(&self, key: SharedTableKey) -> &SharedTableValue {\n\n const CONST_NIL: SharedTableValue = SharedTableValue::Nil;\n\n\n\n (key.to_i64())\n\n .map(|i| self.int.get(&i))\n\n .unwrap_or_else(|| self.hash.get(&key))\n\n .unwrap_or(&CONST_NIL)\n\n }\n\n\n\n fn set(&mut self, key: SharedTableKey, value: SharedTableValue) -> SharedTableValue {\n\n if let Some(i) = key.to_i64() {\n\n self.int.insert(i, value)\n\n } else {\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 30, "score": 120345.23850537391 }, { "content": "fn 
create_fn_json_parse(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, string: mlua::String| {\n\n extract_error(lua, || {\n\n let result: serde_json::Value = serde_json::from_slice(string.as_bytes()).to_lua_err()?;\n\n lua.to_value(&result)\n\n })\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 31, "score": 117093.65214575002 }, { "content": "fn create_fn_json_array(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| {\n\n match &table {\n\n mlua::Value::Table(table) => table.set_metatable(Some(lua.array_metatable())),\n\n mlua::Value::UserData(table) => {\n\n let table = table.borrow_mut::<SharedTable>()?;\n\n table.set_array(true);\n\n }\n\n _ => return Err(\"expected table or shared table\".to_lua_err()),\n\n }\n\n Ok(table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 32, "score": 117093.65214575002 }, { "content": "fn create_fn_json_stringify(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, (value, pretty): (mlua::Value, Option<bool>)| {\n\n extract_error(lua, || {\n\n let string = if pretty.unwrap_or_default() {\n\n serde_json::to_string_pretty(&value).to_lua_err()?\n\n } else {\n\n serde_json::to_string(&value).to_lua_err()?\n\n };\n\n Ok(string)\n\n })\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 33, "score": 117093.65214575002 }, { "content": "fn create_fn_json_undo_array(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| {\n\n match &table {\n\n mlua::Value::Table(table) => {\n\n if table\n\n .get_metatable()\n\n .map(|x| x == lua.array_metatable())\n\n .unwrap_or(false)\n\n {\n\n table.set_metatable(None);\n\n }\n\n }\n\n mlua::Value::UserData(table) => {\n\n let table = table.borrow_mut::<SharedTable>()?;\n\n table.set_array(false);\n\n }\n\n _ => return Err(\"expected table or shared table\".to_lua_err()),\n\n }\n\n Ok(table)\n\n })\n\n}\n", "file_path": "hive-core/src/lua/json.rs", "rank": 34, "score": 116135.89399931929 }, { "content": "fn create_fn_current_worker(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| std::thread::current().name().to_lua(lua))\n\n}\n", "file_path": "hive-core/src/lua/sandbox/global_env.rs", "rank": 35, "score": 115202.81042631083 }, { "content": "pub fn serialize_arc<S: Serializer>(arc: &Arc<impl Serialize>, ser: S) -> Result<S::Ok, S::Error> {\n\n arc.as_ref().serialize(ser)\n\n}\n", "file_path": "hive-core/src/util.rs", "rank": 36, "score": 113888.00364074201 }, { "content": "fn table_insert_shared_3(\n\n lua: &Lua,\n\n table: AnyUserData,\n\n pos: i64,\n\n value: mlua::Value,\n\n) -> mlua::Result<()> {\n\n if pos < 1 {\n\n return Err(out_of_bounds(\"insert\", 2));\n\n }\n\n let (borrowed, owned);\n\n let table = if let Ok(table) = table.borrow::<SharedTable>() {\n\n owned = SharedTableScope::new(table.0.clone());\n\n &owned\n\n } else if let Ok(table) = table.borrow::<SharedTableScope>() {\n\n borrowed = table;\n\n &borrowed\n\n } else {\n\n return Err(userdata_not_shared_table(\"insert\", 1));\n\n };\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 37, "score": 112695.31885035167 }, { "content": "fn raw_get_path<'a, T: FromLua<'a>>(\n\n table: &Table<'a>,\n\n base: &mut String,\n\n path: &[&str],\n\n) -> mlua::Result<T> {\n\n base.extend([\".\", path[0]]);\n\n if path.len() == 1 {\n\n Ok(table.raw_get(path[0])?)\n\n } else {\n\n raw_get_path(&table.raw_get::<_, Table>(path[0])?, base, &path[1..])\n\n 
}\n\n}\n\n\n\nimpl<'a> LuaTableExt<'a> for Table<'a> {\n\n fn raw_get_path<T: FromLua<'a>>(&self, base: &str, path: &[&str]) -> Result<T> {\n\n let mut base = base.into();\n\n let result = raw_get_path(self, &mut base, path).map_err(|mut error| {\n\n if let mlua::Error::FromLuaConversionError { message, .. } = &mut error {\n\n *message = Some(base);\n\n }\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 38, "score": 105640.27396857919 }, { "content": "struct SharedTableScope(RefCell<ArcRwLockWriteGuard<RawRwLock, SharedTableRepr>>);\n\n\n\nimpl SharedTableScope {\n\n fn new(x: Arc<RwLock<SharedTableRepr>>) -> Self {\n\n Self(RefCell::new(x.write_arc()))\n\n }\n\n\n\n fn push(&self, value: SharedTableValue) {\n\n let mut wl = self.0.borrow_mut();\n\n let pos = len(&wl) + 1;\n\n wl.set(SharedTableKey(SharedTableValue::Integer(pos)), value);\n\n }\n\n\n\n fn deep_dump<'lua>(&self, lua: &'lua Lua) -> mlua::Result<Table<'lua>> {\n\n let guard = self.0.borrow();\n\n guard._deep_dump(\n\n lua,\n\n Arc::as_ptr(ArcRwLockWriteGuard::rwlock(&guard)) as _,\n\n &mut HashMap::new(),\n\n )\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 39, "score": 101570.00430259784 }, { "content": "/// Similar to `hive_core::path::normalize_path`, but for `str`s instead of\n\n/// `Path`s.\n\n///\n\n/// The returned path is always relative, which is intentional and convenient\n\n/// for concatenating to other paths in usual cases.\n\npub fn normalize_path_str(path: &str) -> String {\n\n let mut result = Vec::new();\n\n let segments = path\n\n .split(['/', '\\\\'])\n\n .filter(|&x| !x.is_empty() && x != \".\");\n\n for s in segments {\n\n if s == \"..\" {\n\n result.pop();\n\n } else {\n\n result.push(s);\n\n }\n\n }\n\n result.join(\"/\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::ffi::OsString;\n\n use test_case::test_case;\n", "file_path": "hive-core/src/path.rs", "rank": 40, "score": 97963.7664650912 }, { "content": "fn expected_table(fn_name: &'static str, pos: u8, found: &str) -> mlua::Error {\n\n BadArgument::new(\n\n fn_name,\n\n pos,\n\n format!(\"expected table or shared table, found {found}\"),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 41, "score": 97374.99272328173 }, { "content": "#[derive(Debug)]\n\nstruct LoadedService {\n\n service: RunningService,\n\n local_env: RegistryKey,\n\n internal: RegistryKey,\n\n}\n\n\n\nimpl Sandbox {\n\n pub fn new(state: Arc<HiveState>) -> Result<Self> {\n\n let lua = Lua::new();\n\n let loaded = RefCell::new(CLruCache::new(nonzero!(16usize)));\n\n modify_global_env(&lua)?;\n\n Ok(Self { lua, loaded, state })\n\n }\n\n}\n\n\n\nimpl Sandbox {\n\n async fn call_extract_error<'a, T, R>(&'a self, f: mlua::Value<'a>, v: T) -> Result<R>\n\n where\n\n T: ToLuaMulti<'a>,\n\n R: FromLuaMulti<'a>,\n", "file_path": "hive-core/src/lua/sandbox/mod.rs", "rank": 42, "score": 90000.61443790035 }, { "content": "fn create_fn_fs_remove(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, (path, all): (mlua::String, bool)| {\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n\n\n let path: Cow<Path> = match scheme {\n\n \"local\" => local_storage_path.join(normalize_path_str(path)).into(),\n\n \"external\" => {\n\n let path: Cow<_> = Path::new(path).into();\n\n 
permissions.check(&Permission::Write { path: path.clone() })?;\n\n path\n\n }\n\n \"source\" => return Err(\"cannot modify service source\".to_lua_err()),\n\n _ => return scheme_not_supported(scheme),\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 43, "score": 89656.26677038806 }, { "content": "fn create_fn_fs_mkdir(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, (path, all): (mlua::String, bool)| {\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n\n\n let path: Cow<Path> = match scheme {\n\n \"local\" => local_storage_path.join(normalize_path_str(path)).into(),\n\n \"external\" => {\n\n permissions.check(&Permission::Write {\n\n path: Cow::Borrowed(Path::new(path)),\n\n })?;\n\n Path::new(path).into()\n\n }\n\n \"source\" => return Err(\"cannot modify service source\".to_lua_err()),\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 44, "score": 89656.26677038806 }, { "content": "fn create_fn_fs_open(\n\n lua: &Lua,\n\n source: Source,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function<'_>> {\n\n lua.create_async_function(\n\n move |lua, (path, mode): (mlua::String, Option<mlua::String>)| {\n\n use OpenMode::*;\n\n let source = source.clone();\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n let mode = OpenMode::from_lua(mode)?;\n\n extract_error_async(lua, async {\n\n let file = match scheme {\n\n \"local\" => {\n\n let path = normalize_path_str(path);\n\n GenericFile::File(\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 45, "score": 89656.26677038806 }, { "content": "struct LuaRng(Box<dyn RngCore>);\n\n\n\nimpl UserData for LuaRng {\n\n fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {\n\n methods.add_method_mut(\"random\", |_lua, this, ()| Ok(this.0.gen::<f64>()));\n\n\n\n methods.add_method_mut(\"gen_range\", |_lua, this, (low, high): (i64, i64)| {\n\n if low >= high {\n\n Err(\"range is empty\".to_lua_err())\n\n } else {\n\n Ok(this.0.gen_range(low..=high))\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "hive-core/src/lua/crypto.rs", "rank": 46, "score": 89065.86003593683 }, { "content": "fn _create_preload_fs(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n source: Source,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function<'_>> {\n\n lua.create_function(move |lua, ()| {\n\n let fs_table = lua.create_table()?;\n\n fs_table.raw_set(\n\n \"open\",\n\n create_fn_fs_open(\n\n lua,\n\n source.clone(),\n\n local_storage_path.clone(),\n\n permissions.clone(),\n\n )?,\n\n )?;\n\n fs_table.raw_set(\n\n \"mkdir\",\n\n create_fn_fs_mkdir(lua, local_storage_path.clone(), permissions.clone())?,\n\n )?;\n\n fs_table.raw_set(\n\n \"remove\",\n\n create_fn_fs_remove(lua, local_storage_path.clone(), permissions.clone())?,\n\n )?;\n\n Ok(fs_table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 47, "score": 82332.06586991044 }, { "content": "fn get_default_hive_path() -> PathBuf {\n\n let mut hive_path = home::home_dir().expect(\"no home directory found\");\n\n hive_path.push(\".hive\");\n\n hive_path\n\n}\n\n\n\n#[derive(Debug, Clone, Parser)]\n\n#[clap(author, version, about)]\n\npub struct ConfigArgs {\n\n 
/// Listening address [overrides config]\n\n #[clap(short, long)]\n\n pub listen: Option<SocketAddr>,\n\n\n\n /// Authentication token [overrides config]\n\n #[clap(long)]\n\n pub auth_token: Option<Uuid>,\n\n\n\n /// Hive executor pool size [overrides config]\n\n #[clap(long)]\n\n pub pool_size: Option<usize>,\n", "file_path": "hive-server/src/config.rs", "rank": 48, "score": 81892.53543581476 }, { "content": "fn out_of_bounds(fn_name: &'static str, pos: u8) -> mlua::Error {\n\n BadArgument::new(fn_name, pos, \"out of bounds\").into()\n\n}\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 49, "score": 75596.31517336132 }, { "content": "fn list(state: &MainState) -> Result<Response<Body>> {\n\n let services = state.hive.list_services().collect::<Vec<_>>();\n\n let services = (services.iter()).map(Service::upgrade).collect::<Vec<_>>();\n\n json_response(StatusCode::OK, services)\n\n}\n\n\n", "file_path": "hive-server/src/handle/mod.rs", "rank": 50, "score": 74833.47903988016 }, { "content": "local http = require \"http\"\n\n\n\nhive.register(\"/\", function(req)\n\n local resp = http.request \"https://httpbin.org/get\"\n\n\n\n return {\n\n status = resp.status,\n\n result = resp.body:parse_json(),\n\n }\n\nend)\n", "file_path": "examples/request/main.lua", "rank": 51, "score": 70862.9156417733 }, { "content": "fn get(state: &MainState, name: &str) -> Result<Response<Body>> {\n\n let service = state.hive.get_service(name)?;\n\n json_response(StatusCode::OK, service.upgrade())\n\n}\n\n\n\nasync fn start_stop(state: &MainState, name: &str, query: &str) -> Result<Response<Body>> {\n\n #[derive(Deserialize)]\n\n struct Query {\n\n op: Operation,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n enum Operation {\n\n #[serde(rename = \"start\")]\n\n Start,\n\n #[serde(rename = \"stop\")]\n\n Stop,\n\n }\n\n\n\n let Query { op } = serde_qs::from_str(query)?;\n", "file_path": "hive-server/src/handle/mod.rs", "rank": 52, "score": 70754.32164338365 }, { "content": "fn scheme_not_supported<T>(scheme: &str) -> mlua::Result<T> {\n\n Err(format!(\"scheme currently not supported: {scheme}\").to_lua_err())\n\n}\n\n\n\npub async fn remove_service_local_storage(state: &HiveState, service_name: &str) -> Result<()> {\n\n let path = state.local_storage_path.join(service_name);\n\n Ok(tokio::fs::remove_dir_all(path).await?)\n\n}\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 53, "score": 69243.95284234376 }, { "content": "use crate::lua::extract_error;\n\nuse mlua::{\n\n AnyUserData, ExternalError, ExternalResult, FromLua, Function, Lua, LuaSerdeExt, UserData,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n\npub struct LuaUri(pub(crate) hyper::Uri);\n\n\n\nimpl UserData for LuaUri {\n\n fn add_fields<'lua, F: mlua::UserDataFields<'lua, Self>>(fields: &mut F) {\n\n fields.add_field_method_get(\"scheme\", |lua, this| lua.pack(this.0.scheme_str()));\n\n fields.add_field_method_get(\"host\", |lua, this| lua.pack(this.0.host()));\n\n fields.add_field_method_get(\"port\", |_lua, this| Ok(this.0.port_u16()));\n\n fields.add_field_method_get(\"path\", |lua, this| lua.pack(this.0.path()));\n\n fields.add_field_method_get(\"query_string\", |lua, this| lua.pack(this.0.query()));\n\n }\n\n\n\n fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {\n\n methods.add_meta_method(\"__tostring\", |_lua, this, ()| Ok(this.0.to_string()));\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 54, "score": 65262.98550600324 }, { "content": " }\n\n}\n\n\n\nimpl<'lua> FromLua<'lua> 
for LuaUri {\n\n fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> {\n\n match lua_value {\n\n mlua::Value::String(s) => Ok(Self(hyper::Uri::try_from(s.as_bytes()).to_lua_err()?)),\n\n mlua::Value::UserData(x) => {\n\n let x = x.borrow::<Self>()?;\n\n Ok(Self(x.0.clone()))\n\n }\n\n _ => Err(\"failed to convert to URI\".to_lua_err()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 55, "score": 65254.956241836204 }, { "content": "\n\n // TODO: support more complex QS structure (e.g. multiple queries with the same\n\n // name)\n\n methods.add_function(\"query\", |lua, this: AnyUserData| {\n\n extract_error(lua, || {\n\n let this_ = this.borrow::<Self>()?;\n\n if let Some(q) = this.get_named_user_value::<_, Option<mlua::Value>>(\"query\")? {\n\n Ok(q)\n\n } else {\n\n let x = (this_.0.query())\n\n .map(serde_qs::from_str::<HashMap<String, String>>)\n\n .transpose()\n\n .to_lua_err()?\n\n .unwrap_or_default();\n\n let x = lua.to_value(&x)?;\n\n lua.set_named_registry_value(\"query\", x.clone())?;\n\n Ok(x)\n\n }\n\n })\n\n })\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 56, "score": 65248.59748814073 }, { "content": "use crate::lua::byte_stream::ByteStream;\n\nuse crate::lua::shared::SharedTable;\n\nuse crate::LuaResponse;\n\nuse hyper::header::HeaderValue;\n\nuse hyper::{Body, HeaderMap, StatusCode};\n\nuse mlua::{ExternalError, ExternalResult, FromLua, Lua, LuaSerdeExt, ToLua};\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\npub enum LuaBody {\n\n Empty,\n\n Json(serde_json::Value),\n\n Bytes(Vec<u8>),\n\n ByteStream(ByteStream),\n\n}\n\n\n\nimpl LuaBody {\n\n pub fn into_default_response(self) -> LuaResponse {\n\n let (status, headers) = match &self {\n\n Self::Empty => (StatusCode::NO_CONTENT, Default::default()),\n", "file_path": "hive-core/src/lua/http/body.rs", "rank": 57, "score": 65169.57365914404 }, { "content": " Self::Json(_) => {\n\n let mut headers = HeaderMap::new();\n\n headers.insert(\"content-type\", HeaderValue::from_static(\"application/json\"));\n\n (StatusCode::OK, headers)\n\n }\n\n _ => Default::default(),\n\n };\n\n LuaResponse {\n\n status,\n\n headers: Rc::new(RefCell::new(headers)),\n\n body: Some(self),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Body> for LuaBody {\n\n fn from(body: Body) -> Self {\n\n Self::ByteStream(body.into())\n\n }\n\n}\n", "file_path": "hive-core/src/lua/http/body.rs", "rank": 59, "score": 65164.544393393466 }, { "content": "\n\nimpl From<LuaBody> for Body {\n\n fn from(body: LuaBody) -> Self {\n\n match body {\n\n LuaBody::Empty => Body::empty(),\n\n LuaBody::Json(x) => x.to_string().into(),\n\n LuaBody::Bytes(x) => x.into(),\n\n LuaBody::ByteStream(x) => Body::wrap_stream(x.0),\n\n }\n\n }\n\n}\n\n\n\nimpl<'lua> FromLua<'lua> for LuaBody {\n\n fn from_lua(lua_value: mlua::Value<'lua>, _lua: &'lua Lua) -> mlua::Result<Self> {\n\n let result = match lua_value {\n\n mlua::Value::Nil => Self::Empty,\n\n x @ mlua::Value::Table(_) => Self::Json(serde_json::to_value(&x).to_lua_err()?),\n\n mlua::Value::String(s) => Self::Bytes(s.as_bytes().into()),\n\n mlua::Value::UserData(u) => {\n\n if let Ok(s) = u.take::<ByteStream>() {\n", "file_path": "hive-core/src/lua/http/body.rs", "rank": 60, "score": 65163.20552857968 }, { "content": " Self::ByteStream(s)\n\n } else if let Ok(x) = u.borrow::<SharedTable>() {\n\n Self::Json(serde_json::to_value(&*x).to_lua_err()?)\n\n } else {\n\n return Err(\"failed to turn object into body\".to_lua_err());\n\n }\n\n }\n\n _ => return 
Err(\"failed to turn object into body\".to_lua_err()),\n\n };\n\n Ok(result)\n\n }\n\n}\n\n\n\nimpl<'lua> ToLua<'lua> for LuaBody {\n\n fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> {\n\n match self {\n\n Self::Empty => Ok(mlua::Value::Nil),\n\n Self::Json(x) => lua.to_value(&x),\n\n Self::Bytes(x) => Ok(mlua::Value::String(lua.create_string(&x)?)),\n\n Self::ByteStream(x) => lua.pack(x),\n\n }\n\n }\n\n}\n", "file_path": "hive-core/src/lua/http/body.rs", "rank": 61, "score": 65159.00057344741 }, { "content": "fn parse_path<'a>(path: &'a mlua::String<'a>) -> mlua::Result<(&'a str, &'a str)> {\n\n let path = std::str::from_utf8(path.as_bytes()).to_lua_err()?;\n\n Ok(path.split_once(':').unwrap_or((\"local\", path)))\n\n}\n\n\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 69, "score": 64634.066116000206 }, { "content": "use hyper::header::{HeaderName, HeaderValue};\n\nuse hyper::HeaderMap;\n\nuse mlua::{AnyUserData, ExternalResult, UserData, UserDataMethods, Variadic};\n\nuse ouroboros::self_referencing;\n\nuse std::cell::{RefCell, RefMut};\n\nuse std::rc::Rc;\n\n\n\npub struct LuaHeaderMap(pub(crate) Rc<RefCell<HeaderMap>>);\n\n\n\nimpl UserData for LuaHeaderMap {\n\n fn add_methods<'lua, M: UserDataMethods<'lua, Self>>(methods: &mut M) {\n\n fn header_name(name: mlua::String) -> mlua::Result<HeaderName> {\n\n HeaderName::from_bytes(name.as_bytes()).to_lua_err()\n\n }\n\n\n\n methods.add_method(\"get\", |lua, this, name: mlua::String| {\n\n (this.0)\n\n .borrow()\n\n .get_all(header_name(name)?)\n\n .into_iter()\n", "file_path": "hive-core/src/lua/http/header_map.rs", "rank": 70, "score": 63531.43586239388 }, { "content": " let iter_fn = lua.create_function(|lua, iter: AnyUserData| {\n\n let mut iter = iter.borrow_mut::<LuaHeaderMapIter>()?;\n\n let result = iter\n\n .with_iter_mut(|x| x.next())\n\n .map(|(k, v)| {\n\n mlua::Result::Ok(Variadic::from_iter([\n\n lua.create_string(k.as_str())?,\n\n lua.create_string(v.as_bytes())?,\n\n ]))\n\n })\n\n .transpose()?\n\n .unwrap_or_else(Variadic::new);\n\n Ok(result)\n\n })?;\n\n\n\n iter_fn.bind(iter)\n\n });\n\n }\n\n}\n\n\n\n#[self_referencing]\n", "file_path": "hive-core/src/lua/http/header_map.rs", "rank": 71, "score": 63523.0729920299 }, { "content": " .map(|x| lua.create_string(x.as_bytes()))\n\n .collect::<mlua::Result<Variadic<_>>>()\n\n });\n\n\n\n methods.add_meta_method(\"__index\", |lua, this, name: mlua::String| {\n\n (this.0)\n\n .borrow()\n\n .get(header_name(name)?)\n\n .map(|x| lua.create_string(x.as_bytes()))\n\n .transpose()\n\n });\n\n\n\n methods.add_meta_method(\"__pairs\", |lua, this, ()| {\n\n let iter = LuaHeaderMapIterBuilder {\n\n inner: this.0.clone(),\n\n borrow_builder: |x| x.borrow_mut(),\n\n iter_builder: |x| x.iter(),\n\n }\n\n .build();\n\n\n", "file_path": "hive-core/src/lua/http/header_map.rs", "rank": 72, "score": 63522.60125801582 }, { "content": "struct MyWaker {\n\n tx: mpsc::UnboundedSender<()>,\n\n sent: AtomicBool,\n\n}\n\n\n\nimpl MyWaker {\n\n fn from_tx(tx: mpsc::UnboundedSender<()>) -> Waker {\n\n Waker::from(Arc::new(Self {\n\n tx,\n\n sent: AtomicBool::new(false),\n\n }))\n\n }\n\n}\n\n\n\nimpl Wake for MyWaker {\n\n fn wake(self: Arc<Self>) {\n\n self.wake_by_ref();\n\n }\n\n\n\n fn wake_by_ref(self: &Arc<Self>) {\n\n if !self.sent.load(Relaxed) {\n\n let _ = self.tx.send(());\n\n self.sent.store(true, Relaxed);\n\n }\n\n }\n\n}\n\n\n", "file_path": "hive-core/src/task/executor.rs", "rank": 73, "score": 62838.49818963422 }, { "content": "#[derive(Deserialize)]\n\nstruct 
UploadQuery {\n\n #[serde(default)]\n\n mode: UploadMode,\n\n}\n\n\n\n// TODO: Add load, cold update, supporting general `Service` as response\n\n//\n\n// Loading isn't always going to succeed, and sometimes we only need to load it\n\n// without starting it.\n\npub(crate) async fn upload(\n\n state: &MainState,\n\n name: Option<String>,\n\n req: Request<Body>,\n\n) -> Result<Response<Body>> {\n\n let (parts, body) = req.into_parts();\n\n let mut multipart = parse_multipart(&parts.headers, body)?;\n\n\n\n let UploadQuery { mode } = serde_qs::from_str(parts.uri.query().unwrap_or(\"\"))?;\n\n\n\n let source_field = multipart.next_field().await?.ok_or((\n", "file_path": "hive-server/src/handle/upload.rs", "rank": 74, "score": 61927.809737099786 }, { "content": "fn serialize_slice_as_str<S: Serializer>(slice: &[u8], serializer: S) -> Result<S::Ok, S::Error> {\n\n if let Ok(x) = std::str::from_utf8(slice) {\n\n serializer.serialize_str(x)\n\n } else {\n\n serializer.serialize_bytes(slice)\n\n }\n\n}\n\n\n\nimpl<'lua> FromLua<'lua> for SharedTableValue {\n\n fn from_lua(lua_value: mlua::Value<'lua>, lua: &'lua Lua) -> mlua::Result<Self> {\n\n use mlua::Value::*;\n\n let result = match lua_value {\n\n Nil => Self::Nil,\n\n Boolean(x) => Self::Boolean(x),\n\n Integer(x) => Self::Integer(x),\n\n Number(x) => Self::Number(x),\n\n String(x) => Self::String(x.as_bytes().into()),\n\n Table(x) => Self::Table(self::SharedTable::from_lua_table(lua, x)?),\n\n UserData(x) => {\n\n if let Ok(x) = x.borrow::<self::SharedTable>() {\n", "file_path": "hive-core/src/lua/shared/kv.rs", "rank": 75, "score": 58887.859517051096 }, { "content": "struct PanicNotifier(Arc<AtomicBool>);\n\n\n\nimpl Drop for PanicNotifier {\n\n fn drop(&mut self) {\n\n if std::thread::panicking() {\n\n self.0.store(true, Ordering::Release)\n\n }\n\n }\n\n}\n\n\n\npub struct Executor {\n\n pub task_count: Arc<AtomicU32>,\n\n panicked: Arc<AtomicBool>,\n\n task_tx: mpsc::Sender<Task>,\n\n _stop_tx: oneshot::Sender<()>,\n\n}\n\n\n\nimpl Executor {\n\n pub fn new(f: impl FnOnce() -> Result<Sandbox> + Send + 'static, name: String) -> Self {\n\n let task_count = Arc::new(AtomicU32::new(0));\n", "file_path": "hive-core/src/task/executor.rs", "rank": 76, "score": 56109.4365802485 }, { "content": "type SharedStore = Arc<DashMap<Box<str>, SharedTable>>;\n\n\n\nstatic SHARED_STORE: Lazy<SharedStore> = Lazy::new(|| Arc::new(DashMap::new()));\n\n\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 77, "score": 52269.67989410942 }, { "content": "fn main() -> anyhow::Result<()> {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .worker_threads(*HALF_NUM_CPUS)\n\n .build()\n\n .unwrap()\n\n .block_on(run())\n\n}\n\n\n\n#[cfg(unix)]\n\nasync fn shutdown_signal() {\n\n use tokio::select;\n\n use tokio::signal::unix::{signal, SignalKind};\n\n\n\n let mut sigint = signal(SignalKind::interrupt()).unwrap();\n\n let mut sigterm = signal(SignalKind::terminate()).unwrap();\n\n\n\n let signal = select! {\n\n _ = sigint.recv() => \"SIGINT\",\n\n _ = sigterm.recv() => \"SIGTERM\",\n", "file_path": "hive-server/src/main.rs", "rank": 78, "score": 51686.92356664896 }, { "content": " allowed_methods = allowed_methods\n\n }\n\n }\n\n end\n\n end,\n\n}\n\n\n\nlocal function init_method_route(method, handler)\n\n return setmetatable({\n\n [\"$\" .. 
method] = handler\n\n }, mt)\n\nend\n\n\n\nlocal routing = {\n\n any = bind_one(init_method_route, \"_\"),\n\n}\n\n\n\nfor _, v in ipairs(methods) do\n\n local method_upper = v:upper()\n\n mt.__index[v] = bind_one(add_method_route, method_upper)\n\n routing[v] = bind_one(init_method_route, method_upper)\n\nend\n\n\n\nreturn routing\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 79, "score": 38703.72869721386 }, { "content": " },\n\n __call = function(self, req)\n\n local handler = self[\"$\" .. req.method]\n\n local any = self[\"$_\"]\n\n if type(handler) == \"function\" then\n\n return handler(req)\n\n elseif type(any) == \"function\" then\n\n return any(req)\n\n else\n\n local allowed_methods = {}\n\n for k, _ in pairs(self) do\n\n if k:sub(1, 1) == \"$\" then\n\n allowed_methods[#allowed_methods + 1] = k:sub(2)\n\n end\n\n end\n\n\n\n error {\n\n status = 405,\n\n error = \"method not allowed\",\n\n detail = {\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 80, "score": 38703.42392142479 }, { "content": "local methods = {\n\n \"get\", \"post\", \"put\",\n\n \"patch\", \"head\", \"delete\",\n\n \"trace\",\n\n}\n\n\n\nlocal function bind_one(f, arg)\n\n return function(...)\n\n return f(arg, ...)\n\n end\n\nend\n\n\n\nlocal function add_method_route(method, self, handler)\n\n self[\"$\" .. method] = handler\n\n return self\n\nend\n\n\n\nlocal mt = {\n\n __index = {\n\n any = bind_one(add_method_route, \"_\"),\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 81, "score": 38703.38401628959 }, { "content": "fn serialize_error<E, S>(error: E, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n E: std::error::Error,\n\n S: Serializer,\n\n{\n\n json!({ \"msg\": error.to_string() }).serialize(ser)\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn status(&self) -> StatusCode {\n\n match self {\n\n Self::LuaCustom { status, .. } => *status,\n\n _ => self.get_str(\"status\").unwrap().parse().unwrap(),\n\n }\n\n }\n\n\n\n pub fn error(&self) -> &str {\n\n match self {\n\n Self::LuaCustom { error, .. } => error,\n\n _ => self.get_str(\"error\").unwrap(),\n", "file_path": "hive-core/src/error.rs", "rank": 82, "score": 38278.506275770495 }, { "content": "fn serialize_error<E, S>(error: E, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n E: std::error::Error,\n\n S: Serializer,\n\n{\n\n json!({ \"msg\": error.to_string() }).serialize(ser)\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn status(&self) -> StatusCode {\n\n match self {\n\n ErrorKind::Hive(error) => error.kind().status(),\n\n ErrorKind::Custom { status, .. 
} => *status,\n\n _ => self.get_str(\"status\").unwrap().parse().unwrap(),\n\n }\n\n }\n\n\n\n pub fn internal(&self) -> bool {\n\n match self {\n\n ErrorKind::Hive(error) => error.kind().internal(),\n\n _ => self.status().is_server_error(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "hive-server/src/error.rs", "rank": 83, "score": 38278.506275770495 }, { "content": " \"cannot call `hive.register` from places other than the top level of `main.lua`\"\n\n )\n\n local type_handler = type(handler)\n\n if type_handler ~= \"function\" then\n\n if type_handler == \"table\" then\n\n local mt = getmetatable(handler)\n\n if type(mt) == \"table\" and type(mt.__call) == \"function\" then\n\n goto ok\n\n end\n\n end\n\n error \"handler must either be a function or a callable table\"\n\n end\n\n\n\n ::ok::\n\n table.insert(internal.paths, { path, handler })\n\nend\n\n\n\nlocal function require(modname)\n\n local modname_type = type(modname)\n\n assert(\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 84, "score": 37424.10985827191 }, { "content": " table = {\n\n \"remove\", \"sort\", \"concat\", \"pack\",\n\n \"unpack\",\n\n },\n\n coroutine = {\n\n \"close\", \"create\", \"isyieldable\", \"resume\",\n\n \"running\", \"status\", \"wrap\", \"yield\",\n\n },\n\n}\n\n\n\nlocal monkey_patch = {\n\n [false] = {\n\n \"error\",\n\n },\n\n table = {\n\n \"insert\", \"dump\", \"scope\"\n\n },\n\n routing = \"*\"\n\n}\n\n\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 85, "score": 37421.526325766106 }, { "content": " modname_type == \"string\",\n\n \"bad argument #1 to 'require' (string expected, got \" .. modname_type .. \")\"\n\n )\n\n\n\n local package = internal.package;\n\n local error_msgs = {}\n\n if package.loaded[modname] then\n\n return table.unpack(package.loaded[modname])\n\n else\n\n for _, searcher in ipairs(package.searchers) do\n\n local loader, data = searcher(modname)\n\n if loader then\n\n local result = { loader(modname, data) }\n\n package.loaded[modname] = result\n\n return table.unpack(result)\n\n else\n\n table.insert(error_msgs, data)\n\n end\n\n end\n\n end\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 86, "score": 37421.51827893074 }, { "content": " local loader = preload[modname]\n\n if loader then\n\n return loader, \"<preload>\"\n\n else\n\n return nil, \"preload '\" .. modname .. \"' not found\"\n\n end\n\nend\n\n\n\nlocal function source_searcher(modname)\n\n local source = internal.source\n\n local path = \"\"\n\n for str in string.gmatch(modname, \"([^%.]+)\") do\n\n path = path .. \"/\" .. str\n\n end\n\n\n\n local file_exists = source:exists(path .. \".lua\")\n\n local init_exists = source:exists(path .. \"/init.lua\")\n\n\n\n if file_exists and init_exists then\n\n return nil, \"file '@source:\" .. path .. \".lua' and '@source:\" .. path .. \"/init.lua' conflicts\"\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 87, "score": 37421.42110224089 }, { "content": " elseif not file_exists and not init_exists then\n\n return nil, \"no file '@source:\" .. path .. \".lua'\\n\\tno file 'source:\" .. path .. \"/init.lua'\"\n\n else\n\n path = path .. 
(file_exists and \".lua\" or \"/init.lua\")\n\n local function source_loader(modname, path)\n\n return source:load(path, local_env)()\n\n end\n\n return source_loader, path\n\n end\n\nend\n\n\n\ninternal.package.searchers = { preload_searcher, source_searcher }\n\n\n\n-- Standard library whitelist --\n\n\n\nlocal whitelist = {\n\n [false] = {\n\n \"assert\", \"ipairs\", \"next\", \"pairs\",\n\n \"pcall\", \"print\", \"rawequal\", \"select\",\n\n \"setmetatable\", \"tonumber\", \"tostring\", \"type\",\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 88, "score": 37421.40920468316 }, { "content": "-- Fields with `nil` should be initialized in Rust\n\n\n\n-- Internal --\n\n\n\nlocal internal = {\n\n paths = {},\n\n sealed = false,\n\n source = nil,\n\n package = {\n\n loaded = {},\n\n preload = {},\n\n searchers = nil,\n\n },\n\n}\n\n\n\n-- Hive table --\n\n\n\nlocal function register(path, handler)\n\n assert(\n\n not internal.sealed,\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 89, "score": 37420.687957165705 }, { "content": " error(\"module '\" .. modname .. \"' not found:\\n\\t\" .. table.concat(error_msgs, \"\\n\"))\n\nend\n\n\n\n-- Local env --\n\n\n\nlocal local_env = {\n\n hive = {\n\n register = register,\n\n context = nil,\n\n permission = nil,\n\n current_worker = current_worker,\n\n Error = hive_Error,\n\n },\n\n require = require,\n\n}\n\n\n\n-- Searchers --\n\n\n\nlocal preload = internal.package.preload\n\nlocal function preload_searcher(modname)\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 90, "score": 37420.373742630676 }, { "content": " \"warn\", \"xpcall\", \"_VERSION\",\n\n },\n\n math = {\n\n \"abs\", \"acos\", \"asin\", \"atan\",\n\n \"atan2\", \"ceil\", \"cos\", \"deg\",\n\n \"exp\", \"floor\", \"fmod\", \"frexp\",\n\n \"huge\", \"ldexp\", \"log\", \"log10\",\n\n \"max\", \"maxinteger\", \"min\", \"mininteger\",\n\n \"modf\", \"pi\", \"pow\", \"rad\", \"random\",\n\n \"sin\", \"sinh\", \"sqrt\", \"tan\",\n\n \"tanh\", \"tointeger\", \"type\", \"ult\",\n\n },\n\n os = {\n\n \"clock\", \"difftime\", \"time\",\n\n },\n\n string = {\n\n \"byte\", \"char\", \"find\", \"format\",\n\n \"gmatch\", \"gsub\", \"len\", \"lower\",\n\n \"match\", \"reverse\", \"sub\", \"upper\",\n\n },\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 91, "score": 37420.07828556613 }, { "content": "local function apply_whitelist(whitelist)\n\n for module, fields in pairs(whitelist) do\n\n local from_module, to_module\n\n if module then\n\n from_module = _G[module]\n\n to_module = {}\n\n local_env[module] = to_module\n\n else\n\n from_module = _G\n\n to_module = local_env\n\n end\n\n\n\n if fields == \"*\" then\n\n for k, v in pairs(from_module) do\n\n to_module[k] = v\n\n end\n\n else\n\n for _, field in ipairs(fields) do\n\n to_module[field] = from_module[field]\n\n end\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 92, "score": 37417.65248577154 }, { "content": " end\n\n end\n\nend\n\n\n\napply_whitelist(whitelist)\n\napply_whitelist(monkey_patch)\n\n\n\nlocal_env.getmetatable = safe_getmetatable\n\n\n\nreturn local_env, internal\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 93, "score": 37417.65248577154 }, { "content": "function safe_getmetatable(t)\n\n local type_t = type(t)\n\n assert(\n\n type_t == \"table\",\n\n \"bad argument #1 to 'getmetatable' (table expected, got\" .. type_t .. 
\")\"\n\n )\n\n getmetatable(t)\n\nend\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 94, "score": 36218.64896808601 }, { "content": "function hive_Error(obj)\n\n local status = obj.status\n\n local error = obj.error\n\n\n\n local result = {\n\n status = status,\n\n error = error,\n\n }\n\n local result_mt = {\n\n __call = function(detail)\n\n return {\n\n status = status,\n\n error = error,\n\n detail = detail,\n\n }\n\n end\n\n }\n\n\n\n return setmetatable(result, result_mt)\n\nend\n\n\n\nlocal lua_error = error\n\n\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 95, "score": 35647.50827671372 }, { "content": "function error(msg, level)\n\n if type(msg) == \"table\" then\n\n local type_detail = type(msg.detail)\n\n assert(\n\n type_detail == \"nil\" or type_detail == \"string\" or type_detail == \"table\",\n\n \"error detail must be nil, string or table\"\n\n )\n\n end\n\n lua_error(msg, level)\n\nend\n\n\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 96, "score": 35647.50827671372 }, { "content": "\n\nimpl ServiceState {\n\n pub fn name(&self) -> &str {\n\n match self {\n\n Self::Running(x) => &x.name,\n\n Self::Stopped(x) => &x.name,\n\n }\n\n }\n\n\n\n pub fn uuid(&self) -> Uuid {\n\n match self {\n\n Self::Running(x) => x.uuid,\n\n Self::Stopped(x) => x.uuid,\n\n }\n\n }\n\n\n\n pub fn into_impl(self) -> ServiceImpl {\n\n match self {\n\n Self::Running(x) => Arc::try_unwrap(x).unwrap_or_else(|arc| arc.as_ref().clone()),\n\n Self::Stopped(x) => x,\n", "file_path": "hive-core/src/service/impls.rs", "rank": 97, "score": 35477.4380479271 }, { "content": "\n\n pub fn is_stopped(&self) -> bool {\n\n matches!(self, Self::Stopped(_))\n\n }\n\n}\n\n\n\n#[derive(Serialize)]\n\n#[serde(tag = \"status\")]\n\npub enum ServiceGuard<'a> {\n\n #[serde(rename = \"running\")]\n\n Running { service: RunningServiceGuard<'a> },\n\n #[serde(rename = \"stopped\")]\n\n Stopped { service: &'a ServiceImpl },\n\n}\n\n\n\nimpl Deref for ServiceGuard<'_> {\n\n type Target = ServiceImpl;\n\n\n\n fn deref(&self) -> &ServiceImpl {\n\n match self {\n", "file_path": "hive-core/src/service/impls.rs", "rank": 98, "score": 35475.00124895175 }, { "content": " Stopped(StoppedService<'a>),\n\n}\n\n\n\nimpl Service<'_> {\n\n pub fn try_upgrade(&self) -> Result<ServiceGuard<'_>> {\n\n Ok(match self {\n\n Service::Running(x) => ServiceGuard::Running {\n\n service: x.try_upgrade()?,\n\n },\n\n Service::Stopped(service) => ServiceGuard::Stopped { service },\n\n })\n\n }\n\n\n\n pub fn upgrade(&self) -> ServiceGuard<'_> {\n\n self.try_upgrade().unwrap()\n\n }\n\n\n\n pub fn is_running(&self) -> bool {\n\n matches!(self, Self::Running(_))\n\n }\n", "file_path": "hive-core/src/service/impls.rs", "rank": 99, "score": 35474.0861469197 } ]
Rust
src/main.rs
wikrsh/raytracing_in_one_weekend_rust
d752ab4cdbc67d951553797792bbd93e51aa6b39
use rand::prelude::random;
use raytracing_in_one_weekend::camera::Camera;
use raytracing_in_one_weekend::geometry::{Hittable, HittableList, Ray, Sphere};
use raytracing_in_one_weekend::material::{Dielectric, Lambertian, Material, Metal};
use raytracing_in_one_weekend::utils::color::{write_color, Color};
use raytracing_in_one_weekend::utils::vec3::Vec3;
use std::io::{self, BufWriter};
use std::rc::Rc;

fn ray_color<T: Hittable>(r: &Ray, world: &T, depth: i32) -> Color {
    if depth <= 0 {
        return Color::new(0.0, 0.0, 0.0);
    }

    if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) {
        return if let Some((attenuation, scattered)) = rec.mat.as_ref().scatter(r, &rec) {
            attenuation * ray_color(&scattered, world, depth - 1)
        } else {
            Color::new(0.0, 0.0, 0.0)
        };
    }

    let unit_direction = r.direction().unit();
    let t = 0.5 * (unit_direction.y() + 1.0);
    (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0)
}

fn random_scene() -> HittableList {
    let mut world = HittableList::new();

    let ground_material: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Lambertian::new(Color::new(0.5, 0.5, 0.5))));
    world.add(Box::new(Sphere::new(
        Vec3::new(0.0, -1000.0, 0.0),
        1000.0,
        &ground_material,
    )));

    for a in -11..11 {
        for b in -11..11 {
            let choose_mat: f64 = random();
            let center = Vec3::new(
                (a as f64) + 0.9 * random::<f64>(),
                0.2,
                (b as f64) + 0.9 * random::<f64>(),
            );

            let sphere_material: Rc<Box<dyn Material>> = if choose_mat < 0.8 {
                let albedo = Color::new_random(0.0, 1.0) * Color::new_random(0.0, 1.0);
                Rc::new(Box::new(Lambertian::new(albedo)))
            } else if choose_mat < 0.95 {
                let albedo = Color::new_random(0.5, 1.0);
                let fuzz = 0.5 * random::<f64>();
                Rc::new(Box::new(Metal::new(albedo, fuzz)))
            } else {
                Rc::new(Box::new(Dielectric::new(1.5)))
            };

            world.add(Box::new(Sphere::new(center, 0.2, &sphere_material)));
        }
    }

    let material1: Rc<Box<dyn Material>> = Rc::new(Box::new(Dielectric::new(1.5)));
    world.add(Box::new(Sphere::new(
        Vec3::new(0.0, 1.0, 0.0),
        1.0,
        &material1,
    )));

    let material2: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Lambertian::new(Color::new(0.4, 0.2, 0.1))));
    world.add(Box::new(Sphere::new(
        Vec3::new(-4.0, 1.0, 0.0),
        1.0,
        &material2,
    )));

    let material3: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0)));
    world.add(Box::new(Sphere::new(
        Vec3::new(4.0, 1.0, 0.0),
        1.0,
        &material3,
    )));

    world
}

fn main() -> io::Result<()> {
    let aspect_ratio = 3.0 / 2.0;
    let image_width: usize = 1200;
    let image_height: usize = (image_width as f64 / aspect_ratio) as usize;
    let samples_per_pixel = 500;
    let max_depth = 50;

    let world = random_scene();

    let lookfrom = Vec3::new(13.0, 2.0, 3.0);
    let lookat = Vec3::new(0.0, 0.0, 0.0);
    let dist_to_focus = 10.0;
    let aperture = 0.1;

    let camera = Camera::new(
        lookfrom,
        lookat,
        Vec3::new(0.0, 1.0, 0.0),
        20.0,
        aspect_ratio,
        aperture,
        dist_to_focus,
    );

    println!("P3");
    println!("{} {}", image_width, image_height);
    println!("255");

    let mut writer = BufWriter::new(io::stdout());

    for h in (0..image_height).rev() {
        eprintln!("Scan lines remaining: {}", h);
        for w in 0..image_width {
            let mut pixel_color = Color::new(0.0, 0.0, 0.0);
            for _ in 0..samples_per_pixel {
                let u = (w as f64 + random::<f64>()) / (image_width - 1) as f64;
                let v = (h as f64 + random::<f64>()) / (image_height - 1) as f64;
                let r = camera.get_ray(u, v);
                pixel_color += ray_color(&r, &world, max_depth);
            }
            write_color(&mut writer, &pixel_color, samples_per_pixel)?;
        }
    }

    eprintln!("Done.");

    Ok(())
}
use rand::prelude::random;
use raytracing_in_one_weekend::camera::Camera;
use raytracing_in_one_weekend::geometry::{Hittable, HittableList, Ray, Sphere};
use raytracing_in_one_weekend::material::{Dielectric, Lambertian, Material, Metal};
use raytracing_in_one_weekend::utils::color::{write_color, Color};
use raytracing_in_one_weekend::utils::vec3::Vec3;
use std::io::{self, BufWriter};
use std::rc::Rc;

fn ray_color<T: Hittable>(r: &Ray, world: &T, depth: i32) -> Color {
    if depth <= 0 {
        return Color::new(0.0, 0.0, 0.0);
    }

    if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) {
        return if let Some((attenuation, scattered)) = rec.mat.as_ref().scatter(r, &rec) {
            attenuation * ray_color(&scattered, world, depth - 1)
        } else {
            Color::new(0.0, 0.0, 0.0)
        };
    }

    let unit_direction = r.direction().unit();
    let t = 0.5 * (unit_direction.y() + 1.0);
    (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0)
}

fn random_scene() -> HittableList {
    let mut world = HittableList::new();

    let ground_material: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Lambertian::new(Color::new(0.5, 0.5, 0.5))));
    world.add(Box::new(Sphere::new(
        Vec3::new(0.0, -1000.0, 0.0),
        1000.0,
        &ground_material,
    )));

    for a in -11..11 {
        for b in -11..11 {
            let choose_mat: f64 = random();
            let center = Vec3::new(
                (a as f64) + 0.9 * random::<f64>(),
                0.2,
                (b as f64) + 0.9 * random::<f64>(),
            );

            let sphere_material: Rc<Box<dyn Material>> = if choose_mat < 0.8 {
                let albedo = Color::new_random(0.0, 1.0) * Color::new_random(0.0, 1.0);
                Rc::new(Box::new(Lambertian::new(albedo)))
            } else if choose_mat < 0.95 {
                let albedo = Color::new_random(0.5, 1.0);
                let fuzz = 0.5 * random::<f64>();
                Rc::new(Box::new(Metal::new(albedo, fuzz)))
            } else {
                Rc::new(Box::new(Dielectric::new(1.5)))
            };
fn main() -> io::Result<()> {
    let aspect_ratio = 3.0 / 2.0;
    let image_width: usize = 1200;
    let image_height: usize = (image_width as f64 / aspect_ratio) as usize;
    let samples_per_pixel = 500;
    let max_depth = 50;

    let world = random_scene();

    let lookfrom = Vec3::new(13.0, 2.0, 3.0);
    let lookat = Vec3::new(0.0, 0.0, 0.0);
    let dist_to_focus = 10.0;
    let aperture = 0.1;

    let camera = Camera::new(
        lookfrom,
        lookat,
        Vec3::new(0.0, 1.0, 0.0),
        20.0,
        aspect_ratio,
        aperture,
        dist_to_focus,
    );

    println!("P3");
    println!("{} {}", image_width, image_height);
    println!("255");

    let mut writer = BufWriter::new(io::stdout());

    for h in (0..image_height).rev() {
        eprintln!("Scan lines remaining: {}", h);
        for w in 0..image_width {
            let mut pixel_color = Color::new(0.0, 0.0, 0.0);
            for _ in 0..samples_per_pixel {
                let u = (w as f64 + random::<f64>()) / (image_width - 1) as f64;
                let v = (h as f64 + random::<f64>()) / (image_height - 1) as f64;
                let r = camera.get_ray(u, v);
                pixel_color += ray_color(&r, &world, max_depth);
            }
            write_color(&mut writer, &pixel_color, samples_per_pixel)?;
        }
    }

    eprintln!("Done.");

    Ok(())
}
            world.add(Box::new(Sphere::new(center, 0.2, &sphere_material)));
        }
    }

    let material1: Rc<Box<dyn Material>> = Rc::new(Box::new(Dielectric::new(1.5)));
    world.add(Box::new(Sphere::new(
        Vec3::new(0.0, 1.0, 0.0),
        1.0,
        &material1,
    )));

    let material2: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Lambertian::new(Color::new(0.4, 0.2, 0.1))));
    world.add(Box::new(Sphere::new(
        Vec3::new(-4.0, 1.0, 0.0),
        1.0,
        &material2,
    )));

    let material3: Rc<Box<dyn Material>> =
        Rc::new(Box::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0)));
    world.add(Box::new(Sphere::new(
        Vec3::new(4.0, 1.0, 0.0),
        1.0,
        &material3,
    )));

    world
}
function_block-function_prefix_line
[ { "content": "pub fn write_color<T: Write>(\n\n writer: &mut T,\n\n pixel_color: &Color,\n\n samples_per_pixels: i32,\n\n) -> io::Result<()> {\n\n // Divide the color by the number of samples and gamma-correct for gamma=2.0\n\n let scale = 1.0 / samples_per_pixels as f64;\n\n let r = (pixel_color.x() * scale).sqrt();\n\n let g = (pixel_color.y() * scale).sqrt();\n\n let b = (pixel_color.z() * scale).sqrt();\n\n\n\n let ir = (256.0 * r.clamp(0.0, 0.999)) as i32;\n\n let ig = (256.0 * g.clamp(0.0, 0.999)) as i32;\n\n let ib = (256.0 * b.clamp(0.0, 0.999)) as i32;\n\n\n\n writeln!(writer, \"{} {} {}\", ir, ig, ib)\n\n}\n", "file_path": "src/utils/color.rs", "rank": 2, "score": 46890.80342298835 }, { "content": "use super::material::Material;\n\nuse crate::geometry::HitRecord;\n\nuse crate::geometry::Ray;\n\nuse crate::utils::color::Color;\n\nuse crate::utils::vec3::Vec3;\n\n\n\npub struct Metal {\n\n albedo: Color,\n\n fuzz: f64,\n\n}\n\n\n\nimpl Metal {\n\n pub fn new(albedo: Color, fuzz: f64) -> Self {\n\n Self { albedo, fuzz }\n\n }\n\n}\n\n\n\nimpl Material for Metal {\n\n fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> Option<(Color, Ray)> {\n\n let reflected = r_in.direction().unit().reflect(&rec.normal);\n", "file_path": "src/material/metal.rs", "rank": 3, "score": 42935.01356286363 }, { "content": "use super::material::Material;\n\nuse crate::geometry::HitRecord;\n\nuse crate::geometry::Ray;\n\nuse crate::utils::color::Color;\n\nuse crate::utils::vec3::Vec3;\n\n\n\npub struct Lambertian {\n\n albedo: Color,\n\n}\n\n\n\nimpl Lambertian {\n\n pub fn new(albedo: Color) -> Self {\n\n Self { albedo }\n\n }\n\n}\n\n\n\nimpl Material for Lambertian {\n\n fn scatter(&self, _r_in: &Ray, rec: &HitRecord) -> Option<(Color, Ray)> {\n\n let mut scatter_direction = rec.normal + Vec3::new_random_unit_vector();\n\n\n", "file_path": "src/material/lambertian.rs", "rank": 4, "score": 42934.22779618332 }, { "content": " let scattered = Ray::new(\n\n &rec.p,\n\n &(reflected + self.fuzz * Vec3::new_random_in_unit_sphere()),\n\n );\n\n\n\n if scattered.direction().dot(&rec.normal) > 0.0 {\n\n Some((self.albedo, scattered))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/material/metal.rs", "rank": 5, "score": 42930.89214655458 }, { "content": " // Catch degenerate scatter direction\n\n if scatter_direction.near_zero() {\n\n scatter_direction = rec.normal;\n\n }\n\n\n\n let scattered = Ray::new(&rec.p, &scatter_direction);\n\n Some((self.albedo, scattered))\n\n }\n\n}\n", "file_path": "src/material/lambertian.rs", "rank": 6, "score": 42925.583935693576 }, { "content": "pub trait Material {\n\n fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> Option<(Color, Ray)>;\n\n}\n", "file_path": "src/material/material.rs", "rank": 7, "score": 28606.970657702215 }, { "content": "use crate::geometry::HitRecord;\n\nuse crate::geometry::Ray;\n\nuse crate::utils::color::Color;\n\n\n", "file_path": "src/material/material.rs", "rank": 8, "score": 27246.394482362666 }, { "content": "pub trait Hittable {\n\n fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;\n\n}\n", "file_path": "src/geometry/hittable.rs", "rank": 10, "score": 25757.2238780011 }, { "content": "mod dielectric;\n\nmod lambertian;\n\nmod material;\n\nmod metal;\n\n\n\npub use dielectric::Dielectric;\n\npub use lambertian::Lambertian;\n\npub use material::Material;\n\npub use metal::Metal;\n", "file_path": "src/material.rs", "rank": 11, "score": 22373.330351352306 }, { "content": "use super::hit_record::HitRecord;\n\nuse 
super::hittable::Hittable;\n\nuse super::ray::Ray;\n\nuse crate::material::Material;\n\nuse crate::utils::vec3::Vec3;\n\nuse std::rc::Rc;\n\n\n\npub struct Sphere {\n\n center: Vec3,\n\n radius: f64,\n\n mat: Rc<Box<dyn Material>>,\n\n}\n\n\n\nimpl Sphere {\n\n pub fn new(center: Vec3, radius: f64, mat: &Rc<Box<dyn Material>>) -> Self {\n\n Self {\n\n center,\n\n radius,\n\n mat: Rc::clone(mat),\n\n }\n", "file_path": "src/geometry/sphere.rs", "rank": 12, "score": 21749.94683639913 }, { "content": " }\n\n}\n\n\n\nimpl Hittable for Sphere {\n\n fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {\n\n let oc = r.origin().clone() - self.center;\n\n let a = r.direction().length_squared();\n\n let half_b = oc.dot(r.direction());\n\n let c = oc.length_squared() - (self.radius * self.radius);\n\n\n\n let discriminant = half_b * half_b - a * c;\n\n if discriminant < 0.0 {\n\n return None;\n\n }\n\n let sqrtd = discriminant.sqrt();\n\n\n\n // Find the nearest root that lies in the acceptable range.\n\n let mut root = (-half_b - sqrtd) / a;\n\n if root < t_min || t_max < root {\n\n root = (-half_b + sqrtd) / a;\n", "file_path": "src/geometry/sphere.rs", "rank": 13, "score": 21744.174967475625 }, { "content": "use super::vec3::Vec3;\n\nuse std::io::{self, Write};\n\n\n\npub type Color = Vec3;\n\n\n", "file_path": "src/utils/color.rs", "rank": 14, "score": 21742.002736362254 }, { "content": " if root < t_min || t_max < root {\n\n return None;\n\n }\n\n }\n\n\n\n let p = r.at(root);\n\n let outward_normal = (p - self.center) / self.radius;\n\n\n\n Some(HitRecord::new(p, root, r, &outward_normal, &self.mat))\n\n }\n\n}\n", "file_path": "src/geometry/sphere.rs", "rank": 15, "score": 21739.303842118108 }, { "content": " pub fn direction(&self) -> &Vec3 {\n\n &self.direction\n\n }\n\n\n\n pub fn at(&self, t: f64) -> Vec3 {\n\n self.origin.clone() + t * self.direction.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EPSILON: f64 = 0.00001;\n\n\n\n #[test]\n\n fn new() {\n\n let r = Ray::new(&Vec3::new(1.0, 2.0, 3.0), &Vec3::new(4.0, 5.0, 6.0));\n\n\n\n assert!((r.origin.x() - 1.0).abs() < EPSILON);\n", "file_path": "src/geometry/ray.rs", "rank": 16, "score": 21634.6452998805 }, { "content": "use crate::utils::vec3::Vec3;\n\n\n\n#[derive(Debug)]\n\npub struct Ray {\n\n origin: Vec3,\n\n direction: Vec3,\n\n}\n\n\n\nimpl Ray {\n\n pub fn new(origin: &Vec3, direction: &Vec3) -> Ray {\n\n Ray {\n\n origin: origin.clone(),\n\n direction: direction.clone(),\n\n }\n\n }\n\n\n\n pub fn origin(&self) -> &Vec3 {\n\n &self.origin\n\n }\n\n\n", "file_path": "src/geometry/ray.rs", "rank": 17, "score": 21633.787719783242 }, { "content": " assert!((r.origin.y() - 2.0).abs() < EPSILON);\n\n assert!((r.origin.z() - 3.0).abs() < EPSILON);\n\n assert!((r.direction.x() - 4.0).abs() < EPSILON);\n\n assert!((r.direction.y() - 5.0).abs() < EPSILON);\n\n assert!((r.direction.z() - 6.0).abs() < EPSILON);\n\n }\n\n\n\n #[test]\n\n fn at() {\n\n let r = Ray::new(&Vec3::new(1.0, 0.0, 0.0), &Vec3::new(1.0, 1.0, 1.0));\n\n let p = r.at(0.5);\n\n\n\n assert!((p.x() - 1.5).abs() < EPSILON);\n\n assert!((p.y() - 0.5).abs() < EPSILON);\n\n assert!((p.z() - 0.5).abs() < EPSILON);\n\n }\n\n}\n", "file_path": "src/geometry/ray.rs", "rank": 18, "score": 21631.318782960196 }, { "content": "use super::ray::Ray;\n\nuse super::hit_record::HitRecord;\n\n\n", "file_path": "src/geometry/hittable.rs", "rank": 19, "score": 21417.301706915645 }, { "content": "use super::material::Material;\n\nuse 
crate::geometry::{HitRecord, Ray};\n\nuse crate::utils::color::Color;\n\nuse rand::random;\n\n\n\npub struct Dielectric {\n\n ir: f64, // Index of Refraction\n\n}\n\n\n\nimpl Dielectric {\n\n pub fn new(ir: f64) -> Self {\n\n Dielectric { ir }\n\n }\n\n\n\n fn reflectance(&self, cosine: f64, ref_idx: f64) -> f64 {\n\n // Use Schlick's approximation for reflectance.\n\n let r0 = ((1.0 - ref_idx) / (1.0 + ref_idx)).powi(2);\n\n r0 + (1.0 - r0) * ((1.0 - cosine).powi(5))\n\n }\n\n}\n", "file_path": "src/material/dielectric.rs", "rank": 20, "score": 21081.189658672833 }, { "content": "\n\nimpl Material for Dielectric {\n\n fn scatter(&self, r_in: &Ray, rec: &HitRecord) -> Option<(Color, Ray)> {\n\n let refraction_ratio = if rec.front_face {\n\n 1.0 / self.ir\n\n } else {\n\n self.ir\n\n };\n\n\n\n let unit_direction = r_in.direction().unit();\n\n let cos_theta = (-unit_direction.dot(&rec.normal)).min(1.0);\n\n let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();\n\n\n\n let cannot_refract = refraction_ratio * sin_theta > 1.0;\n\n\n\n let direction =\n\n if cannot_refract || self.reflectance(cos_theta, refraction_ratio) > random() {\n\n unit_direction.reflect(&rec.normal)\n\n } else {\n\n unit_direction.refract(&rec.normal, refraction_ratio)\n\n };\n\n\n\n Some((Color::new(1.0, 1.0, 1.0), Ray::new(&rec.p, &direction)))\n\n }\n\n}\n", "file_path": "src/material/dielectric.rs", "rank": 21, "score": 21079.75274499135 }, { "content": " self.objects.push(object)\n\n }\n\n}\n\n\n\nimpl Hittable for HittableList {\n\n fn hit(&self, r: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {\n\n let mut temp_rec = None;\n\n let mut closest_so_far = t_max;\n\n\n\n for object in self.objects.iter() {\n\n if let Some(rec) = object.hit(r, t_min, closest_so_far) {\n\n closest_so_far = rec.t;\n\n temp_rec = Some(rec);\n\n }\n\n }\n\n\n\n temp_rec\n\n }\n\n}\n", "file_path": "src/geometry/hittable_list.rs", "rank": 22, "score": 20253.051834821887 }, { "content": "use super::hit_record::HitRecord;\n\nuse super::hittable::Hittable;\n\nuse super::ray::Ray;\n\n\n\npub struct HittableList {\n\n objects: Vec<Box<dyn Hittable>>,\n\n}\n\n\n\nimpl HittableList {\n\n pub fn new() -> Self {\n\n Self {\n\n objects: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.objects.clear()\n\n }\n\n\n\n pub fn add(&mut self, object: Box<dyn Hittable>) {\n", "file_path": "src/geometry/hittable_list.rs", "rank": 23, "score": 20251.74678423624 }, { "content": "mod hit_record;\n\nmod hittable;\n\nmod hittable_list;\n\nmod ray;\n\nmod sphere;\n\n\n\npub use hit_record::HitRecord;\n\npub use hittable::Hittable;\n\npub use hittable_list::HittableList;\n\npub use ray::Ray;\n\npub use sphere::Sphere;\n", "file_path": "src/geometry.rs", "rank": 27, "score": 9.899612033897807 }, { "content": "use crate::geometry::ray::Ray;\n\nuse crate::material::Material;\n\nuse crate::utils::vec3::Vec3;\n\nuse std::rc::Rc;\n\n\n\npub struct HitRecord {\n\n pub p: Vec3,\n\n pub normal: Vec3,\n\n pub t: f64,\n\n pub front_face: bool,\n\n pub mat: Rc<Box<dyn Material>>,\n\n}\n\n\n\nimpl HitRecord {\n\n pub fn new(\n\n p: Vec3,\n\n t: f64,\n\n r: &Ray,\n\n outward_normal: &Vec3,\n\n mat: &Rc<Box<dyn Material>>,\n", "file_path": "src/geometry/hit_record.rs", "rank": 29, "score": 7.668725537597954 }, { "content": " return p;\n\n }\n\n }\n\n }\n\n\n\n pub fn new_random_in_hemisphere(normal: &Self) -> Self {\n\n let in_unit_sphere = Self::new_random_in_unit_sphere();\n\n if in_unit_sphere.dot(normal) > 0.0 {\n\n // In the same hemisphere as the 
normal\n\n in_unit_sphere\n\n } else {\n\n -in_unit_sphere\n\n }\n\n }\n\n\n\n pub fn new_random_unit_vector() -> Self {\n\n Self::new_random_in_unit_sphere().unit()\n\n }\n\n\n\n pub fn x(&self) -> f64 {\n", "file_path": "src/utils/vec3.rs", "rank": 30, "score": 6.545065464627287 }, { "content": "use rand::{thread_rng, Rng};\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub};\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Vec3 {\n\n value: [f64; 3],\n\n}\n\n\n\nimpl Vec3 {\n\n pub fn new(x: f64, y: f64, z: f64) -> Self {\n\n Self { value: [x, y, z] }\n\n }\n\n\n\n pub fn new_random(min: f64, max: f64) -> Self {\n\n let mut rng = thread_rng();\n\n\n\n Self {\n\n value: [\n\n rng.gen_range(min..max),\n\n rng.gen_range(min..max),\n", "file_path": "src/utils/vec3.rs", "rank": 31, "score": 5.621477693490796 }, { "content": " horizontal,\n\n vertical,\n\n u,\n\n v,\n\n lens_radius: aperture / 2.0,\n\n }\n\n }\n\n\n\n pub fn get_ray(&self, s: f64, t: f64) -> Ray {\n\n let rd = self.lens_radius * Vec3::new_random_in_unit_disk();\n\n let offset = self.u * rd.x() + self.v * rd.y();\n\n\n\n let origin = self.origin + offset;\n\n let direction = self.lower_left_corner + s * self.horizontal + t * self.vertical - origin;\n\n\n\n Ray::new(&origin, &direction)\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 32, "score": 5.173514571737631 }, { "content": " rng.gen_range(min..max),\n\n ],\n\n }\n\n }\n\n\n\n pub fn new_random_in_unit_sphere() -> Self {\n\n loop {\n\n let p = Self::new_random(-1.0, 1.0);\n\n if p.length_squared() < 1.0 {\n\n return p;\n\n }\n\n }\n\n }\n\n\n\n pub fn new_random_in_unit_disk() -> Self {\n\n let mut rng = thread_rng();\n\n\n\n loop {\n\n let p = Self::new(rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), 0.0);\n\n if p.length_squared() < 1.0 {\n", "file_path": "src/utils/vec3.rs", "rank": 33, "score": 4.782283907176142 }, { "content": " });\n\n }\n\n}\n\n\n\nimpl Div<f64> for Vec3 {\n\n type Output = Self;\n\n\n\n fn div(self, rhs: f64) -> Self {\n\n let mut value = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = self.value[i] / rhs;\n\n }\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl DivAssign<f64> for Vec3 {\n\n fn div_assign(&mut self, rhs: f64) {\n\n self.value.iter_mut().for_each(|v| {\n\n *v /= rhs;\n", "file_path": "src/utils/vec3.rs", "rank": 34, "score": 4.7381273467537675 }, { "content": "use crate::geometry::Ray;\n\nuse crate::utils::vec3::Vec3;\n\n\n\npub struct Camera {\n\n origin: Vec3,\n\n lower_left_corner: Vec3,\n\n horizontal: Vec3,\n\n vertical: Vec3,\n\n u: Vec3,\n\n v: Vec3,\n\n lens_radius: f64,\n\n}\n\n\n\nimpl Camera {\n\n pub fn new(\n\n lookfrom: Vec3,\n\n lookat: Vec3,\n\n vup: Vec3,\n\n vfov: f64, // vertical field-of-view in degrees\n\n aspect_ratio: f64,\n", "file_path": "src/camera.rs", "rank": 35, "score": 4.686689678273433 }, { "content": " let mut value = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = self.value[i] * rhs;\n\n }\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl Mul<Vec3> for f64 {\n\n type Output = Vec3;\n\n\n\n fn mul(self, rhs: Vec3) -> Vec3 {\n\n rhs * self\n\n }\n\n}\n\n\n\nimpl MulAssign<f64> for Vec3 {\n\n fn mul_assign(&mut self, rhs: f64) {\n\n self.value.iter_mut().for_each(|v| {\n\n *v *= rhs;\n", "file_path": "src/utils/vec3.rs", "rank": 36, "score": 4.488993006761573 }, { "content": "impl Neg for Vec3 {\n\n type Output = Self;\n\n\n\n fn neg(self) -> Self {\n\n let mut value: [f64; 3] = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = -self.value[i];\n\n }\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl 
Add for Vec3 {\n\n type Output = Self;\n\n\n\n fn add(self, other: Self) -> Self {\n\n let mut value = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = self.value[i] + other.value[i];\n\n }\n", "file_path": "src/utils/vec3.rs", "rank": 38, "score": 3.4713096208208807 }, { "content": " Self { value }\n\n }\n\n}\n\n\n\nimpl Mul<Vec3> for Vec3 {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: Vec3) -> Self {\n\n let mut value = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = self.value[i] * rhs.value[i];\n\n }\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl Mul<f64> for Vec3 {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: f64) -> Self {\n", "file_path": "src/utils/vec3.rs", "rank": 39, "score": 3.2979954329310206 }, { "content": " });\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EPSILON: f64 = 0.00001;\n\n\n\n #[test]\n\n fn dot() {\n\n let v1 = Vec3::new(1.0, 2.0, 3.0);\n\n let v2 = Vec3::new(1.0, 1.0, 1.0);\n\n\n\n assert!((v1.dot(&v2) - 6.0).abs() < EPSILON);\n\n }\n\n\n\n #[test]\n\n fn cross() {\n", "file_path": "src/utils/vec3.rs", "rank": 40, "score": 3.102757135113464 }, { "content": " self.value[0]\n\n }\n\n\n\n pub fn y(&self) -> f64 {\n\n self.value[1]\n\n }\n\n\n\n pub fn z(&self) -> f64 {\n\n self.value[2]\n\n }\n\n\n\n pub fn length(&self) -> f64 {\n\n self.length_squared().sqrt()\n\n }\n\n\n\n pub fn length_squared(&self) -> f64 {\n\n self.value.iter().map(|&v| v * v).sum()\n\n }\n\n\n\n pub fn dot(&self, other: &Self) -> f64 {\n", "file_path": "src/utils/vec3.rs", "rank": 41, "score": 2.67493057060328 }, { "content": "pub mod color;\n\npub mod vec3;\n", "file_path": "src/utils.rs", "rank": 42, "score": 2.34914816073541 }, { "content": " Self { value }\n\n }\n\n}\n\n\n\nimpl AddAssign for Vec3 {\n\n fn add_assign(&mut self, rhs: Self) {\n\n self.value.iter_mut().enumerate().for_each(|(i, v)| {\n\n *v += rhs.value[i];\n\n });\n\n }\n\n}\n\n\n\nimpl Sub for Vec3 {\n\n type Output = Self;\n\n\n\n fn sub(self, other: Self) -> Self {\n\n let mut value = [0.0; 3];\n\n for i in 0..3 {\n\n value[i] = self.value[i] - other.value[i];\n\n }\n", "file_path": "src/utils/vec3.rs", "rank": 43, "score": 2.3475838425493913 }, { "content": " let mut v = Vec3::new(1.0, 2.0, 3.0);\n\n v *= 2.0;\n\n\n\n assert!((v.x() - 2.0).abs() < EPSILON);\n\n assert!((v.y() - 4.0).abs() < EPSILON);\n\n assert!((v.z() - 6.0).abs() < EPSILON);\n\n }\n\n\n\n #[test]\n\n fn div_assign() {\n\n let mut v = Vec3::new(2.0, 4.0, 6.0);\n\n v /= 2.0;\n\n\n\n assert!((v.x() - 1.0).abs() < EPSILON);\n\n assert!((v.y() - 2.0).abs() < EPSILON);\n\n assert!((v.z() - 3.0).abs() < EPSILON);\n\n }\n\n}\n", "file_path": "src/utils/vec3.rs", "rank": 44, "score": 2.0796095619195096 }, { "content": "pub mod camera;\n\npub mod geometry;\n\npub mod material;\n\npub mod utils;\n", "file_path": "src/lib.rs", "rank": 45, "score": 2.0383812626615834 }, { "content": "# raytracing_in_one_weekend_rust\n\nMy implementation of [_Ray Tracing in One Weekend_](https://raytracing.github.io/books/RayTracingInOneWeekend.html) book in Rust\n\n\n\n![result](result.jpg)\n\n\n\n## Run\n\n```\n\ncargo run --release > out.ppm\n\n```\n", "file_path": "README.md", "rank": 46, "score": 1.9028823838578115 }, { "content": "\n\n pub fn near_zero(&self) -> bool {\n\n const EPSILON: f64 = 1e-8;\n\n (self.x().abs() < EPSILON) && (self.y().abs() < EPSILON) && (self.z().abs() < EPSILON)\n\n }\n\n\n\n pub fn reflect(&self, n: &Vec3) -> Self {\n\n self.clone() - 2.0 * self.dot(n) * n.clone()\n\n }\n\n\n\n pub fn refract(&self, n: &Vec3, 
etai_over_etat: f64) -> Self {\n\n let cos_theta = (-self.clone()).dot(n).min(1.0);\n\n\n\n let r_out_perp = etai_over_etat * (self.clone() + cos_theta * n.clone());\n\n let r_out_parallel = -((1.0 - r_out_perp.length_squared()).abs().sqrt()) * n.clone();\n\n\n\n r_out_perp + r_out_parallel\n\n }\n\n}\n\n\n", "file_path": "src/utils/vec3.rs", "rank": 47, "score": 1.616601603616592 }, { "content": " aperture: f64,\n\n focus_dist: f64,\n\n ) -> Self {\n\n let theta = vfov.to_radians();\n\n let h = (theta / 2.0).tan();\n\n let viewport_height = 2.0 * h;\n\n let viewport_width = aspect_ratio * viewport_height;\n\n\n\n let w = (lookfrom - lookat).unit();\n\n let u = vup.cross(&w).unit();\n\n let v = w.cross(&u);\n\n\n\n let origin = lookfrom;\n\n let horizontal = focus_dist * viewport_width * u;\n\n let vertical = focus_dist * viewport_height * v;\n\n let lower_left_corner = origin - horizontal / 2.0 - vertical / 2.0 - focus_dist * w;\n\n\n\n Self {\n\n origin,\n\n lower_left_corner,\n", "file_path": "src/camera.rs", "rank": 48, "score": 1.5254740398507312 }, { "content": " let v1 = Vec3::new(1.0, 2.0, 3.0);\n\n let mut v2 = Vec3::new(1.0, 1.0, 1.0);\n\n v2 += v1;\n\n\n\n assert!((v2.x() - 2.0).abs() < EPSILON);\n\n assert!((v2.y() - 3.0).abs() < EPSILON);\n\n assert!((v2.z() - 4.0).abs() < EPSILON);\n\n }\n\n\n\n #[test]\n\n fn sub() {\n\n let v = Vec3::new(1.0, 2.0, 3.0) - Vec3::new(0.1, 0.2, 0.3);\n\n\n\n assert!((v.x() - 0.9).abs() < EPSILON);\n\n assert!((v.y() - 1.8).abs() < EPSILON);\n\n assert!((v.z() - 2.7).abs() < EPSILON);\n\n }\n\n\n\n #[test]\n\n fn mul_assign() {\n", "file_path": "src/utils/vec3.rs", "rank": 49, "score": 1.2187485529877118 } ]
Rust
src/client/protocol/keyboard.rs
lummax/wayland-client-rs
5f41fe43d8a287d0b107cc10a2cc5045d2a537b6
#![allow(unused_imports)] use std::{ptr, mem}; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use libc::{c_void, c_int, uint32_t}; use ffi; use client::protocol::{FromPrimitive, GetInterface}; use client::base::Proxy as BaseProxy; use client::base::{FromRawPtr, AsRawPtr, EventQueue}; #[link(name="wayland-client")] extern { static wl_keyboard_interface: ffi::wayland::WLInterface; } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeymapFormat { NoKeymap = 0, XkbV1 = 1, } impl FromPrimitive for KeyboardKeymapFormat { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeymapFormat::NoKeymap), 1 => Some(KeyboardKeymapFormat::XkbV1), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeymapFormatSet { fn has_no_keymap(&self) -> bool; fn has_xkb_v1(&self) -> bool; } impl KeyboardKeymapFormatSet for u32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as u32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as u32) != 0; } } impl KeyboardKeymapFormatSet for i32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as i32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as i32) != 0; } } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeyState { Released = 0, Pressed = 1, } impl FromPrimitive for KeyboardKeyState { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeyState::Released), 1 => Some(KeyboardKeyState::Pressed), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeyStateSet { fn has_released(&self) -> bool; fn has_pressed(&self) -> bool; } impl KeyboardKeyStateSet for u32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as u32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as u32) != 0; } } impl KeyboardKeyStateSet for i32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as i32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as i32) != 0; } } #[repr(C)] enum KeyboardEvent { Keymap = 0, Enter = 1, Leave = 2, Key = 3, Modifiers = 4, RepeatInfo = 5, } impl FromPrimitive for KeyboardEvent { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardEvent::Keymap), 1 => Some(KeyboardEvent::Enter), 2 => Some(KeyboardEvent::Leave), 3 => Some(KeyboardEvent::Key), 4 => Some(KeyboardEvent::Modifiers), 5 => Some(KeyboardEvent::RepeatInfo), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[repr(C)] enum KeyboardRequest { Release = 0, _Dummy, } impl FromPrimitive for KeyboardRequest { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardRequest::Release), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[derive(Debug)] pub struct Keyboard { proxy: BaseProxy, } impl Keyboard { pub fn release(mut self) { let proxy = self.as_mut_ptr() as *mut ffi::wayland::WLProxy; unsafe { ffi::wayland::wl_proxy_marshal( proxy, KeyboardRequest::Release as u32 ); } } pub fn get_id(&mut self) -> u32 { return self.proxy.get_id(); } pub fn get_class(&mut self) -> String { return self.proxy.get_class(); } pub fn set_queue(&mut self, queue: Option<&mut EventQueue>) { self.proxy.set_queue(queue); } } impl FromRawPtr<ffi::wayland::WLProxy> for Keyboard { fn 
from_mut_ptr(ptr: *mut ffi::wayland::WLProxy) -> Result<Self, &'static str> { return match FromRawPtr::from_mut_ptr(ptr) { Ok(proxy) => Ok(Keyboard { proxy: proxy, }), Err(str) => Err(str), } } } impl AsRawPtr<ffi::wayland::WLProxy> for Keyboard { fn as_mut_ptr(&mut self) -> *mut ffi::wayland::WLProxy { return self.proxy.as_mut_ptr(); } } impl GetInterface for Keyboard { fn get_interface() -> *const ffi::wayland::WLInterface { return &wl_keyboard_interface as *const ffi::wayland::WLInterface; } } #[allow(unused_variables)] extern fn keyboard_event_dispatcher<T: KeyboardEventHandler>( user_data: *mut c_void, _target: *mut c_void, opcode: uint32_t, _message: *const ffi::wayland::WLMessage, arguments: *mut ffi::wayland::WLArgument) -> c_int { let object = user_data as *mut T; return match KeyboardEvent::from_u32(opcode) { Some(event) => { match event { KeyboardEvent::Keymap => { let format = unsafe { *(*arguments.offset(0)).uint() }; let fd = unsafe { *(*arguments.offset(1)).file_descriptor() }; let size = unsafe { *(*arguments.offset(2)).uint() }; unsafe { (*object).on_keymap(format, fd, size); } }, KeyboardEvent::Enter => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; let keys = unsafe { *(*arguments.offset(2)).array() }; unsafe { (*object).on_enter(serial, surface, keys); } }, KeyboardEvent::Leave => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; unsafe { (*object).on_leave(serial, surface); } }, KeyboardEvent::Key => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let time = unsafe { *(*arguments.offset(1)).uint() }; let key = unsafe { *(*arguments.offset(2)).uint() }; let state = unsafe { *(*arguments.offset(3)).uint() }; unsafe { (*object).on_key(serial, time, key, state); } }, KeyboardEvent::Modifiers => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let mods_depressed = unsafe { *(*arguments.offset(1)).uint() }; let mods_latched = unsafe { *(*arguments.offset(2)).uint() }; let mods_locked = unsafe { *(*arguments.offset(3)).uint() }; let group = unsafe { *(*arguments.offset(4)).uint() }; unsafe { (*object).on_modifiers(serial, mods_depressed, mods_latched, mods_locked, group); } }, KeyboardEvent::RepeatInfo => { let rate = unsafe { *(*arguments.offset(0)).int() }; let delay = unsafe { *(*arguments.offset(1)).int() }; unsafe { (*object).on_repeat_info(rate, delay); } }, } 0 }, _ => -1, } } pub trait KeyboardEventHandler: Sized { fn connect_dispatcher(&mut self) { unsafe { ffi::wayland::wl_proxy_add_dispatcher( self.get_keyboard().as_mut_ptr(), keyboard_event_dispatcher::<Self>, self as *mut Self as *mut c_void, ptr::null_mut()); } } fn get_keyboard(&mut self) -> &mut Keyboard; #[allow(unused_variables)] fn on_keymap(&mut self, format: u32, fd: RawFd, size: u32) {} #[allow(unused_variables)] fn on_enter(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject, keys: *mut ffi::wayland::WLArray) {} #[allow(unused_variables)] fn on_leave(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject) {} #[allow(unused_variables)] fn on_key(&mut self, serial: u32, time: u32, key: u32, state: u32) {} #[allow(unused_variables)] fn on_modifiers(&mut self, serial: u32, mods_depressed: u32, mods_latched: u32, mods_locked: u32, group: u32) {} #[allow(unused_variables)] fn on_repeat_info(&mut self, rate: i32, delay: i32) {} }
#![allow(unused_imports)] use std::{ptr, mem}; use std::ffi::{CStr, CString}; use std::os::unix::io::RawFd; use libc::{c_void, c_int, uint32_t}; use ffi; use client::protocol::{FromPrimitive, GetInterface}; use client::base::Proxy as BaseProxy; use client::base::{FromRawPtr, AsRawPtr, EventQueue}; #[link(name="wayland-client")] extern { static wl_keyboard_interface: ffi::wayland::WLInterface; } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeymapFormat { NoKeymap = 0, XkbV1 = 1, } impl FromPrimitive for KeyboardKeymapFormat { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeymapFormat::NoKeymap), 1 => Some(KeyboardKeymapFormat::XkbV1), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeymapFormatSet { fn has_no_keymap(&self) -> bool; fn has_xkb_v1(&self) -> bool; } impl KeyboardKeymapFormatSet for u32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as u32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as u32) != 0; } } impl KeyboardKeymapFormatSet for i32 { fn has_no_keymap(&self) -> bool { return self & (KeyboardKeymapFormat::NoKeymap as i32) != 0; } fn has_xkb_v1(&self) -> bool { return self & (KeyboardKeymapFormat::XkbV1 as i32) != 0; } } #[repr(C)] #[derive(Debug)] pub enum KeyboardKeyState { Released = 0, Pressed = 1, } impl FromPrimitive for KeyboardKeyState { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardKeyState::Released), 1 => Some(KeyboardKeyState::Pressed), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } pub trait KeyboardKeyStateSet { fn has_released(&self) -> bool; fn has_pressed(&self) -> bool; } impl KeyboardKeyStateSet for u32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as u32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as u32) != 0; } } impl KeyboardKeyStateSet for i32 { fn has_released(&self) -> bool { return self & (KeyboardKeyState::Released as i32) != 0; } fn has_pressed(&self) -> bool { return self & (KeyboardKeyState::Pressed as i32) != 0; } } #[repr(C)] enum KeyboardEvent { Keymap = 0, Enter = 1, Leave = 2, Key = 3, Modifiers = 4, RepeatInfo = 5, } impl FromPrimitive for KeyboardEvent { fn from_u32(num: u32) -> Option<Self> {
4 => Some(KeyboardEvent::Modifiers), 5 => Some(KeyboardEvent::RepeatInfo), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[repr(C)] enum KeyboardRequest { Release = 0, _Dummy, } impl FromPrimitive for KeyboardRequest { fn from_u32(num: u32) -> Option<Self> { return match num { 0 => Some(KeyboardRequest::Release), _ => None } } fn from_i32(num: i32) -> Option<Self> { return Self::from_u32(num as u32); } } #[derive(Debug)] pub struct Keyboard { proxy: BaseProxy, } impl Keyboard { pub fn release(mut self) { let proxy = self.as_mut_ptr() as *mut ffi::wayland::WLProxy; unsafe { ffi::wayland::wl_proxy_marshal( proxy, KeyboardRequest::Release as u32 ); } } pub fn get_id(&mut self) -> u32 { return self.proxy.get_id(); } pub fn get_class(&mut self) -> String { return self.proxy.get_class(); } pub fn set_queue(&mut self, queue: Option<&mut EventQueue>) { self.proxy.set_queue(queue); } } impl FromRawPtr<ffi::wayland::WLProxy> for Keyboard { fn from_mut_ptr(ptr: *mut ffi::wayland::WLProxy) -> Result<Self, &'static str> { return match FromRawPtr::from_mut_ptr(ptr) { Ok(proxy) => Ok(Keyboard { proxy: proxy, }), Err(str) => Err(str), } } } impl AsRawPtr<ffi::wayland::WLProxy> for Keyboard { fn as_mut_ptr(&mut self) -> *mut ffi::wayland::WLProxy { return self.proxy.as_mut_ptr(); } } impl GetInterface for Keyboard { fn get_interface() -> *const ffi::wayland::WLInterface { return &wl_keyboard_interface as *const ffi::wayland::WLInterface; } } #[allow(unused_variables)] extern fn keyboard_event_dispatcher<T: KeyboardEventHandler>( user_data: *mut c_void, _target: *mut c_void, opcode: uint32_t, _message: *const ffi::wayland::WLMessage, arguments: *mut ffi::wayland::WLArgument) -> c_int { let object = user_data as *mut T; return match KeyboardEvent::from_u32(opcode) { Some(event) => { match event { KeyboardEvent::Keymap => { let format = unsafe { *(*arguments.offset(0)).uint() }; let fd = unsafe { *(*arguments.offset(1)).file_descriptor() }; let size = unsafe { *(*arguments.offset(2)).uint() }; unsafe { (*object).on_keymap(format, fd, size); } }, KeyboardEvent::Enter => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; let keys = unsafe { *(*arguments.offset(2)).array() }; unsafe { (*object).on_enter(serial, surface, keys); } }, KeyboardEvent::Leave => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let surface = unsafe { *(*arguments.offset(1)).object() }; unsafe { (*object).on_leave(serial, surface); } }, KeyboardEvent::Key => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let time = unsafe { *(*arguments.offset(1)).uint() }; let key = unsafe { *(*arguments.offset(2)).uint() }; let state = unsafe { *(*arguments.offset(3)).uint() }; unsafe { (*object).on_key(serial, time, key, state); } }, KeyboardEvent::Modifiers => { let serial = unsafe { *(*arguments.offset(0)).uint() }; let mods_depressed = unsafe { *(*arguments.offset(1)).uint() }; let mods_latched = unsafe { *(*arguments.offset(2)).uint() }; let mods_locked = unsafe { *(*arguments.offset(3)).uint() }; let group = unsafe { *(*arguments.offset(4)).uint() }; unsafe { (*object).on_modifiers(serial, mods_depressed, mods_latched, mods_locked, group); } }, KeyboardEvent::RepeatInfo => { let rate = unsafe { *(*arguments.offset(0)).int() }; let delay = unsafe { *(*arguments.offset(1)).int() }; unsafe { (*object).on_repeat_info(rate, delay); } }, } 0 }, _ => -1, } } pub trait KeyboardEventHandler: Sized { fn 
connect_dispatcher(&mut self) { unsafe { ffi::wayland::wl_proxy_add_dispatcher( self.get_keyboard().as_mut_ptr(), keyboard_event_dispatcher::<Self>, self as *mut Self as *mut c_void, ptr::null_mut()); } } fn get_keyboard(&mut self) -> &mut Keyboard; #[allow(unused_variables)] fn on_keymap(&mut self, format: u32, fd: RawFd, size: u32) {} #[allow(unused_variables)] fn on_enter(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject, keys: *mut ffi::wayland::WLArray) {} #[allow(unused_variables)] fn on_leave(&mut self, serial: u32, surface: *mut ffi::wayland::WLObject) {} #[allow(unused_variables)] fn on_key(&mut self, serial: u32, time: u32, key: u32, state: u32) {} #[allow(unused_variables)] fn on_modifiers(&mut self, serial: u32, mods_depressed: u32, mods_latched: u32, mods_locked: u32, group: u32) {} #[allow(unused_variables)] fn on_repeat_info(&mut self, rate: i32, delay: i32) {} }
return match num { 0 => Some(KeyboardEvent::Keymap), 1 => Some(KeyboardEvent::Enter), 2 => Some(KeyboardEvent::Leave), 3 => Some(KeyboardEvent::Key),
function_block-random_span
[ { "content": "pub trait AsRawPtr<T> {\n\n fn as_mut_ptr(&mut self) -> *mut T;\n\n}\n", "file_path": "src/client/base/mod.rs", "rank": 2, "score": 128661.67143358607 }, { "content": "pub trait FromPrimitive {\n\n fn from_u32(num: u32) -> Option<Self>;\n\n fn from_i32(num: i32) -> Option<Self>;\n\n}\n\n\n", "file_path": "src/client/protocol/mod.rs", "rank": 3, "score": 114132.54330707053 }, { "content": "pub trait GetInterface {\n\n fn get_interface() -> *const ffi::wayland::WLInterface;\n\n}\n", "file_path": "src/client/protocol/mod.rs", "rank": 4, "score": 111879.43729863803 }, { "content": "pub trait DisplayErrorSet {\n\n fn has_invalid_object(&self) -> bool;\n\n fn has_invalid_method(&self) -> bool;\n\n fn has_no_memory(&self) -> bool;\n\n}\n\n\n\nimpl DisplayErrorSet for u32 {\n\n fn has_invalid_object(&self) -> bool {\n\n return self & (DisplayError::InvalidObject as u32) != 0;\n\n }\n\n fn has_invalid_method(&self) -> bool {\n\n return self & (DisplayError::InvalidMethod as u32) != 0;\n\n }\n\n fn has_no_memory(&self) -> bool {\n\n return self & (DisplayError::NoMemory as u32) != 0;\n\n }\n\n}\n\n\n\nimpl DisplayErrorSet for i32 {\n\n fn has_invalid_object(&self) -> bool {\n", "file_path": "src/client/protocol/display.rs", "rank": 5, "score": 109773.97502141027 }, { "content": "pub trait SurfaceErrorSet {\n\n fn has_invalid_scale(&self) -> bool;\n\n fn has_invalid_transform(&self) -> bool;\n\n}\n\n\n\nimpl SurfaceErrorSet for u32 {\n\n fn has_invalid_scale(&self) -> bool {\n\n return self & (SurfaceError::InvalidScale as u32) != 0;\n\n }\n\n fn has_invalid_transform(&self) -> bool {\n\n return self & (SurfaceError::InvalidTransform as u32) != 0;\n\n }\n\n}\n\n\n\nimpl SurfaceErrorSet for i32 {\n\n fn has_invalid_scale(&self) -> bool {\n\n return self & (SurfaceError::InvalidScale as i32) != 0;\n\n }\n\n fn has_invalid_transform(&self) -> bool {\n\n return self & (SurfaceError::InvalidTransform as i32) != 0;\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/surface.rs", "rank": 6, "score": 109773.97502141027 }, { "content": "pub trait ShmErrorSet {\n\n fn has_invalid_format(&self) -> bool;\n\n fn has_invalid_stride(&self) -> bool;\n\n fn has_invalid_fd(&self) -> bool;\n\n}\n\n\n\nimpl ShmErrorSet for u32 {\n\n fn has_invalid_format(&self) -> bool {\n\n return self & (ShmError::InvalidFormat as u32) != 0;\n\n }\n\n fn has_invalid_stride(&self) -> bool {\n\n return self & (ShmError::InvalidStride as u32) != 0;\n\n }\n\n fn has_invalid_fd(&self) -> bool {\n\n return self & (ShmError::InvalidFd as u32) != 0;\n\n }\n\n}\n\n\n\nimpl ShmErrorSet for i32 {\n\n fn has_invalid_format(&self) -> bool {\n", "file_path": "src/client/protocol/shm.rs", "rank": 7, "score": 109773.97502141027 }, { "content": "pub trait OutputSubpixelSet {\n\n fn has_unknown(&self) -> bool;\n\n fn has_none(&self) -> bool;\n\n fn has_horizontal_rgb(&self) -> bool;\n\n fn has_horizontal_bgr(&self) -> bool;\n\n fn has_vertical_rgb(&self) -> bool;\n\n fn has_vertical_bgr(&self) -> bool;\n\n}\n\n\n\nimpl OutputSubpixelSet for u32 {\n\n fn has_unknown(&self) -> bool {\n\n return self & (OutputSubpixel::Unknown as u32) != 0;\n\n }\n\n fn has_none(&self) -> bool {\n\n return self & (OutputSubpixel::None as u32) != 0;\n\n }\n\n fn has_horizontal_rgb(&self) -> bool {\n\n return self & (OutputSubpixel::HorizontalRgb as u32) != 0;\n\n }\n\n fn has_horizontal_bgr(&self) -> bool {\n", "file_path": "src/client/protocol/output.rs", "rank": 8, "score": 109773.97502141027 }, { "content": "pub trait PointerErrorSet {\n\n fn 
has_role(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl PointerErrorSet for u32 {\n\n fn has_role(&self) -> bool {\n\n return self & (PointerError::Role as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (PointerError::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl PointerErrorSet for i32 {\n\n fn has_role(&self) -> bool {\n\n return self & (PointerError::Role as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (PointerError::_Dummy as i32) != 0;\n", "file_path": "src/client/protocol/pointer.rs", "rank": 9, "score": 109773.97502141027 }, { "content": "pub trait ShellErrorSet {\n\n fn has_role(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl ShellErrorSet for u32 {\n\n fn has_role(&self) -> bool {\n\n return self & (ShellError::Role as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (ShellError::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl ShellErrorSet for i32 {\n\n fn has_role(&self) -> bool {\n\n return self & (ShellError::Role as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (ShellError::_Dummy as i32) != 0;\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/shell.rs", "rank": 10, "score": 109773.97502141027 }, { "content": "pub trait OutputModeSet {\n\n fn has_current(&self) -> bool;\n\n fn has_preferred(&self) -> bool;\n\n}\n\n\n\nimpl OutputModeSet for u32 {\n\n fn has_current(&self) -> bool {\n\n return self & (OutputMode::Current as u32) != 0;\n\n }\n\n fn has_preferred(&self) -> bool {\n\n return self & (OutputMode::Preferred as u32) != 0;\n\n }\n\n}\n\n\n\nimpl OutputModeSet for i32 {\n\n fn has_current(&self) -> bool {\n\n return self & (OutputMode::Current as i32) != 0;\n\n }\n\n fn has_preferred(&self) -> bool {\n\n return self & (OutputMode::Preferred as i32) != 0;\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/output.rs", "rank": 11, "score": 109773.97502141027 }, { "content": "pub trait SeatCapabilitySet {\n\n fn has_pointer(&self) -> bool;\n\n fn has_keyboard(&self) -> bool;\n\n fn has_touch(&self) -> bool;\n\n}\n\n\n\nimpl SeatCapabilitySet for u32 {\n\n fn has_pointer(&self) -> bool {\n\n return self & (SeatCapability::Pointer as u32) != 0;\n\n }\n\n fn has_keyboard(&self) -> bool {\n\n return self & (SeatCapability::Keyboard as u32) != 0;\n\n }\n\n fn has_touch(&self) -> bool {\n\n return self & (SeatCapability::Touch as u32) != 0;\n\n }\n\n}\n\n\n\nimpl SeatCapabilitySet for i32 {\n\n fn has_pointer(&self) -> bool {\n", "file_path": "src/client/protocol/seat.rs", "rank": 12, "score": 109773.97502141027 }, { "content": "pub trait ShmFormatSet {\n\n fn has_argb8888(&self) -> bool;\n\n fn has_xrgb8888(&self) -> bool;\n\n fn has_c8(&self) -> bool;\n\n fn has_rgb332(&self) -> bool;\n\n fn has_bgr233(&self) -> bool;\n\n fn has_xrgb4444(&self) -> bool;\n\n fn has_xbgr4444(&self) -> bool;\n\n fn has_rgbx4444(&self) -> bool;\n\n fn has_bgrx4444(&self) -> bool;\n\n fn has_argb4444(&self) -> bool;\n\n fn has_abgr4444(&self) -> bool;\n\n fn has_rgba4444(&self) -> bool;\n\n fn has_bgra4444(&self) -> bool;\n\n fn has_xrgb1555(&self) -> bool;\n\n fn has_xbgr1555(&self) -> bool;\n\n fn has_rgbx5551(&self) -> bool;\n\n fn has_bgrx5551(&self) -> bool;\n\n fn has_argb1555(&self) -> bool;\n\n fn has_abgr1555(&self) -> bool;\n", "file_path": "src/client/protocol/shm.rs", "rank": 13, "score": 109773.97502141027 }, { "content": "pub trait PointerAxisSet {\n\n fn has_vertical_scroll(&self) -> bool;\n\n fn has_horizontal_scroll(&self) -> bool;\n\n}\n\n\n\nimpl PointerAxisSet for u32 {\n\n 
fn has_vertical_scroll(&self) -> bool {\n\n return self & (PointerAxis::VerticalScroll as u32) != 0;\n\n }\n\n fn has_horizontal_scroll(&self) -> bool {\n\n return self & (PointerAxis::HorizontalScroll as u32) != 0;\n\n }\n\n}\n\n\n\nimpl PointerAxisSet for i32 {\n\n fn has_vertical_scroll(&self) -> bool {\n\n return self & (PointerAxis::VerticalScroll as i32) != 0;\n\n }\n\n fn has_horizontal_scroll(&self) -> bool {\n\n return self & (PointerAxis::HorizontalScroll as i32) != 0;\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 14, "score": 109773.97502141027 }, { "content": "pub trait SubsurfaceErrorSet {\n\n fn has_bad_surface(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl SubsurfaceErrorSet for u32 {\n\n fn has_bad_surface(&self) -> bool {\n\n return self & (SubsurfaceError::BadSurface as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (SubsurfaceError::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl SubsurfaceErrorSet for i32 {\n\n fn has_bad_surface(&self) -> bool {\n\n return self & (SubsurfaceError::BadSurface as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (SubsurfaceError::_Dummy as i32) != 0;\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/subsurface.rs", "rank": 15, "score": 109773.97502141027 }, { "content": "pub trait SubcompositorErrorSet {\n\n fn has_bad_surface(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl SubcompositorErrorSet for u32 {\n\n fn has_bad_surface(&self) -> bool {\n\n return self & (SubcompositorError::BadSurface as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (SubcompositorError::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl SubcompositorErrorSet for i32 {\n\n fn has_bad_surface(&self) -> bool {\n\n return self & (SubcompositorError::BadSurface as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (SubcompositorError::_Dummy as i32) != 0;\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/subcompositor.rs", "rank": 16, "score": 109773.97502141027 }, { "content": "pub trait OutputTransformSet {\n\n fn has_normal(&self) -> bool;\n\n fn has_90(&self) -> bool;\n\n fn has_180(&self) -> bool;\n\n fn has_270(&self) -> bool;\n\n fn has_flipped(&self) -> bool;\n\n fn has_flipped_90(&self) -> bool;\n\n fn has_flipped_180(&self) -> bool;\n\n fn has_flipped_270(&self) -> bool;\n\n}\n\n\n\nimpl OutputTransformSet for u32 {\n\n fn has_normal(&self) -> bool {\n\n return self & (OutputTransform::Normal as u32) != 0;\n\n }\n\n fn has_90(&self) -> bool {\n\n return self & (OutputTransform::_90 as u32) != 0;\n\n }\n\n fn has_180(&self) -> bool {\n\n return self & (OutputTransform::_180 as u32) != 0;\n", "file_path": "src/client/protocol/output.rs", "rank": 17, "score": 109773.97502141027 }, { "content": "pub trait PointerButtonStateSet {\n\n fn has_released(&self) -> bool;\n\n fn has_pressed(&self) -> bool;\n\n}\n\n\n\nimpl PointerButtonStateSet for u32 {\n\n fn has_released(&self) -> bool {\n\n return self & (PointerButtonState::Released as u32) != 0;\n\n }\n\n fn has_pressed(&self) -> bool {\n\n return self & (PointerButtonState::Pressed as u32) != 0;\n\n }\n\n}\n\n\n\nimpl PointerButtonStateSet for i32 {\n\n fn has_released(&self) -> bool {\n\n return self & (PointerButtonState::Released as i32) != 0;\n\n }\n\n fn has_pressed(&self) -> bool {\n\n return self & (PointerButtonState::Pressed as i32) != 0;\n", "file_path": "src/client/protocol/pointer.rs", "rank": 18, "score": 107802.1044682705 }, { "content": "pub trait DataDeviceErrorSet 
{\n\n fn has_role(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl DataDeviceErrorSet for u32 {\n\n fn has_role(&self) -> bool {\n\n return self & (DataDeviceError::Role as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (DataDeviceError::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl DataDeviceErrorSet for i32 {\n\n fn has_role(&self) -> bool {\n\n return self & (DataDeviceError::Role as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (DataDeviceError::_Dummy as i32) != 0;\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/data_device.rs", "rank": 19, "score": 105951.50200114222 }, { "content": "pub trait ShellSurfaceTransientSet {\n\n fn has_inactive(&self) -> bool;\n\n fn has_(&self) -> bool;\n\n}\n\n\n\nimpl ShellSurfaceTransientSet for u32 {\n\n fn has_inactive(&self) -> bool {\n\n return self & (ShellSurfaceTransient::Inactive as u32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (ShellSurfaceTransient::_Dummy as u32) != 0;\n\n }\n\n}\n\n\n\nimpl ShellSurfaceTransientSet for i32 {\n\n fn has_inactive(&self) -> bool {\n\n return self & (ShellSurfaceTransient::Inactive as i32) != 0;\n\n }\n\n fn has_(&self) -> bool {\n\n return self & (ShellSurfaceTransient::_Dummy as i32) != 0;\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 20, "score": 105951.50200114222 }, { "content": "pub trait ShellSurfaceResizeSet {\n\n fn has_none(&self) -> bool;\n\n fn has_top(&self) -> bool;\n\n fn has_bottom(&self) -> bool;\n\n fn has_left(&self) -> bool;\n\n fn has_top_left(&self) -> bool;\n\n fn has_bottom_left(&self) -> bool;\n\n fn has_right(&self) -> bool;\n\n fn has_top_right(&self) -> bool;\n\n fn has_bottom_right(&self) -> bool;\n\n}\n\n\n\nimpl ShellSurfaceResizeSet for u32 {\n\n fn has_none(&self) -> bool {\n\n return self & (ShellSurfaceResize::None as u32) != 0;\n\n }\n\n fn has_top(&self) -> bool {\n\n return self & (ShellSurfaceResize::Top as u32) != 0;\n\n }\n\n fn has_bottom(&self) -> bool {\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 21, "score": 105951.50200114222 }, { "content": "pub trait FromRawPtr<T> {\n\n fn from_mut_ptr(ptr: *mut T) -> Result<Self, &'static str>;\n\n}\n\n\n", "file_path": "src/client/base/mod.rs", "rank": 22, "score": 105331.00321720637 }, { "content": "pub trait ShellSurfaceFullscreenMethodSet {\n\n fn has_default(&self) -> bool;\n\n fn has_scale(&self) -> bool;\n\n fn has_driver(&self) -> bool;\n\n fn has_fill(&self) -> bool;\n\n}\n\n\n\nimpl ShellSurfaceFullscreenMethodSet for u32 {\n\n fn has_default(&self) -> bool {\n\n return self & (ShellSurfaceFullscreenMethod::Default as u32) != 0;\n\n }\n\n fn has_scale(&self) -> bool {\n\n return self & (ShellSurfaceFullscreenMethod::Scale as u32) != 0;\n\n }\n\n fn has_driver(&self) -> bool {\n\n return self & (ShellSurfaceFullscreenMethod::Driver as u32) != 0;\n\n }\n\n fn has_fill(&self) -> bool {\n\n return self & (ShellSurfaceFullscreenMethod::Fill as u32) != 0;\n\n }\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 23, "score": 104211.31454590731 }, { "content": "pub trait RegistryEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_registry().as_mut_ptr(),\n\n registry_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_registry(&mut self) -> &mut Registry;\n\n \n\n /// Notify the client of global objects.\n\n /// \n\n /// The event notifies the client that a global 
object with\n\n /// the given name is now available, and it implements the\n\n /// given version of the given interface.\n\n #[allow(unused_variables)]\n\n fn on_global(&mut self, name: u32, interface: String, version: u32) {}\n", "file_path": "src/client/protocol/registry.rs", "rank": 24, "score": 103359.1326640666 }, { "content": "pub trait PointerEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_pointer().as_mut_ptr(),\n\n pointer_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_pointer(&mut self) -> &mut Pointer;\n\n \n\n /// Notification that this seat's pointer is focused on a certain\n\n /// surface.\n\n /// \n\n /// When an seat's focus enters a surface, the pointer image\n\n /// is undefined and a client should respond to this event by setting\n\n /// an appropriate pointer image with the set_cursor request.\n\n #[allow(unused_variables)]\n", "file_path": "src/client/protocol/pointer.rs", "rank": 26, "score": 103359.1326640666 }, { "content": "pub trait TouchEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_touch().as_mut_ptr(),\n\n touch_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_touch(&mut self) -> &mut Touch;\n\n \n\n /// A new touch point has appeared on the surface. This touch point is\n\n /// assigned a unique @id. Future events from this touchpoint reference\n\n /// this ID. The ID ceases to be valid after a touch up event and may be\n\n /// re-used in the future.\n\n #[allow(unused_variables)]\n\n fn on_down(&mut self, serial: u32, time: u32, surface: *mut ffi::wayland::WLObject, id: i32, x: i32, y: i32) {}\n\n \n", "file_path": "src/client/protocol/touch.rs", "rank": 27, "score": 103359.1326640666 }, { "content": "pub trait SeatEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_seat().as_mut_ptr(),\n\n seat_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_seat(&mut self) -> &mut Seat;\n\n \n\n /// This is emitted whenever a seat gains or loses the pointer,\n\n /// keyboard or touch capabilities. The argument is a capability\n\n /// enum containing the complete set of capabilities this seat has.\n\n #[allow(unused_variables)]\n\n fn on_capabilities(&mut self, capabilities: u32) {}\n\n \n\n /// In a multiseat configuration this can be used by the client to help\n\n /// identify which physical devices the seat represents. 
Based on\n\n /// the seat configuration used by the compositor.\n\n #[allow(unused_variables)]\n\n fn on_name(&mut self, name: String) {}\n\n}\n", "file_path": "src/client/protocol/seat.rs", "rank": 28, "score": 103359.1326640666 }, { "content": "pub trait OutputEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_output().as_mut_ptr(),\n\n output_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_output(&mut self) -> &mut Output;\n\n \n\n /// The geometry event describes geometric properties of the output.\n\n /// The event is sent when binding to the output object and whenever\n\n /// any of the properties change.\n\n #[allow(unused_variables)]\n\n fn on_geometry(&mut self, x: i32, y: i32, physical_width: i32, physical_height: i32, subpixel: i32, make: String, model: String, transform: i32) {}\n\n \n\n /// The mode event describes an available mode for the output.\n", "file_path": "src/client/protocol/output.rs", "rank": 29, "score": 103359.1326640666 }, { "content": "pub trait CallbackEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_callback().as_mut_ptr(),\n\n callback_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_callback(&mut self) -> &mut Callback;\n\n \n\n /// Notify the client when the related request is done.\n\n #[allow(unused_variables)]\n\n fn on_done(&mut self, callback_data: u32) {}\n\n}\n", "file_path": "src/client/protocol/callback.rs", "rank": 30, "score": 103359.1326640666 }, { "content": "pub trait SurfaceEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_surface().as_mut_ptr(),\n\n surface_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_surface(&mut self) -> &mut Surface;\n\n \n\n /// This is emitted whenever a surface's creation, movement, or resizing\n\n /// results in some part of it being within the scanout region of an\n\n /// output.\n\n /// \n\n /// Note that a surface may be overlapping with zero or more outputs.\n\n #[allow(unused_variables)]\n\n fn on_enter(&mut self, output: *mut ffi::wayland::WLObject) {}\n\n \n\n /// This is emitted whenever a surface's creation, movement, or resizing\n\n /// results in it no longer having any part of it within the scanout region\n\n /// of an output.\n\n #[allow(unused_variables)]\n\n fn on_leave(&mut self, output: *mut ffi::wayland::WLObject) {}\n\n}\n", "file_path": "src/client/protocol/surface.rs", "rank": 31, "score": 103359.1326640666 }, { "content": "pub trait BufferEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_buffer().as_mut_ptr(),\n\n buffer_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_buffer(&mut self) -> &mut Buffer;\n\n \n\n /// Sent when this wl_buffer is no longer used by the compositor.\n\n /// The client is now free to re-use or destroy this buffer and its\n\n /// backing storage.\n\n /// \n\n /// If a client receives a release event before the frame callback\n\n /// requested in the same wl_surface.commit that attaches this\n\n /// wl_buffer to a surface, then the client is immediately free to\n\n /// re-use the buffer and its 
backing storage, and does not need a\n\n /// second buffer for the next surface content update. Typically\n\n /// this is possible, when the compositor maintains a copy of the\n\n /// wl_surface contents, e.g. as a GL texture. This is an important\n\n /// optimization for GL(ES) compositors with wl_shm clients.\n\n #[allow(unused_variables)]\n\n fn on_release(&mut self) {}\n\n}\n", "file_path": "src/client/protocol/buffer.rs", "rank": 32, "score": 103359.1326640666 }, { "content": "pub trait ShmEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_shm().as_mut_ptr(),\n\n shm_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_shm(&mut self) -> &mut Shm;\n\n \n\n /// Informs the client about a valid pixel format that\n\n /// can be used for buffers. Known formats include\n\n /// argb8888 and xrgb8888.\n\n #[allow(unused_variables)]\n\n fn on_format(&mut self, format: u32) {}\n\n}\n", "file_path": "src/client/protocol/shm.rs", "rank": 33, "score": 103359.1326640666 }, { "content": "pub trait DisplayEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_display().as_mut_ptr() as *mut ffi::wayland::WLProxy,\n\n display_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_display(&mut self) -> &mut Display;\n\n \n\n /// The error event is sent out when a fatal (non-recoverable)\n\n /// error has occurred. The object_id argument is the object\n\n /// where the error occurred, most often in response to a request\n\n /// to that object. The code identifies the error and is defined\n\n /// by the object interface. As such, each interface defines its\n\n /// own set of error codes. The message is an brief description\n\n /// of the error, for (debugging) convenience.\n", "file_path": "src/client/protocol/display.rs", "rank": 34, "score": 103359.1326640666 }, { "content": "pub trait DataSourceEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_data_source().as_mut_ptr(),\n\n data_source_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_data_source(&mut self) -> &mut DataSource;\n\n \n\n /// Sent when a target accepts pointer_focus or motion events. If\n\n /// a target does not accept any of the offered types, type is NULL.\n\n /// \n\n /// Used for feedback during drag-and-drop.\n\n #[allow(unused_variables)]\n\n fn on_target(&mut self, mime_type: String) {}\n\n \n", "file_path": "src/client/protocol/data_source.rs", "rank": 35, "score": 99768.34274170341 }, { "content": "pub trait ShellSurfaceEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_shell_surface().as_mut_ptr(),\n\n shell_surface_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_shell_surface(&mut self) -> &mut ShellSurface;\n\n \n\n /// Ping a client to check if it is receiving events and sending\n\n /// requests. 
A client is expected to reply with a pong request.\n\n #[allow(unused_variables)]\n\n fn on_ping(&mut self, serial: u32) {}\n\n \n\n /// The configure event asks the client to resize its surface.\n\n /// \n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 36, "score": 99768.34274170341 }, { "content": "pub trait DataDeviceEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_data_device().as_mut_ptr(),\n\n data_device_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_data_device(&mut self) -> &mut DataDevice;\n\n \n\n /// The data_offer event introduces a new wl_data_offer object,\n\n /// which will subsequently be used in either the\n\n /// data_device.enter event (for drag-and-drop) or the\n\n /// data_device.selection event (for selections). Immediately\n\n /// following the data_device_data_offer event, the new data_offer\n\n /// object will send out data_offer.offer events to describe the\n\n /// mime types it offers.\n", "file_path": "src/client/protocol/data_device.rs", "rank": 37, "score": 99768.34274170341 }, { "content": "pub trait DataOfferEventHandler: Sized {\n\n fn connect_dispatcher(&mut self) {\n\n unsafe {\n\n ffi::wayland::wl_proxy_add_dispatcher(\n\n self.get_data_offer().as_mut_ptr(),\n\n data_offer_event_dispatcher::<Self>,\n\n self as *mut Self as *mut c_void,\n\n ptr::null_mut());\n\n }\n\n }\n\n\n\n fn get_data_offer(&mut self) -> &mut DataOffer;\n\n \n\n /// Sent immediately after creating the wl_data_offer object. One\n\n /// event per offered mime type.\n\n #[allow(unused_variables)]\n\n fn on_offer(&mut self, mime_type: String) {}\n\n}\n", "file_path": "src/client/protocol/data_offer.rs", "rank": 38, "score": 99768.34274170341 }, { "content": "fn main() {\n\n let mut display = Display::connect(None).unwrap();\n\n let mut info = Info::new(display.get_registry().unwrap());\n\n RegistryEventHandler::connect_dispatcher(&mut info);\n\n\n\n while info.roundtrip {\n\n info.roundtrip = false;\n\n display.roundtrip();\n\n }\n\n info.print_it();\n\n}\n", "file_path": "examples/weston-info.rs", "rank": 39, "score": 64914.829375500805 }, { "content": "fn main() {\n\n let mut display = Display::connect(None).unwrap();\n\n let mut registry = MyRegistry::new(&mut display);\n\n registry.connect_dispatcher();\n\n display.roundtrip();\n\n\n\n let shm = registry.take_shm();\n\n let mut shell = registry.take_shell();\n\n let mut compositor = registry.take_compositor();\n\n\n\n let mut surface = compositor.create_surface().unwrap();\n\n let mut shell_surface = MyShellSurface::new(&mut shell, &mut surface);\n\n shell_surface.connect_dispatcher();\n\n\n\n let mut window = Window::new(shm, surface, 250, 250);\n\n window.redraw(0);\n\n\n\n loop { display.dispatch(); }\n\n}\n", "file_path": "examples/simple-shm.rs", "rank": 40, "score": 64914.829375500805 }, { "content": "#[repr(C)]\n\nenum SeatEvent {\n\n Capabilities = 0,\n\n Name = 1,\n\n}\n\n\n\nimpl FromPrimitive for SeatEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SeatEvent::Capabilities),\n\n 1 => Some(SeatEvent::Name),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/seat.rs", "rank": 42, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum SeatRequest {\n\n GetPointer = 0,\n\n GetKeyboard = 1,\n\n 
GetTouch = 2,\n\n}\n\n\n\nimpl FromPrimitive for SeatRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SeatRequest::GetPointer),\n\n 1 => Some(SeatRequest::GetKeyboard),\n\n 2 => Some(SeatRequest::GetTouch),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n", "file_path": "src/client/protocol/seat.rs", "rank": 43, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum SurfaceRequest {\n\n Destroy = 0,\n\n Attach = 1,\n\n Damage = 2,\n\n Frame = 3,\n\n SetOpaqueRegion = 4,\n\n SetInputRegion = 5,\n\n Commit = 6,\n\n SetBufferTransform = 7,\n\n SetBufferScale = 8,\n\n}\n\n\n\nimpl FromPrimitive for SurfaceRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SurfaceRequest::Destroy),\n\n 1 => Some(SurfaceRequest::Attach),\n\n 2 => Some(SurfaceRequest::Damage),\n\n 3 => Some(SurfaceRequest::Frame),\n\n 4 => Some(SurfaceRequest::SetOpaqueRegion),\n", "file_path": "src/client/protocol/surface.rs", "rank": 44, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum BufferEvent {\n\n Release = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for BufferEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(BufferEvent::Release),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/buffer.rs", "rank": 45, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum TouchEvent {\n\n Down = 0,\n\n Up = 1,\n\n Motion = 2,\n\n Frame = 3,\n\n Cancel = 4,\n\n}\n\n\n\nimpl FromPrimitive for TouchEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(TouchEvent::Down),\n\n 1 => Some(TouchEvent::Up),\n\n 2 => Some(TouchEvent::Motion),\n\n 3 => Some(TouchEvent::Frame),\n\n 4 => Some(TouchEvent::Cancel),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/touch.rs", "rank": 46, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum CallbackEvent {\n\n Done = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for CallbackEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(CallbackEvent::Done),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/callback.rs", "rank": 47, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum SurfaceEvent {\n\n Enter = 0,\n\n Leave = 1,\n\n}\n\n\n\nimpl FromPrimitive for SurfaceEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SurfaceEvent::Enter),\n\n 1 => Some(SurfaceEvent::Leave),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/surface.rs", "rank": 48, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum RegistryRequest {\n\n Bind = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for RegistryRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(RegistryRequest::Bind),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": 
"src/client/protocol/registry.rs", "rank": 49, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum PointerEvent {\n\n Enter = 0,\n\n Leave = 1,\n\n Motion = 2,\n\n Button = 3,\n\n Axis = 4,\n\n}\n\n\n\nimpl FromPrimitive for PointerEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(PointerEvent::Enter),\n\n 1 => Some(PointerEvent::Leave),\n\n 2 => Some(PointerEvent::Motion),\n\n 3 => Some(PointerEvent::Button),\n\n 4 => Some(PointerEvent::Axis),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 50, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum SubsurfaceRequest {\n\n Destroy = 0,\n\n SetPosition = 1,\n\n PlaceAbove = 2,\n\n PlaceBelow = 3,\n\n SetSync = 4,\n\n SetDesync = 5,\n\n}\n\n\n\nimpl FromPrimitive for SubsurfaceRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SubsurfaceRequest::Destroy),\n\n 1 => Some(SubsurfaceRequest::SetPosition),\n\n 2 => Some(SubsurfaceRequest::PlaceAbove),\n\n 3 => Some(SubsurfaceRequest::PlaceBelow),\n\n 4 => Some(SubsurfaceRequest::SetSync),\n\n 5 => Some(SubsurfaceRequest::SetDesync),\n\n _ => None\n\n }\n", "file_path": "src/client/protocol/subsurface.rs", "rank": 51, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum RegionRequest {\n\n Destroy = 0,\n\n Add = 1,\n\n Subtract = 2,\n\n}\n\n\n\nimpl FromPrimitive for RegionRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(RegionRequest::Destroy),\n\n 1 => Some(RegionRequest::Add),\n\n 2 => Some(RegionRequest::Subtract),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n", "file_path": "src/client/protocol/region.rs", "rank": 52, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum TouchRequest {\n\n Release = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for TouchRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(TouchRequest::Release),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/touch.rs", "rank": 53, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum RegistryEvent {\n\n Global = 0,\n\n GlobalRemove = 1,\n\n}\n\n\n\nimpl FromPrimitive for RegistryEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(RegistryEvent::Global),\n\n 1 => Some(RegistryEvent::GlobalRemove),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/registry.rs", "rank": 54, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum ShmRequest {\n\n CreatePool = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for ShmRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShmRequest::CreatePool),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/shm.rs", "rank": 55, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum CompositorRequest {\n\n CreateSurface = 0,\n\n CreateRegion = 1,\n\n}\n\n\n\nimpl FromPrimitive for CompositorRequest {\n\n fn from_u32(num: u32) 
-> Option<Self> {\n\n return match num {\n\n 0 => Some(CompositorRequest::CreateSurface),\n\n 1 => Some(CompositorRequest::CreateRegion),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/compositor.rs", "rank": 57, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum ShmEvent {\n\n Format = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for ShmEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShmEvent::Format),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/shm.rs", "rank": 58, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum PointerRequest {\n\n SetCursor = 0,\n\n Release = 1,\n\n}\n\n\n\nimpl FromPrimitive for PointerRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(PointerRequest::SetCursor),\n\n 1 => Some(PointerRequest::Release),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 59, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum SubcompositorRequest {\n\n Destroy = 0,\n\n GetSubsurface = 1,\n\n}\n\n\n\nimpl FromPrimitive for SubcompositorRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SubcompositorRequest::Destroy),\n\n 1 => Some(SubcompositorRequest::GetSubsurface),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/subcompositor.rs", "rank": 60, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum DisplayRequest {\n\n Sync = 0,\n\n GetRegistry = 1,\n\n}\n\n\n\nimpl FromPrimitive for DisplayRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DisplayRequest::Sync),\n\n 1 => Some(DisplayRequest::GetRegistry),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/display.rs", "rank": 61, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum OutputEvent {\n\n Geometry = 0,\n\n Mode = 1,\n\n Done = 2,\n\n Scale = 3,\n\n}\n\n\n\nimpl FromPrimitive for OutputEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(OutputEvent::Geometry),\n\n 1 => Some(OutputEvent::Mode),\n\n 2 => Some(OutputEvent::Done),\n\n 3 => Some(OutputEvent::Scale),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n", "file_path": "src/client/protocol/output.rs", "rank": 62, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum BufferRequest {\n\n Destroy = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for BufferRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(BufferRequest::Destroy),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/buffer.rs", "rank": 63, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum ShellRequest {\n\n GetShellSurface = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for ShellRequest {\n\n fn 
from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShellRequest::GetShellSurface),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/shell.rs", "rank": 64, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum DisplayEvent {\n\n Error = 0,\n\n DeleteId = 1,\n\n}\n\n\n\nimpl FromPrimitive for DisplayEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DisplayEvent::Error),\n\n 1 => Some(DisplayEvent::DeleteId),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/display.rs", "rank": 65, "score": 60060.9464309942 }, { "content": "#[repr(C)]\n\nenum ShmPoolRequest {\n\n CreateBuffer = 0,\n\n Destroy = 1,\n\n Resize = 2,\n\n}\n\n\n\nimpl FromPrimitive for ShmPoolRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShmPoolRequest::CreateBuffer),\n\n 1 => Some(ShmPoolRequest::Destroy),\n\n 2 => Some(ShmPoolRequest::Resize),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n", "file_path": "src/client/protocol/shm_pool.rs", "rank": 66, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataOfferRequest {\n\n Accept = 0,\n\n Receive = 1,\n\n Destroy = 2,\n\n}\n\n\n\nimpl FromPrimitive for DataOfferRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataOfferRequest::Accept),\n\n 1 => Some(DataOfferRequest::Receive),\n\n 2 => Some(DataOfferRequest::Destroy),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n", "file_path": "src/client/protocol/data_offer.rs", "rank": 67, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataSourceEvent {\n\n Target = 0,\n\n Send = 1,\n\n Cancelled = 2,\n\n}\n\n\n\nimpl FromPrimitive for DataSourceEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataSourceEvent::Target),\n\n 1 => Some(DataSourceEvent::Send),\n\n 2 => Some(DataSourceEvent::Cancelled),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/data_source.rs", "rank": 68, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataOfferEvent {\n\n Offer = 0,\n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for DataOfferEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataOfferEvent::Offer),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/data_offer.rs", "rank": 69, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataSourceRequest {\n\n Offer = 0,\n\n Destroy = 1,\n\n}\n\n\n\nimpl FromPrimitive for DataSourceRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataSourceRequest::Offer),\n\n 1 => Some(DataSourceRequest::Destroy),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/data_source.rs", "rank": 70, "score": 57881.66228816406 }, { "content": 
"#[repr(C)]\n\nenum ShellSurfaceRequest {\n\n Pong = 0,\n\n Move = 1,\n\n Resize = 2,\n\n SetToplevel = 3,\n\n SetTransient = 4,\n\n SetFullscreen = 5,\n\n SetPopup = 6,\n\n SetMaximized = 7,\n\n SetTitle = 8,\n\n SetClass = 9,\n\n}\n\n\n\nimpl FromPrimitive for ShellSurfaceRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShellSurfaceRequest::Pong),\n\n 1 => Some(ShellSurfaceRequest::Move),\n\n 2 => Some(ShellSurfaceRequest::Resize),\n\n 3 => Some(ShellSurfaceRequest::SetToplevel),\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 71, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataDeviceEvent {\n\n DataOffer = 0,\n\n Enter = 1,\n\n Leave = 2,\n\n Motion = 3,\n\n Drop = 4,\n\n Selection = 5,\n\n}\n\n\n\nimpl FromPrimitive for DataDeviceEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataDeviceEvent::DataOffer),\n\n 1 => Some(DataDeviceEvent::Enter),\n\n 2 => Some(DataDeviceEvent::Leave),\n\n 3 => Some(DataDeviceEvent::Motion),\n\n 4 => Some(DataDeviceEvent::Drop),\n\n 5 => Some(DataDeviceEvent::Selection),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/data_device.rs", "rank": 72, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum ShellSurfaceEvent {\n\n Ping = 0,\n\n Configure = 1,\n\n PopupDone = 2,\n\n}\n\n\n\nimpl FromPrimitive for ShellSurfaceEvent {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShellSurfaceEvent::Ping),\n\n 1 => Some(ShellSurfaceEvent::Configure),\n\n 2 => Some(ShellSurfaceEvent::PopupDone),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 73, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataDeviceRequest {\n\n StartDrag = 0,\n\n SetSelection = 1,\n\n Release = 2,\n\n}\n\n\n\nimpl FromPrimitive for DataDeviceRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataDeviceRequest::StartDrag),\n\n 1 => Some(DataDeviceRequest::SetSelection),\n\n 2 => Some(DataDeviceRequest::Release),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n", "file_path": "src/client/protocol/data_device.rs", "rank": 74, "score": 57881.66228816406 }, { "content": "#[repr(C)]\n\nenum DataDeviceManagerRequest {\n\n CreateDataSource = 0,\n\n GetDataDevice = 1,\n\n}\n\n\n\nimpl FromPrimitive for DataDeviceManagerRequest {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataDeviceManagerRequest::CreateDataSource),\n\n 1 => Some(DataDeviceManagerRequest::GetDataDevice),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/client/protocol/data_device_manager.rs", "rank": 75, "score": 55970.42577803004 }, { "content": "impl WLArgument {\n\n pub fn int(&mut self) -> *mut int32_t {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn uint(&mut self) -> *mut uint32_t {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn fixed_point(&mut self) -> *mut int32_t {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn string(&mut self) -> *mut *const ::libc::c_char {\n\n unsafe { 
::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn object(&mut self) -> *mut *mut WLObject {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n", "file_path": "src/ffi/wayland.rs", "rank": 76, "score": 37632.61579817945 }, { "content": "\n\n pub fn new_id(&mut self) -> *mut *mut WLProxy {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn array(&mut self) -> *mut *mut WLArray {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n\n\n pub fn file_descriptor(&mut self) -> *mut int32_t {\n\n unsafe { ::std::mem::transmute(self) }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub type wl_dispatcher_func_t = extern fn(*mut c_void, *mut c_void,\n\n uint32_t, *const WLMessage,\n\n *mut WLArgument) -> c_int;\n\n\n\n#[repr(C)]\n", "file_path": "src/ffi/wayland.rs", "rank": 77, "score": 37631.89960217949 }, { "content": "// Copyright (c) <2015> <lummax>\n\n// Licensed under MIT (http://opensource.org/licenses/MIT)\n\n\n\n#![allow(dead_code)]\n\n\n\nuse libc::{c_int, c_char, c_void, int32_t, uint32_t};\n\n\n\n#[repr(C)]\n\npub struct WLInterface {\n\n pub name: *const c_char,\n\n pub version: c_int,\n\n pub method_count: c_int,\n\n pub methods: *const WLMessage,\n\n pub event_count: c_int,\n\n pub events: *const WLMessage,\n\n}\n\n\n\n#[repr(C)]\n\npub struct WLMessage;\n\n\n", "file_path": "src/ffi/wayland.rs", "rank": 78, "score": 37628.217731657605 }, { "content": "pub type wl_log_func_t = extern fn(_: *const c_char, ...);\n\n\n\n#[link(name = \"wayland-client\")]\n\nextern {\n\n pub fn wl_event_queue_destroy(queue: *mut WLEventQueue);\n\n\n\n pub fn wl_proxy_marshal(proxy: *mut WLProxy, opcode: uint32_t, ...);\n\n pub fn wl_proxy_marshal_array(proxy: *mut WLProxy, opcode: uint32_t,\n\n arguments: *mut WLArgument);\n\n pub fn wl_proxy_create(factory: *mut WLProxy,\n\n interface: *mut WLInterface) -> *mut WLProxy;\n\n pub fn wl_proxy_marshal_constructor(proxy: *mut WLProxy,\n\n opcode: uint32_t,\n\n interface: *const WLInterface,\n\n ...) 
-> *mut WLProxy;\n\n pub fn wl_proxy_marshal_array_constructor(proxy: *mut WLProxy,\n\n opcode: uint32_t,\n\n arguments: *mut WLArgument,\n\n interface: *const WLInterface) -> *mut WLProxy;\n\n\n", "file_path": "src/ffi/wayland.rs", "rank": 79, "score": 37628.09001127281 }, { "content": " pub fn wl_proxy_destroy(proxy: *mut WLProxy);\n\n pub fn wl_proxy_add_listener(proxy: *mut WLProxy,\n\n implementation: *mut extern fn(),\n\n data: *mut c_void) -> c_int;\n\n pub fn wl_proxy_get_listener(proxy: *mut WLProxy) -> *const c_void;\n\n pub fn wl_proxy_add_dispatcher(proxy: *mut WLProxy,\n\n dispatcher_func: wl_dispatcher_func_t,\n\n dispatcher_data: *mut c_void,\n\n data: *mut c_void) -> c_int;\n\n pub fn wl_proxy_set_user_data(proxy: *mut WLProxy, user_data: *mut c_void);\n\n pub fn wl_proxy_get_user_data(proxy: *mut WLProxy) -> *mut c_void;\n\n pub fn wl_proxy_get_id(proxy: *mut WLProxy) -> uint32_t;\n\n pub fn wl_proxy_get_class(proxy: *mut WLProxy) -> *const c_char;\n\n pub fn wl_proxy_set_queue(proxy: *mut WLProxy, queue: *mut WLEventQueue);\n\n\n\n pub fn wl_display_connect(name: *const c_char) -> *mut WLDisplay;\n\n pub fn wl_display_connect_to_fd(fd: c_int) -> *mut WLDisplay;\n\n pub fn wl_display_disconnect(display: *mut WLDisplay);\n\n pub fn wl_display_get_fd(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_dispatch(display: *mut WLDisplay) -> c_int;\n", "file_path": "src/ffi/wayland.rs", "rank": 80, "score": 37626.79225089854 }, { "content": "#[repr(C)]\n\npub struct WLArray;\n\n\n\n#[repr(C)]\n\npub struct WLProxy;\n\n\n\n#[repr(C)]\n\npub struct WLDisplay;\n\n\n\n#[repr(C)]\n\npub struct WLEventQueue;\n\n\n\n#[repr(C)]\n\npub struct WLObject;\n\n\n\n#[repr(C)]\n\npub struct WLArgument {\n\n data: u64,\n\n}\n\n\n", "file_path": "src/ffi/wayland.rs", "rank": 81, "score": 37625.61110991797 }, { "content": " pub fn wl_display_dispatch_queue(display: *mut WLDisplay,\n\n queue: *mut WLEventQueue) -> c_int;\n\n pub fn wl_display_dispatch_queue_pending(display: *mut WLDisplay,\n\n queue: *mut WLEventQueue) -> c_int;\n\n pub fn wl_display_dispatch_pending(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_get_error(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_get_protocol_error(display: *mut WLDisplay,\n\n interface: *mut *mut WLInterface,\n\n id: *mut uint32_t) -> uint32_t;\n\n\n\n pub fn wl_display_flush(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_roundtrip_queue(display: *mut WLDisplay,\n\n queue: *mut WLEventQueue) -> c_int;\n\n pub fn wl_display_roundtrip(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_create_queue(display: *mut WLDisplay) -> *mut WLEventQueue;\n\n\n\n pub fn wl_display_prepare_read_queue(display: *mut WLDisplay,\n\n queue: *mut WLEventQueue) -> c_int;\n\n pub fn wl_display_prepare_read(display: *mut WLDisplay) -> c_int;\n\n pub fn wl_display_cancel_read(display: *mut WLDisplay);\n\n pub fn wl_display_read_events(display: *mut WLDisplay) -> c_int;\n\n\n\n pub fn wl_log_set_handler_client(handler: wl_log_func_t);\n\n}\n", "file_path": "src/ffi/wayland.rs", "rank": 82, "score": 37625.29846742798 }, { "content": "// Copyright (c) <2015> <lummax>\n\n// Licensed under MIT (http://opensource.org/licenses/MIT)\n\n\n\npub mod wayland;\n", "file_path": "src/ffi/mod.rs", "rank": 83, "score": 37624.47524047041 }, { "content": "def parse_enum(node):\n\n return Enum(node.get('name', ''),\n\n parse_description(node.find('description')),\n", "file_path": "tools/scanner/parser.py", "rank": 84, "score": 29166.280406085174 }, { "content": 
"def context_enum(node, interface):\n\n return Enum(name=classify_name(interface) + classify_name(node.name),\n\n wl_name=de_keyword_ize(node.name),\n\n description=node.description,\n", "file_path": "tools/scanner/context.py", "rank": 85, "score": 29166.280406085174 }, { "content": "def generate_enum(enum):\n\n entries = list({'entry_description': process_description(entry.summary),\n\n 'entry_name': entry.name,\n\n 'wl_name': entry.wl_name.lstrip('_'),\n\n 'entry_value': entry.value} for entry in enum.entries)\n\n if len(enum.entries) == 1:\n\n entries.append({\n\n 'entry_description': 'Needed due to a bug in rustc',\n\n 'entry_name': '_Dummy',\n\n })\n\n data = {\n\n 'description': process_description(enum.description),\n\n 'is_pub': True,\n\n 'name': enum.name,\n\n 'entries': entries,\n\n }\n", "file_path": "tools/scanner/generator/client.py", "rank": 86, "score": 28296.186678467722 }, { "content": "def generate_event_request_enum(name, items):\n\n if not items: return ''\n\n entries = list({'entry_name': item.name,\n\n 'entry_value': item.opcode} for item in items)\n\n if len(items) == 1:\n\n entries.append({\n\n 'entry_name': '_Dummy',\n\n })\n\n data = {\n\n 'name': name,\n\n 'entries': entries,\n\n }\n", "file_path": "tools/scanner/generator/client.py", "rank": 87, "score": 26702.971002389768 }, { "content": "extern {\n\n static wl_pointer_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum PointerError {\n\n /// given wl_surface has another role\n\n Role = 0,\n\n \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for PointerError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(PointerError::Role),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 90, "score": 35.67636539138801 }, { "content": "extern {\n\n static wl_subcompositor_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum SubcompositorError {\n\n /// the to-be sub-surface is invalid\n\n BadSurface = 0,\n\n \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for SubcompositorError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SubcompositorError::BadSurface),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/subcompositor.rs", "rank": 91, "score": 35.67636539138802 }, { "content": "extern {\n\n static wl_shell_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum ShellError {\n\n /// given wl_surface has another role\n\n Role = 0,\n\n \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for ShellError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(ShellError::Role),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/shell.rs", "rank": 92, "score": 35.67636539138802 }, { "content": "extern {\n\n static wl_subsurface_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum SubsurfaceError {\n\n /// wl_surface is not a sibling or the parent\n\n BadSurface = 0,\n\n \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for SubsurfaceError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => 
Some(SubsurfaceError::BadSurface),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/subsurface.rs", "rank": 93, "score": 35.46309556238052 }, { "content": "extern {\n\n static wl_data_device_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum DataDeviceError {\n\n /// given wl_surface has another role\n\n Role = 0,\n\n \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for DataDeviceError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(DataDeviceError::Role),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/data_device.rs", "rank": 94, "score": 34.84003900605661 }, { "content": "extern {\n\n static wl_surface_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n/// These errors can be emitted in response to wl_surface requests.\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum SurfaceError {\n\n /// buffer scale value is invalid\n\n InvalidScale = 0,\n\n /// buffer transform value is invalid\n\n InvalidTransform = 1,\n\n}\n\n\n\nimpl FromPrimitive for SurfaceError {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(SurfaceError::InvalidScale),\n\n 1 => Some(SurfaceError::InvalidTransform),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/surface.rs", "rank": 95, "score": 32.39888320353717 }, { "content": " }\n\n}\n\n\n\n\n\n/// Describes the physical state of a button which provoked the button\n\n/// event.\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum PointerButtonState {\n\n /// The button is not pressed\n\n Released = 0,\n\n /// The button is pressed\n\n Pressed = 1,\n\n}\n\n\n\nimpl FromPrimitive for PointerButtonState {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(PointerButtonState::Released),\n\n 1 => Some(PointerButtonState::Pressed),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 96, "score": 31.56897075475823 }, { "content": "extern {\n\n static wl_output_interface: ffi::wayland::WLInterface;\n\n}\n\n\n\n/// This enumeration describes how the physical\n\n/// pixels on an output are layed out.\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum OutputSubpixel {\n\n Unknown = 0,\n\n None = 1,\n\n HorizontalRgb = 2,\n\n HorizontalBgr = 3,\n\n VerticalRgb = 4,\n\n VerticalBgr = 5,\n\n}\n\n\n\nimpl FromPrimitive for OutputSubpixel {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n", "file_path": "src/client/protocol/output.rs", "rank": 97, "score": 29.867935889220668 }, { "content": " }\n\n}\n\n\n\n\n\n/// Describes the axis types of scroll events.\n\n#[repr(C)]\n\n#[derive(Debug)]\n\npub enum PointerAxis {\n\n VerticalScroll = 0,\n\n HorizontalScroll = 1,\n\n}\n\n\n\nimpl FromPrimitive for PointerAxis {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0 => Some(PointerAxis::VerticalScroll),\n\n 1 => Some(PointerAxis::HorizontalScroll),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/pointer.rs", "rank": 98, "score": 
28.04917815401751 }, { "content": " \n\n _Dummy,\n\n}\n\n\n\nimpl FromPrimitive for ShellSurfaceTransient {\n\n fn from_u32(num: u32) -> Option<Self> {\n\n return match num {\n\n 0x1 => Some(ShellSurfaceTransient::Inactive),\n\n _ => None\n\n }\n\n }\n\n\n\n fn from_i32(num: i32) -> Option<Self> {\n\n return Self::from_u32(num as u32);\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol/shell_surface.rs", "rank": 99, "score": 27.750767827297274 } ]
Rust
src/lib.rs
norse-rs/norse-billow
cfef6391371ee44137a977a44b564d5920fd2a06
/*! Allocator for SoA data layout. `billow` allows to define a [`BlockLayout`](struct.BlockLayout.html) which encodes a SoA data layout. This layout can be used to subdivide user allocated memory blocks in a tight and aligned fashion. ## Struct of Arrays Struct of Arrays (SoA) describes a deinterleaved memory layout of struct fields. Each array has the same number of elements. This layout is usually better suited for SIMD operations, ```ignore +-----+-----+-----+----- | A | A | A | ... +-----+-----+-----+----- +-------+-------+-------+----- | B | B | B | ... +-------+-------+-------+----- +---+---+---+----- | C | C | C | ... +---+---+---+----- ``` ## Examples Allocating an aligned memory block from the system allocator and define a layout for the following struct in SoA layout: ```rust type Transform = [[f32; 4]; 4]; type Velocity = [f32; 3]; struct Block<'a> { transforms: &'a mut [Transform], velocity: &'a mut [Velocity], } ``` ```rust # use norse_billow as billow; # use std::alloc::{self, Layout, LayoutErr}; # use std::ptr::NonNull; # type Transform = [[f32; 4]; 4]; # type Velocity = [f32; 3]; # fn main() -> Result<(), LayoutErr> { const NUM_ELEMENTS: usize = 128; // Define SoA layout. let mut layout = billow::BlockLayout::build(); let transform_id = layout.add::<Transform>(); let velocity_id = layout.add::<Velocity>(); let block_layout = layout.finish(); // Allocate memory block for holding the elements. let layout = block_layout.layout(); let size = layout.size() * NUM_ELEMENTS; let memory = unsafe { alloc::alloc(Layout::from_size_align(size, layout.align())?) }; let block = block_layout.apply(NonNull::new(memory).unwrap(), layout.size() * 128); assert_eq!(block.len(), NUM_ELEMENTS); // Get struct fields. let transforms = unsafe { block.as_slice::<Transform>(transform_id) }; let velocities = unsafe { block.as_slice::<Velocity>(velocity_id) }; assert_eq!(transforms.len(), velocities.len()); # Ok(()) # } ``` */ use indexmap::IndexMap; use std::alloc::Layout; use std::ops::Range; use std::ptr::NonNull; use std::slice; pub type LayoutSlot = usize; pub struct LayoutBuilder { layouts: Vec<(LayoutSlot, Layout)>, max_alignment: usize, element_size: usize, } impl LayoutBuilder { pub fn add<T>(&mut self) -> LayoutSlot { let layout = Layout::new::<T>(); self.max_alignment = self.max_alignment.max(layout.align()); self.element_size += layout.size(); let slot = self.layouts.len(); self.layouts.push((slot, layout)); slot } pub fn finish(mut self) -> BlockLayout { self.layouts .sort_by(|(slot_a, layout_a), (slot_b, layout_b)| { layout_a .align() .cmp(&layout_b.align()) .reverse() .then(slot_a.cmp(slot_b)) }); &self.layouts; let slot_map = self .layouts .iter() .enumerate() .map(|(i, (slot, _))| (*slot, i)) .collect(); &slot_map; let sub_layouts = self.layouts.into_iter().map(|(_, layout)| layout).collect(); let layout = Layout::from_size_align(self.element_size, self.max_alignment).unwrap(); BlockLayout { slot_map, layout, sub_layouts, } } } pub struct BlockLayout { slot_map: IndexMap<LayoutSlot, usize>, layout: Layout, sub_layouts: Vec<Layout>, } impl BlockLayout { pub fn build() -> LayoutBuilder { LayoutBuilder { layouts: Vec::new(), max_alignment: 1, element_size: 0, } } pub fn layout(&self) -> Layout { self.layout } pub fn apply(&self, data: NonNull<u8>, size: usize) -> Block { if self.sub_layouts.is_empty() { return Block { range: 0..0, len: 0, slices: Vec::new(), }; } assert_eq!(self.layout.align() & (self.layout.align() - 1), 0); let ptr = data.as_ptr(); let start = (ptr as usize + 
self.layout.align() - 1) & !(self.layout.align() - 1); let end = (ptr as usize + size) & !(self.layout.align() - 1); let initial_offset = start - ptr as usize; let size_aligned = end - start; let len = if self.layout.size() == 0 { !0 } else { size_aligned / self.layout.size() }; let mut offset = 0; let mut offsets = Vec::with_capacity(self.sub_layouts.len()); for layout in &self.sub_layouts { assert_eq!(offset % layout.align(), 0); offsets.push(offset); offset += layout.size() * len; } let mut slices = Vec::with_capacity(self.sub_layouts.len()); for slot in self.slot_map.values() { let offset = offsets[*slot]; slices.push(NonNull::new(unsafe { (start as *mut u8).offset(offset as _) }).unwrap()); } Block { range: initial_offset..initial_offset + size_aligned, len, slices, } } } pub struct Block { range: Range<usize>, len: usize, slices: Vec<NonNull<u8>>, } impl Block { pub fn range(&self) -> Range<usize> { self.range.clone() } pub fn len(&self) -> usize { self.len } pub unsafe fn as_raw<T>(&self, slot: LayoutSlot) -> (*mut T, usize) { let slice = &self.slices[slot]; (slice.cast::<T>().as_ptr(), self.len) } pub unsafe fn as_slice<T: Copy>(&self, slot: LayoutSlot) -> &mut [T] { let slice = &self.slices[slot]; slice::from_raw_parts_mut(slice.cast::<T>().as_ptr(), self.len) } } #[cfg(test)] mod test { use super::*; #[test] fn empty() { let layout = BlockLayout::build().finish(); let mut block = [0; 32]; layout.apply(NonNull::new(block.as_mut_ptr()).unwrap(), 32); } #[test] fn single_zst() { struct Foo; let (layout, foo) = { let mut layout = BlockLayout::build(); let foo = layout.add::<Foo>(); (layout.finish(), foo) }; let mut data = [0; 32]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 32); unsafe { block.as_raw::<Foo>(foo); } } #[test] fn ordering() { #[derive(Copy, Clone)] struct Small { _a: u8, _b: u8, _c: u8, } #[derive(Copy, Clone)] struct Large { _a: f32, _b: [u64; 8], } let (layout, small, large) = { let mut layout = BlockLayout::build(); let small = layout.add::<Small>(); let large = layout.add::<Large>(); (layout.finish(), small, large) }; let mut data = [0; 512]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 512); let small_layout = Layout::new::<Small>(); let large_layout = Layout::new::<Large>(); assert_eq!( layout.layout().align(), small_layout.align().max(large_layout.align()) ); assert_eq!( layout.layout().size(), small_layout.size() + large_layout.size() ); unsafe { block.as_slice::<Small>(small); block.as_slice::<Large>(large); } } }
/*! Allocator for SoA data layout. `billow` allows to define a [`BlockLayout`](struct.BlockLayout.html) which encodes a SoA data layout. This layout can be used to subdivide user allocated memory blocks in a tight and aligned fashion. ## Struct of Arrays Struct of Arrays (SoA) describes a deinterleaved memory layout of struct fields. Each array has the same number of elements. This layout is usually better suited for SIMD operations, ```ignore +-----+-----+-----+----- | A | A | A | ... +-----+-----+-----+----- +-------+-------+-------+----- | B | B | B | ... +-------+-------+-------+----- +---+---+---+----- | C | C | C | ... +---+---+---+----- ``` ## Examples Allocating an aligned memory block from the system allocator and define a layout for the following struct in SoA layout: ```rust type Transform = [[f32; 4]; 4]; type Velocity = [f32; 3]; struct Block<'a> { transforms: &'a mut [Transform], velocity: &'a mut [Velocity], } ``` ```rust # use norse_billow as billow; # use std::alloc::{self, Layout, LayoutErr}; # use std::ptr::NonNull; # type Transform = [[f32; 4]; 4]; # type Velocity = [f32; 3]; # fn main() -> Result<(), LayoutErr> { const NUM_ELEMENTS: usize = 128; // Define SoA layout. let mut layout = billow::BlockLayout::build(); let transform_id = layout.add::<Transform>(); let velocity_id = layout.add::<Velocity>(); let block_layout = layout.finish(); // Allocate memory block for holding the elements. let layout = block_layout.layout(); let size = layout.size() * NUM_ELEMENTS; let memory = unsafe { alloc::alloc(Layout::from_size_align(size, layout.align())?) }; let block = block_layout.apply(NonNull::new(memory).unwrap(), layout.size() * 128); assert_eq!(block.len(), NUM_ELEMENTS); // Get struct fields. let transforms = unsafe { block.as_slice::<Transform>(transform_id) }; let velocities = unsafe { block.as_slice::<Velocity>(velocity_id) }; assert_eq!(transforms.len(), velocities.len()); # Ok(()) # } ``` */ use indexmap::IndexMap; use std::alloc::Layout; use std::ops::Range; use std::ptr::NonNull; use std::slice; pub type LayoutSlot = usize; pub struct LayoutBuilder { layouts: Vec<(LayoutSlot, Layout)>, max_alignment: usize, element_size: usize, } impl LayoutBuilder { pub fn add<T>(&mut self) -> LayoutSlot { let layout = Layout::new::<T>(); self.max_alignment = self.max_alignment.max(layout.align()); self.element_size += layout.size(); let slot = self.layouts.len(); self.layouts.push((slot, layout)); slot } pub fn finish(mut self) -> BlockLayout { self.layouts .sort_by(|(slot_a, layout_a), (slot_b, layout_b)| { layout_a .align() .cmp(&layout_b.align()) .reverse() .then(slot_a.cmp(slot_b)) }); &self.layouts; let slot_map = self .layouts .iter() .enumerate() .map(|(i, (slot, _))| (*slot, i)) .collect(); &slot_map; let sub_layouts = self.layouts.into_iter().map(|(_, layout)| layout).collect(); let layout = Layout::from_size_align(self.element_size, self.max_alignment).unwrap(); BlockLayout { slot_map, layout, sub_layouts, } } } pub struct BlockLayout { slot_map: IndexMap<LayoutSlot, usize>, layout: Layout, sub_layouts: Vec<Layout>, } impl BlockLayout { pub fn build() -> LayoutBuilder { LayoutBuilder { layouts: Vec::new(), max_alignment: 1, element_size: 0, } } pub fn layout(&self) -> Layout { self.layout } pub fn apply(&self, data: NonNull<u8>, size: usize) -> Block { if self.sub_layouts.is_empty() { return Block { range: 0..0, len: 0, slices: Vec::new(), }; } assert_eq!(self.layout.align() & (self.layout.align() - 1), 0); let ptr = data.as_ptr(); let start = (ptr as usize + 
self.layout.align() - 1) & !(self.layout.align() - 1); let end = (ptr as usize + size) & !(self.layout.align() - 1); let initial_offset = start - ptr as usize; let size_aligned = end - start; let len = if self.layout.size() == 0 { !0 } else { size_aligned / self.layout.size() }; let mut offset = 0; let mut offsets = Vec::with_capacity(self.sub_layouts.le
} pub struct Block { range: Range<usize>, len: usize, slices: Vec<NonNull<u8>>, } impl Block { pub fn range(&self) -> Range<usize> { self.range.clone() } pub fn len(&self) -> usize { self.len } pub unsafe fn as_raw<T>(&self, slot: LayoutSlot) -> (*mut T, usize) { let slice = &self.slices[slot]; (slice.cast::<T>().as_ptr(), self.len) } pub unsafe fn as_slice<T: Copy>(&self, slot: LayoutSlot) -> &mut [T] { let slice = &self.slices[slot]; slice::from_raw_parts_mut(slice.cast::<T>().as_ptr(), self.len) } } #[cfg(test)] mod test { use super::*; #[test] fn empty() { let layout = BlockLayout::build().finish(); let mut block = [0; 32]; layout.apply(NonNull::new(block.as_mut_ptr()).unwrap(), 32); } #[test] fn single_zst() { struct Foo; let (layout, foo) = { let mut layout = BlockLayout::build(); let foo = layout.add::<Foo>(); (layout.finish(), foo) }; let mut data = [0; 32]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 32); unsafe { block.as_raw::<Foo>(foo); } } #[test] fn ordering() { #[derive(Copy, Clone)] struct Small { _a: u8, _b: u8, _c: u8, } #[derive(Copy, Clone)] struct Large { _a: f32, _b: [u64; 8], } let (layout, small, large) = { let mut layout = BlockLayout::build(); let small = layout.add::<Small>(); let large = layout.add::<Large>(); (layout.finish(), small, large) }; let mut data = [0; 512]; let block = layout.apply(NonNull::new(data.as_mut_ptr()).unwrap(), 512); let small_layout = Layout::new::<Small>(); let large_layout = Layout::new::<Large>(); assert_eq!( layout.layout().align(), small_layout.align().max(large_layout.align()) ); assert_eq!( layout.layout().size(), small_layout.size() + large_layout.size() ); unsafe { block.as_slice::<Small>(small); block.as_slice::<Large>(large); } } }
n()); for layout in &self.sub_layouts { assert_eq!(offset % layout.align(), 0); offsets.push(offset); offset += layout.size() * len; } let mut slices = Vec::with_capacity(self.sub_layouts.len()); for slot in self.slot_map.values() { let offset = offsets[*slot]; slices.push(NonNull::new(unsafe { (start as *mut u8).offset(offset as _) }).unwrap()); } Block { range: initial_offset..initial_offset + size_aligned, len, slices, } }
function_block-function_prefixed
[ { "content": "\n\n<h1 align=\"center\">billow</h1>\n\n<p align=\"center\">\n\n <a href=\"https://github.com/norse-rs\">\n\n <img src=\"https://img.shields.io/badge/project-norse-9cf.svg?style=flat-square\" alt=\"NORSE\">\n\n </a>\n\n <a href=\"LICENSE-MIT\">\n\n <img src=\"https://img.shields.io/badge/license-MIT-green.svg?style=flat-square\" alt=\"License - MIT\">\n\n </a>\n\n <a href=\"LICENSE-APACHE\">\n\n <img src=\"https://img.shields.io/badge/license-APACHE2-green.svg?style=flat-square\" alt=\"License - Apache2\">\n\n </a>\n\n</p>\n\n\n\n`billow` is an utility library for suballocating memory blocks in cache-friendly way for SoA data structures.\n\n\n\n```toml\n\n[dependencies]\n\nnorse-billow = \"0.1\"\n\n```\n\n\n\n## Usage\n\n\n\n```rust\n\nconst NUM_ELEMENTS: usize = 128;\n\n\n\ntype Transform = [[f32; 4]; 4];\n\ntype Velocity = [f32; 3];\n\n\n\n// Build layout for SoA:\n\n//\n\n// struct Block {\n\n// transforms: &mut [Transform],\n\n// velocity: &mut [Velocity],\n\n// }\n\nlet mut layout = billow::BlockLayout::build();\n\nlet transform_id = layout.add::<Transform>();\n\nlet velocity_id = layout.add::<Velocity>();\n\nlet block_layout = layout.finish();\n\n\n\n// Allocate memory block for holding `NUM_ELEMENTS` elements.\n\nlet layout = block_layout.layout();\n\nlet size = layout.size() * NUM_ELEMENTS;\n\nlet memory = unsafe {\n\n alloc::alloc(Layout::from_size_align(\n\n size, layout.align()\n\n )?)\n\n};\n\n\n\nlet block = block_layout.apply(NonNull::new(memory).unwrap(), layout.size() * 128);\n\nassert_eq!(block.len(), NUM_ELEMENTS);\n\n\n\nlet transforms = unsafe { block.as_slice::<Transform>(transform_id) };\n\nlet velocities = unsafe { block.as_slice::<Velocity>(velocity_id) };\n\n\n\nassert_eq!(transforms.len(), velocities.len());\n\n```\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any Contribution intentionally submitted for inclusion in this crate by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 3, "score": 7185.138345561391 } ]
Rust
noria-server/dataflow/src/state/mk_key.rs
JustusAdam/noria
093fed9d7fec410a3f1876870ab39455d8056b78
use prelude::*; pub(super) trait MakeKey<A> { fn from_row(key: &[usize], row: &[A]) -> Self; fn from_key(key: &[A]) -> Self; } impl<A: Clone> MakeKey<A> for (A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (row[key[0]].clone(), row[key[1]].clone()) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (key[0].clone(), key[1].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 3); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 3); (key[0].clone(), key[1].clone(), key[2].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), row[key[5]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), key[5].clone(), ) } } #[inline(always)] pub fn key_type_from_row<'a>(key: &[usize], row: &'a[DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&row[0]), 2 => KeyType::Double(MakeKey::from_row(key, row)), 3 => KeyType::Tri(MakeKey::from_row(key, row)), 4 => KeyType::Quad(MakeKey::from_row(key, row)), 5 => KeyType::Quin(MakeKey::from_row(key, row)), 6 => KeyType::Sex(MakeKey::from_row(key, row)), s => panic!("No state key implemented for keys of size {}", s), } } #[inline(always)] pub fn key_type_from_key<'a>(key: &'a [DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&key[0]), 2 => KeyType::Double(MakeKey::from_key(key)), 3 => KeyType::Tri(MakeKey::from_key(key)), 4 => KeyType::Quad(MakeKey::from_key(key)), 5 => KeyType::Quin(MakeKey::from_key(key)), 6 => KeyType::Sex(MakeKey::from_key(key)), s => panic!("No state key implemented for keys of size {}", s), } }
use prelude::*; pub(super) trait MakeKey<A> { fn from_row(key: &[usize], row: &[A]) -> Self; fn from_key(key: &[A]) -> Self; } impl<A: Clone> MakeKey<A> for (A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (row[key[0]].clone(), row[key[1]].clone()) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 2); (key[0].clone(), key[1].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 3); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 3); (key[0].clone(), key[1].clone(), key[2].clone()) } } impl<A: Clone> MakeKey<A> for (A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 4); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), ) } } impl<A: Clone> MakeKey<A> for (A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), ) } #[inline(always)]
} impl<A: Clone> MakeKey<A> for (A, A, A, A, A, A) { #[inline(always)] fn from_row(key: &[usize], row: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( row[key[0]].clone(), row[key[1]].clone(), row[key[2]].clone(), row[key[3]].clone(), row[key[4]].clone(), row[key[5]].clone(), ) } #[inline(always)] fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 6); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), key[5].clone(), ) } } #[inline(always)] pub fn key_type_from_row<'a>(key: &[usize], row: &'a[DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&row[0]), 2 => KeyType::Double(MakeKey::from_row(key, row)), 3 => KeyType::Tri(MakeKey::from_row(key, row)), 4 => KeyType::Quad(MakeKey::from_row(key, row)), 5 => KeyType::Quin(MakeKey::from_row(key, row)), 6 => KeyType::Sex(MakeKey::from_row(key, row)), s => panic!("No state key implemented for keys of size {}", s), } } #[inline(always)] pub fn key_type_from_key<'a>(key: &'a [DataType]) -> KeyType<'a> { match key.len() { 1 => KeyType::Single(&key[0]), 2 => KeyType::Double(MakeKey::from_key(key)), 3 => KeyType::Tri(MakeKey::from_key(key)), 4 => KeyType::Quad(MakeKey::from_key(key)), 5 => KeyType::Quin(MakeKey::from_key(key)), 6 => KeyType::Sex(MakeKey::from_key(key)), s => panic!("No state key implemented for keys of size {}", s), } }
fn from_key(key: &[A]) -> Self { debug_assert_eq!(key.len(), 5); ( key[0].clone(), key[1].clone(), key[2].clone(), key[3].clone(), key[4].clone(), ) }
function_block-full_function
[ { "content": "/// Trait for implementing operations that collapse a group of records into a single record.\n\n///\n\n/// Implementors of this trait can be used as nodes in a `flow::FlowGraph` by wrapping them in a\n\n/// `GroupedOperator`.\n\n///\n\n/// At a high level, the operator is expected to work in the following way:\n\n///\n\n/// - if a group has no records, its aggregated value is `GroupedOperation::zero()`\n\n/// - if a group has one record `r`, its aggregated value is\n\n///\n\n/// ```rust,ignore\n\n/// self.succ(self.zero(), vec![self.one(r, true), _])\n\n/// ```\n\n///\n\n/// - if a group has current value `v` (as returned by `GroupedOperation::succ()`), and a set of\n\n/// records `[rs]` arrives for the group, the updated value is\n\n///\n\n/// ```rust,ignore\n\n/// self.succ(v, rs.map(|(r, is_positive, ts)| (self.one(r, is_positive), ts)).collect())\n\n/// ```\n\npub trait GroupedOperation: fmt::Debug + Clone {\n\n /// The type used to represent a single\n\n type Diff: 'static;\n\n\n\n /// Called once before any other methods in this trait are called.\n\n ///\n\n /// Implementors should use this call to initialize any cache state and to pre-compute\n\n /// optimized configuration structures to quickly execute the other trait methods.\n\n ///\n\n /// `parent` is a reference to the single ancestor node of this node in the flow graph.\n\n fn setup(&mut self, parent: &Node);\n\n\n\n /// List the columns used to group records.\n\n ///\n\n /// All records with the same value for the returned columns are assigned to the same group.\n\n fn group_by(&self) -> &[usize];\n\n\n\n /// Extract the aggregation value from a single record.\n\n fn to_diff(&self, record: &[DataType], is_positive: bool) -> Self::Diff;\n\n\n", "file_path": "noria-server/dataflow/src/ops/grouped/mod.rs", "rank": 1, "score": 155588.87140004616 }, { "content": "fn populate_table(backend: &mut Backend, data: &Path, use_txn: bool) -> usize {\n\n use std::str::FromStr;\n\n\n\n let table_name = data.file_stem().unwrap().to_str().unwrap();\n\n let mut putter = backend.g.table(table_name).unwrap().into_sync();\n\n\n\n let f = File::open(data).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n let mut s = String::new();\n\n println!(\"Populating {}...\", table_name);\n\n let start = time::Instant::now();\n\n let mut i = 0;\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let rec: Vec<DataType> = fields\n\n .into_iter()\n\n .map(|s| match i64::from_str(s) {\n\n Ok(v) => v.into(),\n", "file_path": "noria-benchmarks/hotsoup/populate.rs", "rank": 2, "score": 152262.87557401173 }, { "content": "pub trait Executor {\n\n fn ack(&mut self, tag: SourceChannelIdentifier);\n\n fn create_universe(&mut self, req: HashMap<String, DataType>);\n\n}\n", "file_path": "noria-server/dataflow/src/prelude.rs", "rank": 3, "score": 140258.4047278748 }, { "content": "#[doc(hidden)]\n\n#[inline]\n\npub fn shard_by(dt: &DataType, shards: usize) -> usize {\n\n match *dt {\n\n DataType::Int(n) => n as usize % shards,\n\n DataType::BigInt(n) => n as usize % shards,\n\n DataType::Text(..) | DataType::TinyText(..) 
=> {\n\n use std::borrow::Cow;\n\n use std::hash::Hasher;\n\n let mut hasher = fnv::FnvHasher::default();\n\n let s: Cow<str> = dt.into();\n\n hasher.write(s.as_bytes());\n\n hasher.finish() as usize % shards\n\n }\n\n // a bit hacky: send all NULL values to the first shard\n\n DataType::None => 0,\n\n ref x => {\n\n unimplemented!(\"asked to shard on value {:?}\", x);\n\n }\n\n }\n\n}\n\n\n", "file_path": "noria/src/lib.rs", "rank": 4, "score": 132623.52933518979 }, { "content": "fn traverse(path: &Path, start: usize, stop: usize) -> HashMap<usize, PathBuf> {\n\n use std::fs;\n\n use std::str::FromStr;\n\n\n\n let mut files = HashMap::new();\n\n for entry in fs::read_dir(path).unwrap() {\n\n let entry = entry.unwrap();\n\n let path = entry.path();\n\n if path.is_file() {\n\n let fname = path.file_name().unwrap().to_str().unwrap();\n\n if fname.starts_with(\"hotcrp_v\") {\n\n let sv = usize::from_str(\n\n &fname[fname.find(\"_\").unwrap() + 2..fname.rfind(\"_\").unwrap()],\n\n ).unwrap();\n\n if sv >= start && sv <= stop {\n\n files.insert(sv, path.clone());\n\n }\n\n }\n\n }\n\n }\n\n files\n\n}\n\n\n", "file_path": "noria-benchmarks/hotsoup/process_paper_queries.rs", "rank": 5, "score": 125017.8570774034 }, { "content": "fn key_val(i: usize, col: usize, r: &TableOperation) -> &DataType {\n\n match *r {\n\n TableOperation::Insert(ref row) => &row[col],\n\n TableOperation::Delete { ref key } => &key[i],\n\n TableOperation::Update { ref key, .. } => &key[i],\n\n TableOperation::InsertOrUpdate { ref row, .. } => &row[col],\n\n }\n\n}\n\n\n", "file_path": "noria-server/dataflow/src/node/special/base.rs", "rank": 6, "score": 123271.84808207148 }, { "content": "fn throughput(ops: usize, took: time::Duration) -> f64 {\n\n ops as f64 / took.as_secs_f64()\n\n}\n\n\n\nconst MAX_BATCH_TIME_US: u32 = 1000;\n\n\n\nmod clients;\n\nuse self::clients::{Parameters, ReadRequest, VoteClient, WriteRequest};\n\n\n", "file_path": "noria-benchmarks/vote/main.rs", "rank": 7, "score": 119706.06519374943 }, { "content": "pub fn populate_countries(backend: &mut Backend, data_location: &str) -> usize {\n\n let f = File::open(format!(\"{}/countries.tsv\", data_location)).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n println!(\"Prepopulating countries...\");\n\n\n\n let mut s = String::new();\n\n let mut records = Vec::new();\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let co_id = i32::from_str(fields[0]).unwrap();\n\n let co_name = fields[1];\n\n let co_exchange = f64::from_str(fields[2]).unwrap();\n\n let co_currency = fields[3];\n\n records.push(vec![\n\n co_id.into(),\n\n co_name.into(),\n\n co_exchange.into(),\n\n co_currency.into(),\n\n ]);\n\n }\n\n s.clear();\n\n }\n\n\n\n populate(backend, \"ship_co\", records.clone());\n\n populate(backend, \"bill_co\", records.clone());\n\n populate(backend, \"country\", records)\n\n}\n\n\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 8, "score": 112964.20827628291 }, { "content": "pub fn populate_customers(backend: &mut Backend, data_location: &str) -> usize {\n\n let f = File::open(format!(\"{}/customers.tsv\", data_location)).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n println!(\"Prepopulating customers...\");\n\n\n\n let mut s = String::new();\n\n let mut records = Vec::new();\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let c_id = 
i32::from_str(fields[0]).unwrap();\n\n let c_uname = fields[1];\n\n let c_passwd = fields[2];\n\n let c_fname = fields[3];\n\n let c_lname = fields[4];\n\n let c_addr_id = i32::from_str(fields[5]).unwrap();\n\n let c_phone = fields[6];\n\n let c_email = fields[7];\n\n let c_since = parse_ymd_to_timestamp(fields[8]);\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 9, "score": 112964.20827628291 }, { "content": "pub fn populate_orders(backend: &mut Backend, data_location: &str) -> usize {\n\n let f = File::open(format!(\"{}/orders.tsv\", data_location)).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n println!(\"Prepopulating orders...\");\n\n\n\n let mut s = String::new();\n\n let mut records = Vec::new();\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let o_id = i32::from_str(fields[0]).unwrap();\n\n let o_c_id = i32::from_str(fields[1]).unwrap();\n\n let o_date = NaiveDateTime::parse_from_str(fields[2], \"'%Y-%m-%d %H:%M:%S'\")\n\n .unwrap()\n\n .timestamp();\n\n let o_sub_total = f64::from_str(fields[3]).unwrap();\n\n let o_tax = f64::from_str(fields[4]).unwrap();\n\n let o_total = f64::from_str(fields[5]).unwrap();\n\n let o_ship_type = fields[6];\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 10, "score": 112964.20827628291 }, { "content": "pub fn populate_addresses(backend: &mut Backend, data_location: &str) -> usize {\n\n let f = File::open(format!(\"{}/addresses.tsv\", data_location)).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n println!(\"Prepopulating addresses...\");\n\n\n\n let mut s = String::new();\n\n let mut records = Vec::new();\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let addr_id = i32::from_str(fields[0]).unwrap();\n\n let addr_street1 = fields[1];\n\n let addr_street2 = fields[2];\n\n let addr_city = fields[3];\n\n let addr_state = fields[4];\n\n let addr_zip = fields[5];\n\n let addr_co_id = i32::from_str(fields[6]).unwrap();\n\n records.push(vec![\n\n addr_id.into(),\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 11, "score": 112964.20827628291 }, { "content": "fn populate(g: &mut SyncHandle<ZookeeperAuthority>, rows: i64, skewed: bool) {\n\n let mut mutator = g.table(\"TableRow\").unwrap().into_sync();\n\n\n\n (0..rows)\n\n .map(|i| {\n\n let row: Vec<DataType> = if skewed {\n\n let mut row = vec![SKEWED_KEY.into(); 10];\n\n row[0] = i.into();\n\n row\n\n } else {\n\n vec![i.into(); 10]\n\n };\n\n\n\n row\n\n })\n\n .chunks(BATCH_SIZE)\n\n .into_iter()\n\n .for_each(|chunk| {\n\n let rs: Vec<Vec<DataType>> = chunk.collect();\n\n mutator.perform_all(rs).unwrap();\n\n });\n\n}\n\n\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 12, "score": 111762.34348543893 }, { "content": "pub fn populate_cc_xacts(backend: &mut Backend, data_location: &str) -> usize {\n\n let f = File::open(format!(\"{}/cc_xacts.data\", data_location)).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n println!(\"Prepopulating cc_xacts...\");\n\n\n\n let mut s = String::new();\n\n let mut records = Vec::new();\n\n while reader.read_line(&mut s).unwrap() > 0 {\n\n {\n\n let fields: Vec<&str> = s.split('\\t').map(str::trim).collect();\n\n let cx_o_id = i32::from_str(fields[0]).unwrap();\n\n let cx_type = fields[1];\n\n let cx_num = fields[2];\n\n let cx_name = fields[3];\n\n let cx_expire = parse_ymd_to_timestamp(fields[4]);\n\n let cx_auth_id = 
fields[5];\n\n let cx_amt = f64::from_str(fields[6]).unwrap();\n\n let xact_date = NaiveDateTime::parse_from_str(fields[7], \"%Y-%m-%d %H:%M:%S\");\n\n let cx_xact_date = xact_date.unwrap().timestamp();\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 13, "score": 111305.63815427195 }, { "content": "fn build(prefix: &str, sharding: Option<usize>, log: bool) -> SyncHandle<LocalAuthority> {\n\n use crate::logger_pls;\n\n let mut builder = Builder::default();\n\n if log {\n\n builder.log_with(logger_pls());\n\n }\n\n builder.set_sharding(sharding);\n\n builder.set_persistence(get_persistence_params(prefix));\n\n builder.start_simple().unwrap()\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 14, "score": 107361.14385551919 }, { "content": "fn key_from_record<'a, R>(key: &[usize], contiguous: bool, record: R) -> Key<'a>\n\nwhere\n\n R: Into<Cow<'a, [DataType]>>,\n\n{\n\n match record.into() {\n\n Cow::Owned(mut record) => {\n\n let mut i = 0;\n\n let mut keep = key.iter().peekable();\n\n record.retain(|_| {\n\n i += 1;\n\n if let Some(&&next) = keep.peek() {\n\n if next != i - 1 {\n\n return false;\n\n }\n\n } else {\n\n return false;\n\n }\n\n\n\n assert_eq!(*keep.next().unwrap(), i - 1);\n\n true\n", "file_path": "noria-server/dataflow/src/backlog/mod.rs", "rank": 15, "score": 106331.82715858253 }, { "content": "pub fn populate(backend: &mut Backend, data_location: &str, use_txn: bool) -> io::Result<()> {\n\n use std::fs;\n\n\n\n let dir = Path::new(data_location);\n\n if dir.is_dir() {\n\n for entry in fs::read_dir(dir)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_file() {\n\n populate_table(backend, &path, use_txn);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "noria-benchmarks/hotsoup/populate.rs", "rank": 16, "score": 104818.28802411188 }, { "content": "pub fn validate(log: &Logger, graph: &Graph, topo_list: &[NodeIndex], sharding_factor: usize) {\n\n // ensure that each node matches the sharding of each of its ancestors, unless the ancestor is\n\n // a sharder or a shard merger\n\n for &node in topo_list {\n\n let n = &graph[node];\n\n if n.is_internal() && n.is_shard_merger() {\n\n // shard mergers legitimately have a different sharding than their ancestors\n\n continue;\n\n }\n\n\n\n let inputs: Vec<_> = graph\n\n .neighbors_directed(node, petgraph::EdgeDirection::Incoming)\n\n .filter(|ni| !graph[*ni].is_source())\n\n .collect();\n\n\n\n let remap = |nd: &Node, pni: NodeIndex, ps: Sharding| -> Sharding {\n\n if nd.is_internal() || nd.is_base() {\n\n if let Sharding::ByColumn(c, shards) = ps {\n\n // remap c according to node's semantics\n\n let src = (0..nd.fields().len()).find(|&col| {\n", "file_path": "noria-server/src/controller/migrate/sharding.rs", "rank": 17, "score": 102918.6985542909 }, { "content": "fn key_of<'a>(key_cols: &'a [usize], r: &'a TableOperation) -> impl Iterator<Item = &'a DataType> {\n\n key_cols\n\n .iter()\n\n .enumerate()\n\n .map(move |(i, col)| key_val(i, *col, r))\n\n}\n\n\n\nimpl Base {\n\n pub(in crate::node) fn take(&mut self) -> Self {\n\n Clone::clone(self)\n\n }\n\n\n\n pub(in crate::node) fn process(\n\n &mut self,\n\n us: LocalNodeIndex,\n\n mut ops: Vec<TableOperation>,\n\n state: &StateMap,\n\n ) -> Records {\n\n if self.primary_key.is_none() || ops.is_empty() {\n\n return ops\n", "file_path": "noria-server/dataflow/src/node/special/base.rs", "rank": 18, "score": 101492.41861434927 }, { "content": "fn populate(backend: &mut Backend, name: &'static str, mut records: 
Vec<Vec<DataType>>) -> usize {\n\n let mut mutator = backend.g.table(name).unwrap().into_sync();\n\n\n\n let i = records.len();\n\n\n\n let mut do_prepop = move || {\n\n let start = time::Instant::now();\n\n\n\n let i = records.len();\n\n for r in records.drain(..) {\n\n mutator.insert(r).unwrap();\n\n }\n\n\n\n let dur = start.elapsed().as_secs_f64();\n\n println!(\n\n \"Inserted {} {} in {:.2}s ({:.2} PUTs/sec)!\",\n\n i,\n\n name,\n\n dur,\n\n i as f64 / dur\n", "file_path": "noria-benchmarks/tpc_w/populate.rs", "rank": 19, "score": 101492.41861434927 }, { "content": "pub fn assign(log: &Logger, graph: &mut Graph, topo_list: &[NodeIndex], ndomains: &mut usize) {\n\n // we need to walk the data flow graph and assign domains to all new nodes.\n\n // we generally want as few domains as possible, but in *some* cases we must make new ones.\n\n // specifically:\n\n //\n\n // - the child of a Sharder is always in a different domain from the sharder\n\n // - shard merge nodes are never in the same domain as their sharded ancestors\n\n\n\n let mut next_domain = || {\n\n *ndomains += 1;\n\n *ndomains - 1\n\n };\n\n\n\n for &node in topo_list {\n\n #[allow(clippy::cognitive_complexity)]\n\n let assignment = (|| {\n\n let graph = &*graph;\n\n let n = &graph[node];\n\n\n\n if n.is_shard_merger() {\n", "file_path": "noria-server/src/controller/migrate/assignment.rs", "rank": 20, "score": 99565.30359837593 }, { "content": "trait ConvenientSession {\n\n fn exec<'a>(&'a self, cmd: &[&str]) -> Result<ssh2::Channel<'a>, Error>;\n\n fn just_exec(&self, cmd: &[&str]) -> Result<Result<(), String>, Error>;\n\n fn in_noria(&self, cmd: &[&str]) -> Result<Result<(), String>, Error>;\n\n}\n", "file_path": "noria-benchmarks/vote/orchestrator.rs", "rank": 21, "score": 94340.0729629586 }, { "content": "trait ConvenientSession {\n\n fn exec<'a>(&'a self, cmd: &[&str]) -> Result<ssh2::Channel<'a>, Error>;\n\n fn just_exec(&self, cmd: &[&str]) -> Result<Result<String, String>, Error>;\n\n}\n", "file_path": "noria-benchmarks/vote/eintopf.rs", "rank": 22, "score": 94340.0729629586 }, { "content": "trait ConvenientSession {\n\n fn exec<'a>(&'a self, cmd: &[&str]) -> Result<ssh2::Channel<'a>, Error>;\n\n fn just_exec(&self, cmd: &[&str]) -> Result<Result<String, String>, Error>;\n\n}\n", "file_path": "noria-benchmarks/vote/distributed.rs", "rank": 23, "score": 94340.0729629586 }, { "content": "#[test]\n\nfn votes() {\n\n // set up graph\n\n let mut g = start_simple(\"votes\");\n\n let _ = g.migrate(|mig| {\n\n // add article base nodes (we use two so we can exercise unions too)\n\n let article1 = mig.add_base(\"article1\", &[\"id\", \"title\"], Base::default());\n\n let article2 = mig.add_base(\"article2\", &[\"id\", \"title\"], Base::default());\n\n\n\n // add a (stupid) union of article1 + article2\n\n let mut emits = HashMap::new();\n\n emits.insert(article1, vec![0, 1]);\n\n emits.insert(article2, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let article = mig.add_ingredient(\"article\", &[\"id\", \"title\"], u);\n\n mig.maintain_anonymous(article, &[0]);\n\n\n\n // add vote base table\n\n let vote = mig.add_base(\"vote\", &[\"user\", \"id\"], Base::default());\n\n\n\n // add vote count\n", "file_path": "noria-server/src/integration.rs", "rank": 24, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"vote-distributed\")\n\n .version(\"0.1\")\n\n .about(\"Orchestrate runs of the distributed vote benchmark\")\n\n .arg(\n\n Arg::with_name(\"articles\")\n\n 
.short(\"a\")\n\n .long(\"articles\")\n\n .value_name(\"N\")\n\n .default_value(\"500000\")\n\n .takes_value(true)\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"runtime\")\n\n .short(\"r\")\n\n .long(\"runtime\")\n\n .value_name(\"N\")\n", "file_path": "noria-benchmarks/vote/distributed.rs", "rank": 25, "score": 92593.71902440053 }, { "content": "fn make(\n\n recipe_location: &str,\n\n parallel: bool,\n\n single_query: bool,\n\n disable_partial: bool,\n\n) -> Backend {\n\n use std::fs::File;\n\n use std::io::Read;\n\n\n\n // set up graph\n\n let mut b = Builder::default();\n\n\n\n let main_log = noria::logger_pls();\n\n b.log_with(main_log);\n\n if disable_partial {\n\n b.disable_partial();\n\n }\n\n\n\n let mut g = b.start_simple().unwrap();\n\n\n", "file_path": "noria-benchmarks/tpc_w/tpc_w.rs", "rank": 26, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg, SubCommand};\n\n\n\n let args = App::new(\"vote\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks user-curated news aggregator throughput for in-memory Soup\")\n\n .arg(\n\n Arg::with_name(\"articles\")\n\n .short(\"a\")\n\n .long(\"articles\")\n\n .value_name(\"N\")\n\n .default_value(\"100000\")\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"threads\")\n\n .short(\"t\")\n\n .long(\"threads\")\n\n .value_name(\"N\")\n\n .default_value(\"4\")\n", "file_path": "noria-benchmarks/vote/main.rs", "rank": 27, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"eintof-distributed\")\n\n .version(\"0.1\")\n\n .about(\"Orchestrate runs of the distributed eintopf benchmark\")\n\n .arg(\n\n Arg::with_name(\"articles\")\n\n .short(\"a\")\n\n .long(\"articles\")\n\n .value_name(\"N\")\n\n .default_value(\"100000\")\n\n .takes_value(true)\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"availability_zone\")\n\n .long(\"availability-zone\")\n\n .value_name(\"AZ\")\n\n .default_value(\"us-east-1a\")\n", "file_path": "noria-benchmarks/vote/eintopf.rs", "rank": 28, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n use std::fs::{self, File};\n\n use std::io::Write;\n\n use std::path::PathBuf;\n\n use std::str::FromStr;\n\n\n\n let matches = App::new(\"hotsoup\")\n\n .version(\"0.1\")\n\n .about(\"Soupy conference management system for your HotCRP needs.\")\n\n .arg(\n\n Arg::with_name(\"graphs\")\n\n .short(\"g\")\n\n .value_name(\"DIR\")\n\n .help(\"Directory to dump graphs for each schema version into (if set).\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"populate_from\")\n\n .short(\"p\")\n\n .required(true)\n", "file_path": "noria-benchmarks/hotsoup/hotsoup.rs", "rank": 29, "score": 92593.71902440053 }, { "content": "#[test]\n\nfn albums() {\n\n let mut b = Builder::default();\n\n //b.disable_partial();\n\n b.set_sharding(None);\n\n //b.log_with(crate::logger_pls());\n\n let mut g = b.start_simple().unwrap();\n\n g.install_recipe(\n\n \"CREATE TABLE friend (usera int, userb int);\n\n CREATE TABLE album (a_id text, u_id int, public tinyint(1));\n\n CREATE TABLE photo (p_id text, album text);\",\n\n )\n\n .unwrap();\n\n g.extend_recipe(\"VIEW album_friends: \\\n\n (SELECT album.a_id AS aid, friend.userb AS uid FROM album JOIN friend ON (album.u_id = friend.usera) WHERE album.public = 0) \\\n\n UNION \\\n\n (SELECT album.a_id AS aid, friend.usera AS uid FROM album 
JOIN friend ON (album.u_id = friend.userb) WHERE album.public = 0) \\\n\n UNION \\\n\n (SELECT album.a_id AS aid, album.u_id AS uid FROM album WHERE album.public = 0);\n\nQUERY private_photos: \\\n", "file_path": "noria-server/src/integration.rs", "rank": 30, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"piazza-mysql\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks a forum like application with security policies using MySql\")\n\n .arg(Arg::with_name(\"dbname\").required(true))\n\n .arg(\n\n Arg::with_name(\"nusers\")\n\n .short(\"u\")\n\n .default_value(\"1000\")\n\n .help(\"Number of users in the db\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"nlogged\")\n\n .short(\"l\")\n\n .default_value(\"1000\")\n\n .help(\"Number of logged users\"),\n\n )\n\n .arg(\n", "file_path": "noria-benchmarks/piazza/mysql.rs", "rank": 31, "score": 92593.71902440053 }, { "content": "#[test]\n\nfn tpc_w() {\n\n test_queries(\"tpc-w\", \"tests/tpc-w-queries.txt\", true, true, false);\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 32, "score": 92593.71902440053 }, { "content": "fn main() {\n\n // inline recipe definition\n\n let sql1 = \"Article: CREATE TABLE Article (aid int, title varchar(255), \\\n\n url text, PRIMARY KEY(aid));\";\n\n // two internal views: you cannot query these directly from clients, but you can write\n\n // other view definitions that use them.\n\n let sql2 = \"Vote: CREATE TABLE Vote (aid int, uid int);\";\n\n let sql3 = \"VoteCount: SELECT Vote.aid, COUNT(uid) AS votes \\\n\n FROM Vote GROUP BY Vote.aid;\";\n\n // externally queryable materialized view\n\n let sql4 = \"QUERY ArticleWithVoteCount: \\\n\n SELECT Article.aid, title, url, VoteCount.votes AS votes \\\n\n FROM Article, VoteCount \\\n\n WHERE Article.aid = VoteCount.aid AND Article.aid = ?;\";\n\n\n\n // set up Noria via recipe\n\n let rt = tokio::runtime::Runtime::new().unwrap();\n\n let executor = rt.executor();\n\n let mut db = SyncControllerHandle::from_zk(\"127.0.0.1:2181/basicdist\", executor).unwrap();\n\n db.extend_recipe(sql1).unwrap();\n", "file_path": "noria/examples/quickstart-sync.rs", "rank": 33, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n let args = App::new(\"piazza\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks Piazza-like application with security policies.\")\n\n .arg(\n\n Arg::with_name(\"schema\")\n\n .short(\"s\")\n\n .required(true)\n\n .default_value(\"benchmarks/piazza/schema.sql\")\n\n .help(\"Schema file for Piazza application\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"queries\")\n\n .short(\"q\")\n\n .required(true)\n\n .default_value(\"benchmarks/piazza/post-queries.sql\")\n\n .help(\"Query file for Piazza application\"),\n\n )\n\n .arg(\n", "file_path": "noria-benchmarks/piazza/piazza.rs", "rank": 34, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"vote-orchestrator\")\n\n .version(\"0.1\")\n\n .about(\"Orchestrate many runs of the vote benchmark\")\n\n .arg(\n\n Arg::with_name(\"articles\")\n\n .short(\"a\")\n\n .long(\"articles\")\n\n .value_name(\"N\")\n\n .default_value(\"500000\")\n\n .takes_value(true)\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n ).arg(\n\n Arg::with_name(\"availability_zone\")\n\n .long(\"availability-zone\")\n\n .value_name(\"AZ\")\n\n .default_value(\"us-east-1a\")\n\n .takes_value(true)\n", "file_path": "noria-benchmarks/vote/orchestrator.rs", "rank": 35, 
"score": 92593.71902440053 }, { "content": "fn main() {\n\n // inline recipe definition\n\n let sql = \"# base tables\n\n CREATE TABLE Article (aid int, title varchar(255), \\\n\n url text, PRIMARY KEY(aid));\n\n CREATE TABLE Vote (aid int, uid int);\n\n\n\n # internal view, for shorthand below\n\n VoteCount: SELECT Vote.aid, COUNT(DISTINCT uid) AS votes \\\n\n FROM Vote GROUP BY Vote.aid;\n\n # queryable materialized view\n\n QUERY ArticleWithVoteCount: \\\n\n SELECT Article.aid, title, url, VoteCount.votes AS votes \\\n\n FROM Article, VoteCount \\\n\n WHERE Article.aid = VoteCount.aid AND Article.aid = ?;\";\n\n let aid = 1;\n\n\n\n // NOTE: this will get *so* much nicer with async/await\n\n tokio::run(\n\n ControllerHandle::from_zk(\"127.0.0.1:2181/basicdist\")\n", "file_path": "noria/examples/quickstart-async.rs", "rank": 36, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use crate::populate::*;\n\n use clap::{App, Arg};\n\n\n\n let matches = App::new(\"tpc_w\")\n\n .version(\"0.1\")\n\n .about(\"Soup TPC-W driver.\")\n\n .arg(\n\n Arg::with_name(\"recipe\")\n\n .short(\"r\")\n\n .required(true)\n\n .default_value(\"tests/tpc-w-queries.txt\")\n\n .help(\"Location of the TPC-W recipe file.\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"populate_from\")\n\n .short(\"p\")\n\n .required(true)\n\n .default_value(\"benchmarks/tpc_w/data\")\n\n .help(\"Location of the data files for TPC-W prepopulation.\"),\n", "file_path": "noria-benchmarks/tpc_w/tpc_w.rs", "rank": 37, "score": 92593.71902440053 }, { "content": "// Sleeps for either DEFAULT_SETTLE_TIME_MS milliseconds, or\n\n// for the value given through the SETTLE_TIME environment variable.\n\nfn sleep() {\n\n thread::sleep(get_settle_time());\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 38, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::*;\n\n let app = clap_app!(clickstream =>\n\n (@arg DATAFILE: -f --datafile [FILE] \"Where to read the events from or write them to\")\n\n (@subcommand generate =>\n\n (@arg COUNT: <NUM> \"How many entries to generate\")\n\n )\n\n (@subcommand run =>\n\n )\n\n\n\n );\n\n let matches = app.get_matches();\n\n let f = matches.value_of(\"datafile\").unwrap_or(\"data.csv\");\n\n\n\n use crate::click_event::*;\n\n use std::fs::File;\n\n\n\n match matches.subcommand() {\n\n (\"generate\", Some(gen_opts)) => dump(\n\n &mut File::create(f).unwrap(),\n", "file_path": "noria-benchmarks/clickstream/main.rs", "rank": 39, "score": 92593.71902440053 }, { "content": "fn main() {\n\n let args = App::new(\"replay\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks the latency of full replays in a user-curated news aggregator\")\n\n .arg(\n\n Arg::with_name(\"rows\")\n\n .long(\"rows\")\n\n .value_name(\"N\")\n\n .default_value(\"100000\")\n\n .help(\"Number of rows to prepopulate the database with.\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"reads\")\n\n .long(\"reads\")\n\n .default_value(\"10000\")\n\n .help(\"Number of rows to read while benchmarking.\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"secondary-indices\")\n\n .long(\"secondary-indices\")\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 40, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n let matches = App::new(\"noria-server\")\n\n .version(\"0.0.1\")\n\n .arg(\n\n Arg::with_name(\"address\")\n\n .short(\"a\")\n\n .long(\"address\")\n\n .takes_value(true)\n\n .default_value(\"127.0.0.1\")\n\n .help(\"IP address to listen on\"),\n\n )\n\n .arg(\n\n 
Arg::with_name(\"deployment\")\n\n .long(\"deployment\")\n\n .required(true)\n\n .takes_value(true)\n\n .help(\"Noria deployment ID.\"),\n\n )\n\n .arg(\n", "file_path": "noria-server/src/main.rs", "rank": 41, "score": 92593.71902440053 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n let args = App::new(\"SecureCRP\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks HotCRP-like application with security policies.\")\n\n .arg(\n\n Arg::with_name(\"schema\")\n\n .short(\"s\")\n\n .required(true)\n\n .default_value(\"benchmarks/securecrp/jeeves_schema.sql\")\n\n .help(\"SQL schema file\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"queries\")\n\n .short(\"q\")\n\n .required(true)\n\n .default_value(\"benchmarks/securecrp/jeeves_queries.sql\")\n\n .help(\"SQL query file\"),\n\n )\n\n .arg(\n", "file_path": "noria-benchmarks/securecrp/main.rs", "rank": 42, "score": 92593.71902440053 }, { "content": "#[test]\n\nfn lobsters() {\n\n test_queries(\"lobsters\", \"tests/lobsters-schema.txt\", false, false, false);\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 43, "score": 92593.71902440053 }, { "content": "/// Enables incorporation of a textual SQL query into a Soup graph.\n\ntrait ToFlowParts {\n\n /// Turn a SQL query into a set of nodes inserted into the Soup graph managed by\n\n /// the `SqlIncorporator` in the second argument. The query can optionally be named by the\n\n /// string in the `Option<String>` in the third argument.\n\n fn to_flow_parts(\n\n &self,\n\n inc: &mut SqlIncorporator,\n\n name: Option<String>,\n\n mig: &mut Migration,\n\n ) -> Result<QueryFlowParts, String>;\n\n}\n\n\n\nimpl<'a> ToFlowParts for &'a String {\n\n fn to_flow_parts(\n\n &self,\n\n inc: &mut SqlIncorporator,\n\n name: Option<String>,\n\n mig: &mut Migration,\n\n ) -> Result<QueryFlowParts, String> {\n\n self.as_str().to_flow_parts(inc, name, mig)\n", "file_path": "noria-server/src/controller/sql/mod.rs", "rank": 44, "score": 92052.50585731074 }, { "content": "pub trait Sender {\n\n type Item;\n\n\n\n fn send(&mut self, t: Self::Item) -> Result<(), tcp::SendError>;\n\n}\n\n\n\nimpl<T> Sender for tokio_sync::mpsc::UnboundedSender<T> {\n\n type Item = T;\n\n\n\n fn send(&mut self, t: Self::Item) -> Result<(), tcp::SendError> {\n\n self.try_send(t).map_err(|_| {\n\n tcp::SendError::IoError(io::Error::new(\n\n io::ErrorKind::BrokenPipe,\n\n \"local peer went away\",\n\n ))\n\n })\n\n }\n\n}\n\n\n\nimpl<T> DomainConnectionBuilder<MaybeLocal, T>\n", "file_path": "noria/src/channel/mod.rs", "rank": 45, "score": 91505.37398855646 }, { "content": "#[test]\n\nfn empty_migration() {\n\n // set up graph\n\n let mut g = start_simple(\"empty_migration\");\n\n g.migrate(|_| {});\n\n\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::default());\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n\n (a, b, c)\n\n });\n\n\n\n let mut cq = g.view(\"c\").unwrap().into_sync();\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n", "file_path": "noria-server/src/integration.rs", "rank": 46, "score": 91377.30506336871 }, { "content": "fn listen_internal(\n\n valve: &Valve,\n\n log: slog::Logger,\n\n event_tx: UnboundedSender<Event>,\n\n on: tokio::net::TcpListener,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let valve = 
valve.clone();\n\n valve\n\n .wrap(on.incoming())\n\n .map_err(failure::Error::from)\n\n .for_each(move |sock| {\n\n tokio::spawn(\n\n valve\n\n .wrap(AsyncBincodeReader::from(sock))\n\n .map(Event::InternalMessage)\n\n .map_err(failure::Error::from)\n\n .forward(\n\n event_tx\n\n .clone()\n\n .sink_map_err(|_| format_err!(\"main event loop went away\")),\n", "file_path": "noria-server/src/startup.rs", "rank": 47, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn simple_migration() {\n\n let id: DataType = 1.into();\n\n\n\n // set up graph\n\n let mut g = start_simple(\"simple_migration\");\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n mig.maintain_anonymous(a, &[0]);\n\n a\n\n });\n\n\n\n let mut aq = g.view(\"a\").unwrap().into_sync();\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n\n\n // send a value on a\n\n muta.insert(vec![id.clone(), 2.into()]).unwrap();\n\n\n\n // give it some time to propagate\n\n sleep();\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 48, "score": 91377.30506336871 }, { "content": "// Synchronously read `reads` times, where each read should trigger a full replay from the base.\n\nfn perform_reads(\n\n g: &mut SyncHandle<ZookeeperAuthority>,\n\n reads: i64,\n\n rows: i64,\n\n skewed: bool,\n\n use_secondary: bool,\n\n verbose: bool,\n\n) {\n\n if verbose {\n\n eprintln!(\"Done populating state, now reading articles...\");\n\n }\n\n\n\n let mut hist = Histogram::<u64>::new(4).unwrap();\n\n let row_ids = if skewed {\n\n vec![SKEWED_KEY]\n\n } else {\n\n let mut rng = rand::thread_rng();\n\n (0..rows).choose_multiple(&mut rng, reads as usize)\n\n };\n\n\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 49, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn double_shuffle() {\n\n let mut g = start_simple(\"double_shuffle\");\n\n let sql = \"\n\n CREATE TABLE Car (cid int, pid int, PRIMARY KEY(cid));\n\n CREATE TABLE Price (pid int, price int, PRIMARY KEY(pid));\n\n QUERY CarPrice: SELECT cid, price FROM Car \\\n\n JOIN Price ON Car.pid = Price.pid WHERE cid = ?;\n\n \";\n\n g.install_recipe(sql).unwrap();\n\n\n\n let mut car_mutator = g.table(\"Car\").unwrap().into_sync();\n\n let mut price_mutator = g.table(\"Price\").unwrap().into_sync();\n\n let mut getter = g.view(\"CarPrice\").unwrap().into_sync();\n\n let cid = 1;\n\n let pid = 1;\n\n let price = 100;\n\n\n\n price_mutator\n\n .insert(vec![pid.into(), price.into()])\n\n .unwrap();\n", "file_path": "noria-server/src/integration.rs", "rank": 50, "score": 91377.30506336871 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"vote\")\n\n .version(\"0.1\")\n\n .about(\"Benchmarks user-curated news aggregator throughput for in-memory Soup\")\n\n .arg(\n\n Arg::with_name(\"narticles\")\n\n .short(\"a\")\n\n .long(\"articles\")\n\n .takes_value(true)\n\n .default_value(\"100000\")\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"runtime\")\n\n .short(\"r\")\n\n .long(\"runtime\")\n\n .required(true)\n\n .takes_value(true)\n", "file_path": "noria-benchmarks/vote-migration/main.rs", "rank": 51, "score": 91377.30506336871 }, { "content": "#[test]\n\n#[allow_fail]\n\nfn node_removal() {\n\n // set up graph\n\n let mut b = Builder::default();\n\n b.set_persistence(PersistenceParameters::new(\n\n DurabilityMode::DeleteOnExit,\n\n Duration::from_millis(1),\n\n Some(String::from(\"domain_removal\")),\n\n 1,\n\n ));\n\n let mut g = 
b.start_simple().unwrap();\n\n let cid = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![]).with_key(vec![0]));\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::new(vec![]).with_key(vec![0]));\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0])\n", "file_path": "noria-server/src/integration.rs", "rank": 52, "score": 91377.30506336871 }, { "content": "fn main() {\n\n let args = App::new(\"noria lobsters ec2 orchestrator\")\n\n .about(\"Run the noria lobste.rs benchmark on ec2\")\n\n .arg(\n\n Arg::with_name(\"memory_limit\")\n\n .takes_value(true)\n\n .long(\"memory-limit\")\n\n .help(\"Partial state size limit / eviction threshold [in bytes].\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"memscale\")\n\n .takes_value(true)\n\n .default_value(\"1.0\")\n\n .long(\"memscale\")\n\n .help(\"Memory scale factor for workload\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"availability_zone\")\n\n .long(\"availability-zone\")\n\n .value_name(\"AZ\")\n", "file_path": "noria-benchmarks/lobsters/src/ec2.rs", "rank": 53, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn add_columns() {\n\n let id: DataType = \"x\".into();\n\n\n\n // set up graph\n\n let mut g = start_simple(\"add_columns\");\n\n let a = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![1.into(), 2.into()]));\n\n mig.maintain_anonymous(a, &[0]);\n\n a\n\n });\n\n let mut aq = g.view(\"a\").unwrap().into_sync();\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n\n\n // send a value on a\n\n muta.insert(vec![id.clone(), \"y\".into()]).unwrap();\n\n sleep();\n\n\n\n // check that a got it\n\n assert_eq!(\n\n aq.lookup(&[id.clone()], true).unwrap(),\n", "file_path": "noria-server/src/integration.rs", "rank": 54, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn recipe_activates() {\n\n let mut g = start_simple(\"recipe_activates\");\n\n g.migrate(|mig| {\n\n let r_txt = \"CREATE TABLE b (a text, c text, x text);\\n\";\n\n let mut r = Recipe::from_str(r_txt, None).unwrap();\n\n assert_eq!(r.version(), 0);\n\n assert_eq!(r.expressions().len(), 1);\n\n assert_eq!(r.prior(), None);\n\n assert!(r.activate(mig).is_ok());\n\n });\n\n // one base node\n\n assert_eq!(g.inputs().unwrap().len(), 1);\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 55, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn it_works_deletion() {\n\n // set up graph\n\n let mut g = start_simple(\"it_works_deletion\");\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"x\", \"y\"], Base::new(vec![]).with_key(vec![1]));\n\n let b = mig.add_base(\"b\", &[\"_\", \"x\", \"y\"], Base::new(vec![]).with_key(vec![2]));\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![1, 2]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"x\", \"y\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n\n (a, b, c)\n\n });\n\n\n\n let mut cq = g.view(\"c\").unwrap().into_sync();\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n let mut mutb = g.table(\"b\").unwrap().into_sync();\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 56, "score": 91377.30506336871 }, { "content": "fn main() {\n\n // inline recipe definition\n\n let sql = \"# base tables\n\n CREATE TABLE Article (aid int, title varchar(255), \\\n\n url 
text, PRIMARY KEY(aid));\n\n CREATE TABLE Vote (aid int, uid int);\n\n\n\n # internal view, for shorthand below\n\n VoteCount: SELECT Vote.aid, COUNT(DISTINCT uid) AS votes \\\n\n FROM Vote GROUP BY Vote.aid;\n\n # queryable materialized view\n\n QUERY ArticleWithVoteCount: \\\n\n SELECT Article.aid, title, url, VoteCount.votes AS votes \\\n\n FROM Article, VoteCount \\\n\n WHERE Article.aid = VoteCount.aid AND Article.aid = ?;\";\n\n\n\n let persistence_params = noria_server::PersistenceParameters::new(\n\n noria_server::DurabilityMode::Permanent,\n\n Duration::from_millis(1),\n\n Some(String::from(\"example\")),\n", "file_path": "noria-server/examples/local-server.rs", "rank": 57, "score": 91377.30506336871 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n use std::fs::File;\n\n use std::io::Write;\n\n\n\n let log = noria::logger_pls();\n\n\n\n let matches = App::new(\"extract_queries\")\n\n .version(\"0.1\")\n\n .about(\"Extracts queries from HotCRP code.\")\n\n .arg(\n\n Arg::with_name(\"source\")\n\n .index(1)\n\n .help(\"Location of the HotCRP code to work on.\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n", "file_path": "noria-benchmarks/hotsoup/extract_queries.rs", "rank": 58, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn soupy_lobsters() {\n\n test_queries(\n\n \"soupy_lobsters\",\n\n \"tests/soupy-lobsters-schema.txt\",\n\n false,\n\n false,\n\n false,\n\n );\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 59, "score": 91377.30506336871 }, { "content": "fn main() {\n\n // inline recipe definition\n\n let sql = \"# base tables\n\n CREATE TABLE Article (aid int, title varchar(255), \\\n\n url text, PRIMARY KEY(aid));\n\n CREATE TABLE Vote (aid int, uid int, PRIMARY KEY(aid, uid));\n\n\n\n # read queries\n\n VoteCount: SELECT Vote.aid, COUNT(uid) AS votes \\\n\n FROM Vote GROUP BY Vote.aid;\n\n QUERY ArticleWithVoteCount: \\\n\n SELECT Article.aid, title, url, VoteCount.votes AS votes \\\n\n FROM Article LEFT JOIN VoteCount \\\n\n ON (Article.aid = VoteCount.aid) \\\n\n WHERE Article.aid = ?;\";\n\n\n\n let persistence_params = noria::PersistenceParameters::new(\n\n noria::DurabilityMode::Permanent,\n\n Duration::from_millis(1),\n\n Some(String::from(\"evictorama\")),\n", "file_path": "noria-benchmarks/evict-o-rama/main.rs", "rank": 60, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn crossing_migration() {\n\n // set up graph\n\n let mut g = start_simple(\"crossing_migration\");\n\n let (a, b) = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::default());\n\n (a, b)\n\n });\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n let mut mutb = g.table(\"b\").unwrap().into_sync();\n\n\n\n let _ = g.migrate(move |mig| {\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n\n c\n\n });\n", "file_path": "noria-server/src/integration.rs", "rank": 61, "score": 91377.30506336871 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn do_eviction(\n\n log: &slog::Logger,\n\n memory_limit: Option<usize>,\n\n domain_senders: &mut HashMap<(DomainIndex, usize), TcpSender<Box<Packet>>>,\n\n state_sizes: &Arc<Mutex<HashMap<(DomainIndex, usize), Arc<AtomicUsize>>>>,\n\n) -> impl Future<Item = (), Error = ()> {\n\n 
use std::cmp;\n\n\n\n // 2. add current state sizes (could be out of date, as packet sent below is not\n\n // necessarily received immediately)\n\n let sizes: Vec<((DomainIndex, usize), usize)> = crate::block_on(|| {\n\n let state_sizes = state_sizes.lock().unwrap();\n\n state_sizes\n\n .iter()\n\n .map(|(ds, sa)| {\n\n let size = sa.load(Ordering::Relaxed);\n\n trace!(\n\n log,\n\n \"domain {}.{} state size is {} bytes\",\n\n ds.0.index(),\n", "file_path": "noria-server/src/worker/mod.rs", "rank": 62, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn live_writes() {\n\n let mut g = start_simple(\"live_writes\");\n\n let (_vote, vc) = g.migrate(|mig| {\n\n // migrate\n\n\n\n // add vote base table\n\n let vote = mig.add_base(\"vote\", &[\"user\", \"id\"], Base::default());\n\n\n\n // add vote count\n\n let vc = mig.add_ingredient(\n\n \"votecount\",\n\n &[\"id\", \"votes\"],\n\n Aggregation::COUNT.over(vote, 0, &[1]),\n\n );\n\n\n\n mig.maintain_anonymous(vc, &[0]);\n\n (vote, vc)\n\n });\n\n\n\n let mut vc_state = g.view(\"votecount\").unwrap().into_sync();\n", "file_path": "noria-server/src/integration.rs", "rank": 63, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn key_on_added() {\n\n // set up graph\n\n let mut g = start_simple(\"key_on_added\");\n\n let a = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![1.into(), 2.into()]));\n\n a\n\n });\n\n\n\n // add a maintained view keyed on newly added column\n\n let _ = g.migrate(move |mig| {\n\n mig.add_column(a, \"c\", 3.into());\n\n let b = mig.add_ingredient(\"x\", &[\"c\", \"b\"], Project::new(a, &[2, 1], None, None));\n\n mig.maintain_anonymous(b, &[0]);\n\n b\n\n });\n\n\n\n // make sure we can read (may trigger a replay)\n\n let mut bq = g.view(\"x\").unwrap().into_sync();\n\n assert!(bq.lookup(&[3.into()], true).unwrap().is_empty());\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 64, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn it_works_with_vote() {\n\n let mut g = start_simple(\"it_works_with_vote\");\n\n let sql = \"\n\n # base tables\n\n CREATE TABLE Article (id int, title varchar(255), PRIMARY KEY(id));\n\n CREATE TABLE Vote (article_id int, user int);\n\n\n\n # read queries\n\n QUERY ArticleWithVoteCount: SELECT Article.id, title, VoteCount.votes AS votes \\\n\n FROM Article \\\n\n LEFT JOIN (SELECT Vote.article_id, COUNT(user) AS votes \\\n\n FROM Vote GROUP BY Vote.article_id) AS VoteCount \\\n\n ON (Article.id = VoteCount.article_id) WHERE Article.id = ?;\n\n \";\n\n\n\n g.install_recipe(sql).unwrap();\n\n let mut article = g.table(\"Article\").unwrap().into_sync();\n\n let mut vote = g.table(\"Vote\").unwrap().into_sync();\n\n let mut awvc = g.view(\"ArticleWithVoteCount\").unwrap().into_sync();\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 65, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn finkelstein1982_queries() {\n\n use std::fs::File;\n\n use std::io::Read;\n\n\n\n // set up graph\n\n let mut g = start_simple(\"finkelstein1982_queries\");\n\n g.migrate(|mig| {\n\n let mut inc = SqlIncorporator::default();\n\n let mut f = File::open(\"tests/finkelstein82.txt\").unwrap();\n\n let mut s = String::new();\n\n\n\n // Load queries\n\n f.read_to_string(&mut s).unwrap();\n\n let lines: Vec<String> = s\n\n .lines()\n\n .filter(|l| !l.is_empty() && !l.starts_with('#'))\n\n .map(|l| {\n\n if !(l.ends_with('\\n') || l.ends_with(';')) {\n\n String::from(l) + \"\\n\"\n\n } else {\n", "file_path": 
"noria-server/src/integration.rs", "rank": 66, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn it_works_w_mat() {\n\n // set up graph\n\n let mut g = start_simple(\"it_works_w_mat\");\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::default());\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n\n (a, b, c)\n\n });\n\n\n\n let mut cq = g.view(\"c\").unwrap().into_sync();\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n let mut mutb = g.table(\"b\").unwrap().into_sync();\n\n let id: DataType = 1.into();\n", "file_path": "noria-server/src/integration.rs", "rank": 67, "score": 91377.30506336871 }, { "content": "fn to_conditions(\n\n chained_filters: &[MirNodeRef],\n\n num_columns: usize,\n\n) -> Vec<Option<FilterCondition>> {\n\n let mut merged_conditions = vec![None; num_columns];\n\n for filter in chained_filters {\n\n match filter.borrow().inner {\n\n MirNodeType::Filter { ref conditions } => {\n\n // Note that this assumes that there is only ever one column being filtered on for\n\n // each filter that is being merged.\n\n let i = conditions.iter().position(Option::is_some).unwrap();\n\n merged_conditions[i] = conditions[i].clone();\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n merged_conditions\n\n}\n\n\n\n// currently unused\n", "file_path": "noria-server/mir/src/optimize.rs", "rank": 68, "score": 91377.30506336871 }, { "content": "// returns true if next target is feasible\n\nfn run_clients(\n\n iter: usize,\n\n nclients: usize,\n\n do_perf: Perf,\n\n clients: &Vec<tsunami::Machine>,\n\n ccores: u16,\n\n server: &mut server::Server,\n\n target: usize,\n\n params: ClientParameters,\n\n) -> bool {\n\n // first, we need to prime from some host -- doesn't really matter which\n\n {\n\n clients[0].ssh.as_ref().unwrap().set_timeout(0);\n\n eprintln!(\" .. prepopulating on {}\", clients[0].public_dns);\n\n\n\n let mut prime_params = params.clone();\n\n prime_params.warmup = 0;\n\n prime_params.runtime = 0;\n\n let mut cmd = Vec::<Cow<str>>::new();\n\n cmd.push(\"env\".into());\n", "file_path": "noria-benchmarks/vote/orchestrator.rs", "rank": 69, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn base_mutation() {\n\n use noria::{Modification, Operation};\n\n\n\n let mut g = start_simple(\"base_mutation\");\n\n g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![]).with_key(vec![0]));\n\n mig.maintain_anonymous(a, &[0]);\n\n });\n\n\n\n let mut read = g.view(\"a\").unwrap().into_sync();\n\n let mut write = g.table(\"a\").unwrap().into_sync();\n\n\n\n // insert a new record\n\n write.insert(vec![1.into(), 2.into()]).unwrap();\n\n sleep();\n\n assert_eq!(\n\n read.lookup(&[1.into()], true).unwrap(),\n\n vec![vec![1.into(), 2.into()]]\n\n );\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 70, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn replay_during_replay() {\n\n // what we're trying to set up here is a case where a join receives a record with a value for\n\n // the join key that does not exist in the view the record was sent from. 
since joins only do\n\n // lookups into the origin view during forward processing when it receives things from the\n\n // right in a left join, that's what we have to construct.\n\n let mut g = Builder::default();\n\n g.disable_partial();\n\n g.set_persistence(get_persistence_params(\"replay_during_replay\"));\n\n let mut g = g.start_simple().unwrap();\n\n let (a, u1, u2) = g.migrate(|mig| {\n\n // we need three bases:\n\n //\n\n // - a will be the left side of the left join\n\n // - u1 and u2 will be joined together with a regular one-to-one join to produce a partial\n\n // view (remember, we need to miss in the source of the replay, so it must be partial).\n\n let a = mig.add_base(\"a\", &[\"a\"], Base::new(vec![1.into()]));\n\n let u1 = mig.add_base(\"u1\", &[\"u\"], Base::new(vec![1.into()]));\n\n let u2 = mig.add_base(\"u2\", &[\"u\", \"a\"], Base::new(vec![1.into(), 2.into()]));\n\n (a, u1, u2)\n\n });\n", "file_path": "noria-server/src/integration.rs", "rank": 71, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn remove_query() {\n\n let r_txt = \"CREATE TABLE b (a int, c text, x text);\\n\n\n QUERY qa: SELECT a FROM b;\\n\n\n QUERY qb: SELECT a, c FROM b WHERE a = 42;\";\n\n\n\n let r2_txt = \"CREATE TABLE b (a int, c text, x text);\\n\n\n QUERY qa: SELECT a FROM b;\";\n\n\n\n let mut g = Builder::default().start_simple().unwrap();\n\n g.install_recipe(r_txt).unwrap();\n\n assert_eq!(g.inputs().unwrap().len(), 1);\n\n assert_eq!(g.outputs().unwrap().len(), 2);\n\n\n\n let mut mutb = g.table(\"b\").unwrap().into_sync();\n\n let mut qa = g.view(\"qa\").unwrap().into_sync();\n\n let mut qb = g.view(\"qb\").unwrap().into_sync();\n\n\n\n mutb.insert(vec![42.into(), \"2\".into(), \"3\".into()])\n\n .unwrap();\n\n mutb.insert(vec![1.into(), \"4\".into(), \"5\".into()]).unwrap();\n", "file_path": "noria-server/src/integration.rs", "rank": 72, "score": 91377.30506336871 }, { "content": "fn build_graph(\n\n authority: Arc<ZookeeperAuthority>,\n\n persistence: PersistenceParameters,\n\n verbose: bool,\n\n) -> SyncHandle<ZookeeperAuthority> {\n\n let mut builder = Builder::default();\n\n if verbose {\n\n builder.log_with(noria::logger_pls());\n\n }\n\n\n\n builder.set_persistence(persistence);\n\n builder.set_sharding(None);\n\n let mut rt = tokio::runtime::Runtime::new().unwrap();\n\n let fut = builder.start(authority);\n\n let wh = rt.block_on(fut).unwrap();\n\n SyncHandle::from_existing(rt, wh)\n\n}\n\n\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 73, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn mutator_churn() {\n\n let mut g = start_simple(\"mutator_churn\");\n\n let _ = g.migrate(|mig| {\n\n // migrate\n\n\n\n // add vote base table\n\n let vote = mig.add_base(\"vote\", &[\"user\", \"id\"], Base::default());\n\n\n\n // add vote count\n\n let vc = mig.add_ingredient(\n\n \"votecount\",\n\n &[\"id\", \"votes\"],\n\n Aggregation::COUNT.over(vote, 0, &[1]),\n\n );\n\n\n\n mig.maintain_anonymous(vc, &[0]);\n\n (vote, vc)\n\n });\n\n\n\n let mut vc_state = g.view(\"votecount\").unwrap().into_sync();\n", "file_path": "noria-server/src/integration.rs", "rank": 74, "score": 91377.30506336871 }, { "content": "fn main() {\n\n let args = App::new(\"trawler-mysql\")\n\n .version(\"0.1\")\n\n .about(\"Benchmark a lobste.rs Rails installation using MySQL directly\")\n\n .arg(\n\n Arg::with_name(\"memscale\")\n\n .long(\"memscale\")\n\n .takes_value(true)\n\n .default_value(\"1.0\")\n\n .help(\"Memory scale factor for workload\"),\n\n )\n\n .arg(\n\n 
Arg::with_name(\"reqscale\")\n\n .long(\"reqscale\")\n\n .takes_value(true)\n\n .default_value(\"1.0\")\n\n .help(\"Reuest load scale factor for workload\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"in-flight\")\n", "file_path": "noria-benchmarks/lobsters/src/main.rs", "rank": 75, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn materialization_frontier() {\n\n // set up graph\n\n let mut g = start_simple_unsharded(\"materialization_frontier\");\n\n g.migrate(|mig| {\n\n // migrate\n\n\n\n // add article base node\n\n let article = mig.add_base(\"article\", &[\"id\", \"title\"], Base::default());\n\n\n\n // add vote base table\n\n let vote = mig.add_base(\n\n \"vote\",\n\n &[\"user\", \"id\"],\n\n Base::default().with_key(vec![0, 1]),\n\n );\n\n\n\n // add vote count\n\n let vc = mig.add_ingredient(\n\n \"votecount\",\n\n &[\"id\", \"votes\"],\n", "file_path": "noria-server/src/integration.rs", "rank": 76, "score": 91377.30506336871 }, { "content": "#[test]\n\nfn it_works_basic() {\n\n // set up graph\n\n let mut b = Builder::default();\n\n b.set_persistence(PersistenceParameters::new(\n\n DurabilityMode::DeleteOnExit,\n\n Duration::from_millis(1),\n\n Some(String::from(\"it_works_basic\")),\n\n 1,\n\n ));\n\n let mut g = b.start_simple().unwrap();\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![]).with_key(vec![0]));\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::new(vec![]).with_key(vec![0]));\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n emits.insert(b, vec![0, 1]);\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n", "file_path": "noria-server/src/integration.rs", "rank": 77, "score": 91377.30506336871 }, { "content": "trait ReuseConfiguration {\n\n fn reuse_candidates<'a>(\n\n qg: &QueryGraph,\n\n query_graphs: &'a HashMap<u64, QueryGraph>,\n\n ) -> Vec<(ReuseType, (u64, &'a QueryGraph))>;\n\n}\n", "file_path": "noria-server/src/controller/sql/reuse/mod.rs", "rank": 78, "score": 90983.92285687287 }, { "content": "trait GroupingUDF {\n\n fn visit(&mut self, item: &DataType) -> DataType;\n\n fn unvisit(&mut self, item: &DataType) -> DataType;\n\n}\n\n\n\nimpl GroupingUDF for TestCount {\n\n fn visit(&mut self, _item: &DataType) -> DataType {\n\n self.0 += 1;\n\n self.0.into()\n\n }\n\n\n\n fn unvisit(&mut self, _item: &DataType) -> DataType {\n\n self.0 -= 1;\n\n self.0.into()\n\n }\n\n}\n\n\n\nimpl GroupingUDF for GroupingFuncType {\n\n fn visit(&mut self, item: &DataType) -> DataType {\n\n impl_grouping_udf_type!(self, visit, item)\n\n }\n\n fn unvisit(&mut self, item: &DataType) -> DataType {\n\n impl_grouping_udf_type!(self, unvisit, item)\n\n }\n\n}\n\n\n", "file_path": "noria-server/dataflow/src/ops/ohua/att2.rs", "rank": 79, "score": 90983.92285687287 }, { "content": "pub trait SizeOf {\n\n fn deep_size_of(&self) -> u64;\n\n fn size_of(&self) -> u64;\n\n}\n\n\n\nimpl SizeOf for DataType {\n\n fn deep_size_of(&self) -> u64 {\n\n use std::mem::size_of_val;\n\n\n\n let inner = match *self {\n\n DataType::Text(ref t) => size_of_val(t) as u64 + t.to_bytes().len() as u64,\n\n _ => 0u64,\n\n };\n\n\n\n self.size_of() + inner\n\n }\n\n\n\n fn size_of(&self) -> u64 {\n\n use std::mem::size_of;\n\n\n", "file_path": "noria-server/common/src/lib.rs", "rank": 80, "score": 90335.37450233438 }, { "content": "// Reads each row from one of the secondary indices.\n\nfn perform_secondary_reads(\n\n g: &mut 
SyncHandle<ZookeeperAuthority>,\n\n hist: &mut Histogram<u64>,\n\n rows: i64,\n\n row_ids: Vec<i64>,\n\n) {\n\n let indices = 10;\n\n let mut getters: Vec<_> = (1..indices)\n\n .map(|i| g.view(&format!(\"query_c{}\", i)).unwrap().into_sync())\n\n .collect();\n\n\n\n let skewed = row_ids.len() == 1;\n\n for i in row_ids {\n\n let id: DataType = DataType::BigInt(i);\n\n let start = Instant::now();\n\n // Pick an arbitrary secondary index to use:\n\n let getter = &mut getters[i as usize % (indices - 1)];\n\n let rs = getter.lookup(&[id], true).unwrap();\n\n let elapsed = start.elapsed();\n\n let us = elapsed.as_secs() * 1_000_000 + u64::from(elapsed.subsec_nanos()) / 1_000;\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 81, "score": 90222.49373531484 }, { "content": "// Reads every row with the primary key index.\n\nfn perform_primary_reads(\n\n g: &mut SyncHandle<ZookeeperAuthority>,\n\n hist: &mut Histogram<u64>,\n\n row_ids: Vec<i64>,\n\n) {\n\n let mut getter = g.view(\"ReadRow\").unwrap().into_sync();\n\n\n\n for i in row_ids {\n\n let id: DataType = DataType::BigInt(i);\n\n let start = Instant::now();\n\n let rs = getter.lookup(&[id], true).unwrap();\n\n let elapsed = start.elapsed();\n\n let us = elapsed.as_secs() * 1_000_000 + u64::from(elapsed.subsec_nanos()) / 1_000;\n\n assert_eq!(rs.len(), 1);\n\n for j in 0..10 {\n\n assert_eq!(DataType::BigInt(i), rs[0][j]);\n\n }\n\n\n\n if hist.record(us).is_err() {\n\n let m = hist.high();\n\n hist.record(m).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "noria-benchmarks/replay/main.rs", "rank": 82, "score": 90222.44388780982 }, { "content": "#[test]\n\nfn it_works_with_sql_recipe() {\n\n let mut g = start_simple(\"it_works_with_sql_recipe\");\n\n let sql = \"\n\n CREATE TABLE Car (id int, brand varchar(255), PRIMARY KEY(id));\n\n QUERY CountCars: SELECT COUNT(*) FROM Car WHERE brand = ?;\n\n \";\n\n g.install_recipe(sql).unwrap();\n\n\n\n let mut mutator = g.table(\"Car\").unwrap().into_sync();\n\n let mut getter = g.view(\"CountCars\").unwrap().into_sync();\n\n\n\n assert_eq!(mutator.table_name(), \"Car\");\n\n assert_eq!(mutator.columns(), &[\"id\", \"brand\"]);\n\n\n\n let brands = vec![\"Volvo\", \"Volvo\", \"Volkswagen\"];\n\n for (i, &brand) in brands.iter().enumerate() {\n\n mutator.insert(vec![i.into(), brand.into()]).unwrap();\n\n }\n\n\n\n // Let writes propagate:\n\n sleep();\n\n\n\n // Retrieve the result of the count query:\n\n let result = getter.lookup(&[\"Volvo\".into()], true).unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0][0], 2.into());\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 83, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_w_partial_mat() {\n\n // set up graph\n\n let mut g = start_simple(\"it_works_w_partial_mat\");\n\n let (a, b) = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n let b = mig.add_base(\"b\", &[\"a\", \"b\"], Base::default());\n\n (a, b)\n\n });\n\n\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n let id: DataType = 1.into();\n\n\n\n // send a few values on a\n\n muta.insert(vec![id.clone(), 1.into()]).unwrap();\n\n muta.insert(vec![id.clone(), 2.into()]).unwrap();\n\n muta.insert(vec![id.clone(), 3.into()]).unwrap();\n\n\n\n // give it some time to propagate\n\n sleep();\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 84, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_simple_arithmetic() {\n\n let mut g = 
start_simple(\"it_works_with_simple_arithmetic\");\n\n\n\n g.migrate(|mig| {\n\n let sql = \"CREATE TABLE Car (id int, price int, PRIMARY KEY(id));\n\n QUERY CarPrice: SELECT 2 * price FROM Car WHERE id = ?;\";\n\n let mut recipe = Recipe::from_str(&sql, None).unwrap();\n\n recipe.activate(mig).unwrap();\n\n });\n\n\n\n let mut mutator = g.table(\"Car\").unwrap().into_sync();\n\n let mut getter = g.view(\"CarPrice\").unwrap().into_sync();\n\n let id: DataType = 1.into();\n\n let price: DataType = 123.into();\n\n mutator.insert(vec![id.clone(), price]).unwrap();\n\n\n\n // Let writes propagate:\n\n sleep();\n\n\n\n // Retrieve the result of the count query:\n\n let result = getter.lookup(&[id.clone()], true).unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0][1], 246.into());\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 85, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_function_arithmetic() {\n\n let mut g = start_simple(\"it_works_with_function_arithmetic\");\n\n let sql = \"\n\n CREATE TABLE Bread (id int, price int, PRIMARY KEY(id));\n\n QUERY Price: SELECT 2 * MAX(price) FROM Bread;\n\n \";\n\n g.install_recipe(sql).unwrap();\n\n\n\n let mut mutator = g.table(\"Bread\").unwrap().into_sync();\n\n let mut getter = g.view(\"Price\").unwrap().into_sync();\n\n let max_price = 20;\n\n for (i, price) in (10..=max_price).enumerate() {\n\n let id = i + 1;\n\n mutator.insert(vec![id.into(), price.into()]).unwrap();\n\n }\n\n\n\n // Let writes propagate:\n\n sleep();\n\n\n\n let result = getter.lookup(&[0.into()], true).unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0][0], DataType::from(max_price * 2));\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 86, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn migrate_added_columns() {\n\n let id: DataType = \"x\".into();\n\n\n\n // set up graph\n\n let mut g = start_simple(\"migrate_added_columns\");\n\n let a = g.migrate(|mig| mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![1.into(), 2.into()])));\n\n let mut muta = g.table(\"a\").unwrap().into_sync();\n\n\n\n // send a value on a\n\n muta.insert(vec![id.clone(), \"y\".into()]).unwrap();\n\n sleep();\n\n\n\n // add a third column to a, and a view that uses it\n\n let _ = g.migrate(move |mig| {\n\n mig.add_column(a, \"c\", 3.into());\n\n let b = mig.add_ingredient(\"x\", &[\"c\", \"b\"], Project::new(a, &[2, 0], None, None));\n\n mig.maintain_anonymous(b, &[1]);\n\n b\n\n });\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 87, "score": 90216.65239967019 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let log = noria::logger_pls();\n\n\n\n let matches = App::new(\"process_paper_queries\")\n\n .version(\"0.1\")\n\n .about(\"Process extracted HotCRP paper queries.\")\n\n .arg(\n\n Arg::with_name(\"source\")\n\n .index(1)\n\n .help(\n\n \"Location of the HotCRP paper queries (in MySQL query log form).\",\n\n )\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"start_at\")\n\n .default_value(\"10\")\n\n .long(\"start_at\")\n", "file_path": "noria-benchmarks/hotsoup/process_paper_queries.rs", "rank": 88, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_reads_before_writes() {\n\n let mut g = start_simple(\"it_works_with_reads_before_writes\");\n\n let sql = \"\n\n CREATE TABLE Article (aid int, PRIMARY KEY(aid));\n\n CREATE TABLE Vote (aid int, uid int, PRIMARY KEY(aid, uid));\n\n QUERY ArticleVote: SELECT Article.aid, Vote.uid 
\\\n\n FROM Article, Vote \\\n\n WHERE Article.aid = Vote.aid AND Article.aid = ?;\n\n \";\n\n\n\n g.install_recipe(sql).unwrap();\n\n let mut article = g.table(\"Article\").unwrap().into_sync();\n\n let mut vote = g.table(\"Vote\").unwrap().into_sync();\n\n let mut awvc = g.view(\"ArticleVote\").unwrap().into_sync();\n\n\n\n let aid = 1;\n\n let uid = 10;\n\n\n\n assert!(awvc.lookup(&[aid.into()], true).unwrap().is_empty());\n\n article.insert(vec![aid.into()]).unwrap();\n\n sleep();\n\n\n\n vote.insert(vec![aid.into(), uid.into()]).unwrap();\n\n sleep();\n\n\n\n let result = awvc.lookup(&[aid.into()], true).unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0], vec![aid.into(), uid.into()]);\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 89, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn shared_interdomain_ancestor() {\n\n // set up graph\n\n let mut g = start_simple(\"shared_interdomain_ancestor\");\n\n let _ = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::default());\n\n\n\n let mut emits = HashMap::new();\n\n emits.insert(a, vec![0, 1]);\n\n\n\n let u = Union::new(emits.clone());\n\n let b = mig.add_ingredient(\"b\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(b, &[0]);\n\n\n\n let u = Union::new(emits);\n\n let c = mig.add_ingredient(\"c\", &[\"a\", \"b\"], u);\n\n mig.maintain_anonymous(c, &[0]);\n\n (a, b, c)\n\n });\n\n\n\n let mut bq = g.view(\"b\").unwrap().into_sync();\n", "file_path": "noria-server/src/integration.rs", "rank": 90, "score": 90216.65239967019 }, { "content": "/// Modify the graph such that the path between `src` and `dst` shuffles the input such that the\n\n/// records received by `dst` are sharded by sharding `to`.\n\nfn reshard(\n\n log: &Logger,\n\n new: &mut HashSet<NodeIndex>,\n\n swaps: &mut HashMap<(NodeIndex, NodeIndex), NodeIndex>,\n\n graph: &mut Graph,\n\n src: NodeIndex,\n\n dst: NodeIndex,\n\n to: Sharding,\n\n) {\n\n assert!(!graph[src].is_source());\n\n\n\n if graph[src].sharded_by().is_none() && to.is_none() {\n\n debug!(log, \"no need to shuffle\";\n\n \"src\" => ?src,\n\n \"dst\" => ?dst,\n\n \"sharding\" => ?to);\n\n return;\n\n }\n\n\n\n let node = match to {\n", "file_path": "noria-server/src/controller/migrate/sharding.rs", "rank": 91, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_join_arithmetic() {\n\n let mut g = start_simple(\"it_works_with_join_arithmetic\");\n\n let sql = \"\n\n CREATE TABLE Car (car_id int, price_id int, PRIMARY KEY(car_id));\n\n CREATE TABLE Price (price_id int, price int, PRIMARY KEY(price_id));\n\n CREATE TABLE Sales (sales_id int, price_id int, fraction float, PRIMARY KEY(sales_id));\n\n QUERY CarPrice: SELECT price * fraction FROM Car \\\n\n JOIN Price ON Car.price_id = Price.price_id \\\n\n JOIN Sales ON Price.price_id = Sales.price_id \\\n\n WHERE car_id = ?;\n\n \";\n\n g.install_recipe(sql).unwrap();\n\n\n\n let mut car_mutator = g.table(\"Car\").unwrap().into_sync();\n\n let mut price_mutator = g.table(\"Price\").unwrap().into_sync();\n\n let mut sales_mutator = g.table(\"Sales\").unwrap().into_sync();\n\n let mut getter = g.view(\"CarPrice\").unwrap().into_sync();\n\n let id = 1;\n\n let price = 123;\n\n let fraction = 0.7;\n", "file_path": "noria-server/src/integration.rs", "rank": 92, "score": 90216.65239967019 }, { "content": "fn main() {\n\n let args = App::new(\"purge-stress\")\n\n .about(\"Benchmarks the latency of full replays in a user-curated news aggregator\")\n\n .arg(\n\n 
Arg::with_name(\"flush-timeout\")\n\n .long(\"flush-timeout\")\n\n .takes_value(true)\n\n .default_value(\"100000\")\n\n .help(\"Time to wait before processing a merged packet, in nanoseconds.\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"replay-timeout\")\n\n .long(\"replay-timeout\")\n\n .takes_value(true)\n\n .default_value(\"100000\")\n\n .help(\"Time to batch replay requests for, in nanoseconds.\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"time\")\n\n .short(\"t\")\n", "file_path": "noria-benchmarks/vote-purge-stress/main.rs", "rank": 93, "score": 90216.65239967019 }, { "content": "fn main() {\n\n use clap::{App, Arg};\n\n\n\n let args = App::new(\"vote-dbtoaster-style\")\n\n .about(\"Benchmarks Soup in a DBToaster-like vote setup\")\n\n .arg(\n\n Arg::with_name(\"articles\")\n\n .short(\"a\")\n\n .long(\"articles\")\n\n .default_value(\"500000\")\n\n .help(\"Number of articles to prepopulate the database with\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"votes\")\n\n .index(1)\n\n .value_name(\"VOTES\")\n\n .default_value(\"50000000\")\n\n .help(\"Number of votes to issue\"),\n\n )\n\n .arg(\n", "file_path": "noria-benchmarks/vote-dbtoaster-style/main.rs", "rank": 94, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn migrate_drop_columns() {\n\n let id: DataType = \"x\".into();\n\n\n\n // set up graph\n\n let mut g = start_simple(\"migrate_drop_columns\");\n\n let a = g.migrate(|mig| {\n\n let a = mig.add_base(\"a\", &[\"a\", \"b\"], Base::new(vec![\"a\".into(), \"b\".into()]));\n\n mig.maintain_anonymous(a, &[0]);\n\n a\n\n });\n\n let mut aq = g.view(\"a\").unwrap().into_sync();\n\n let mut muta1 = g.table(\"a\").unwrap().into_sync();\n\n\n\n // send a value on a\n\n muta1.insert(vec![id.clone(), \"bx\".into()]).unwrap();\n\n\n\n // check that it's there\n\n sleep();\n\n let res = aq.lookup(&[id.clone()], true).unwrap();\n\n assert_eq!(res.len(), 1);\n", "file_path": "noria-server/src/integration.rs", "rank": 95, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_recovers_persisted_bases() {\n\n let authority = Arc::new(LocalAuthority::new());\n\n let dir = tempfile::tempdir().unwrap();\n\n let path = dir.path().join(\"it_recovers_persisted_bases\");\n\n let persistence_params = PersistenceParameters::new(\n\n DurabilityMode::Permanent,\n\n Duration::from_millis(1),\n\n Some(path.to_string_lossy().into()),\n\n 1,\n\n );\n\n\n\n {\n\n let mut g = Builder::default();\n\n g.set_persistence(persistence_params.clone());\n\n let mut g = wrap_sync(g.start(authority.clone()));\n\n\n\n let sql = \"\n\n CREATE TABLE Car (id int, price int, PRIMARY KEY(id));\n\n QUERY CarPrice: SELECT price FROM Car WHERE id = ?;\n\n \";\n", "file_path": "noria-server/src/integration.rs", "rank": 96, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_double_query_through() {\n\n let mut builder = Builder::default();\n\n builder.set_persistence(get_persistence_params(\"it_works_with_double_query_through\"));\n\n // TODO: sharding::shard picks the wrong column to shard on, since both aid and bid resolves to\n\n // all ancestors (and bid comes first). 
The reader is on aid though, so the sharder should pick\n\n // that as well (and not bid!).\n\n builder.set_sharding(None);\n\n let mut g = builder.start_simple().unwrap();\n\n let sql = \"\n\n # base tables\n\n CREATE TABLE A (aid int, other int, PRIMARY KEY(aid));\n\n CREATE TABLE B (bid int, PRIMARY KEY(bid));\n\n\n\n # read queries\n\n QUERY ReadJoin: SELECT J.aid, J.other \\\n\n FROM B \\\n\n LEFT JOIN (SELECT A.aid, A.other FROM A \\\n\n WHERE A.other = 5) AS J \\\n\n ON (J.aid = B.bid) \\\n\n WHERE J.aid = ?;\n", "file_path": "noria-server/src/integration.rs", "rank": 97, "score": 90216.65239967019 }, { "content": "#[test]\n\nfn it_works_with_arithmetic_aliases() {\n\n let mut g = start_simple(\"it_works_with_arithmetic_aliases\");\n\n let sql = \"\n\n CREATE TABLE Price (pid int, cent_price int, PRIMARY KEY(pid));\n\n ModPrice: SELECT pid, cent_price / 100 AS price FROM Price;\n\n QUERY AltPrice: SELECT pid, price FROM ModPrice WHERE pid = ?;\n\n \";\n\n g.install_recipe(sql).unwrap();\n\n\n\n let mut price_mutator = g.table(\"Price\").unwrap().into_sync();\n\n let mut getter = g.view(\"AltPrice\").unwrap().into_sync();\n\n let pid = 1;\n\n let price = 10000;\n\n price_mutator\n\n .insert(vec![pid.into(), price.into()])\n\n .unwrap();\n\n\n\n // Let writes propagate:\n\n sleep();\n\n\n\n // Retrieve the result of the count query:\n\n let result = getter.lookup(&[pid.into()], true).unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0][1], (price / 100).into());\n\n}\n\n\n", "file_path": "noria-server/src/integration.rs", "rank": 98, "score": 90216.65239967019 }, { "content": "fn git_and_cargo(\n\n ssh: &mut Session,\n\n dir: &str,\n\n bin: &str,\n\n branch: Option<&str>,\n\n) -> Result<(), failure::Error> {\n\n eprintln!(\" -> git reset\");\n\n ssh.cmd(&format!(\"bash -c 'git -C {} reset --hard 2>&1'\", dir))\n\n .map(|out| {\n\n let out = out.trim_end();\n\n if !out.is_empty() && !out.contains(\"Already up-to-date.\") {\n\n eprintln!(\"{}\", out);\n\n }\n\n })?;\n\n\n\n if let Some(branch) = branch {\n\n eprintln!(\" -> git checkout {}\", branch);\n\n ssh.cmd(&format!(\n\n \"bash -c 'git -C {} checkout {} 2>&1'\",\n\n dir, branch\n", "file_path": "noria-benchmarks/lobsters/src/ec2.rs", "rank": 99, "score": 90216.65239967019 } ]
Rust
src/config.rs
pckilgore/syngesture
fe30480f7567e802713c0f85c15f6b60a72e4d9e
use crate::events::*; use serde::Deserialize; use std::collections::BTreeMap; use std::ffi::OsStr; use std::path::{Path, PathBuf}; const PREFIX: Option<&'static str> = option_env!("PREFIX"); pub(crate) type Device = String; pub(crate) type GestureMap = BTreeMap<Gesture, Action>; type BoxedError = Box<dyn std::error::Error + Send + Sync>; type Result<T> = std::result::Result<T, BoxedError>; pub(crate) struct Configuration { pub devices: BTreeMap<Device, GestureMap>, } impl Configuration { pub fn new() -> Self { Self { devices: Default::default(), } } } #[derive(Deserialize)] #[serde(rename_all = "lowercase")] pub(crate) enum Action { #[serde(skip)] None, Execute(String), } impl Default for Action { fn default() -> Self { Action::None } } pub(crate) fn load() -> Configuration { let mut config = Configuration::new(); let prefix = PathBuf::from(PREFIX.unwrap_or("/usr/local")); let global_config = prefix.join("etc/syngestures.toml"); if global_config.exists() { try_load_config_file(&mut config, &global_config); } let global_config_dir = prefix.join("etc/syngestures.d"); try_load_config_dir(&mut config, &global_config_dir); load_user_config(&mut config); if config.devices.is_empty() { eprintln!("No configuration found!"); eprintln!("Searched for configuration files in the following locations:"); eprintln!("* {}/etc/syngestures.toml", global_config_dir.display()); eprintln!("* {}/etc/syngestures.d/*.toml", global_config_dir.display()); eprintln!("* $XDG_HOME/syngestures.toml"); eprintln!("* $XDG_HOME/syngestures.d/*.toml"); eprintln!("* $HOME/.config/syngestures.toml"); eprintln!("* $HOME/.config/syngestures.d/*.toml"); } config } fn try_load_config_file(config: &mut Configuration, path: &Path) { if let Err(e) = load_config_file(config, &path) { eprintln!( "Error loading configuration file at {}: {}", path.display(), e ); } } fn try_load_config_dir(config: &mut Configuration, dir: &Path) { if let Err(e) = load_config_dir(config, &dir) { eprintln!( "Error reading from configuration directory {}: {}", dir.display(), e ); } } fn load_user_config(mut config: &mut Configuration) { use std::env::VarError; let config_home = match std::env::var("XDG_CONFIG_HOME") { Ok(xdg_config_home) => PathBuf::from(xdg_config_home), Err(VarError::NotPresent) => match get_user_config_dir() { Ok(dir) => PathBuf::from(dir), Err(e) => { eprintln!("{}", e); return; } }, Err(VarError::NotUnicode(_)) => { eprintln!("Invalid XDG_CONFIG_HOME"); return; } }; let user_config_file = config_home.join("syngestures.toml"); if user_config_file.exists() { try_load_config_file(&mut config, &user_config_file); } let user_config_dir = config_home.join("syngestures.d"); try_load_config_dir(&mut config, &user_config_dir); } fn get_user_config_dir() -> Result<PathBuf> { #[allow(deprecated)] let home = std::env::home_dir(); if home.is_none() || home.as_ref().unwrap() == &PathBuf::new() { return Err("Could not determine user home directory!".into()); } let config_home = home.unwrap().join(".config/"); Ok(config_home) } fn load_config_dir(mut config: &mut Configuration, dir: &Path) -> Result<()> { use std::fs::DirEntry; if !dir.exists() || !dir.is_dir() { return Ok(()); } let toml = OsStr::new("toml"); for item in dir.read_dir()? 
{ let item = match item { Ok(item) => item, Err(e) => { eprintln!( "Error reading file from configuration directory {}: {}", dir.display(), e ); continue; } }; let mut process_item = |item: &DirEntry| -> Result<()> { if item.file_type()?.is_dir() { return Ok(()); } let item = item.path(); if item.extension() != Some(toml) { return Ok(()); } try_load_config_file(&mut config, &item); Ok(()) }; if let Err(e) = process_item(&item) { eprintln!("Error loading {}: {}", item.path().to_string_lossy(), e); } } Ok(()) } fn load_config_file(config: &mut Configuration, path: &Path) -> Result<()> { #[derive(Deserialize)] struct ConfigGestureAndAction { #[serde(flatten)] pub gesture: Gesture, #[serde(flatten)] pub action: Action, } #[derive(Deserialize)] struct ConfigDeviceGestures { pub device: Device, pub gestures: Vec<ConfigGestureAndAction>, } #[derive(Deserialize)] struct ConfigFile { #[serde(alias = "device")] pub devices: Vec<ConfigDeviceGestures>, } let bytes = std::fs::read(path)?; let config_file: ConfigFile = toml::from_slice(&bytes)?; for device_config in config_file.devices { let device = device_config.device; let device_gestures = config.devices.entry(device).or_default(); for gesture_action in device_config.gestures { device_gestures.insert(gesture_action.gesture, gesture_action.action); } } Ok(()) }
use crate::events::*; use serde::Deserialize; use std::collections::BTreeMap; use std::ffi::OsStr; use std::path::{Path, PathBuf}; const PREFIX: Option<&'static str> = option_env!("PREFIX"); pub(crate) type Device = String; pub(crate) type GestureMap = BTreeMap<Gesture, Action>; type BoxedError = Box<dyn std::error::Error + Send + Sync>; type Result<T> = std::result::Result<T, BoxedError>; pub(crate) struct Configuration { pub devices: BTreeMap<Device, GestureMap>, } impl Configuration { pub fn new() -> Self { Self { devices: Default::default(), } } } #[derive(Deserialize)] #[serde(rename_all = "lowercase")] pub(crate) enum Action { #[serde(skip)] None, Execute(String), } impl Default for Action { fn default() -> Self { Action::None } } pub(crate) fn load() -> Configuration { let mut config = Configuration::new(); let prefix = PathBuf::from(PREFIX.unwrap_or("/usr/local")); let global_config = prefix.join("etc/syngestures.toml"); if global_config.exists() { try_load_config_file(&mut config, &global_config); } let global_config_dir = prefix.join("etc/syngestures.d"); try_load_config_dir(&mut config, &global_config_dir); load_user_config(&mut config); if config.devices.is_empty() { eprintln!("No configuration found!"); eprintln!("Searched for configuration files in the following locations:"); eprintln!("* {}/etc/syngestures.toml", global_config_dir.display()); eprintln!("* {}/etc/syngestures.d/*.toml", global_config_dir.display()); eprintln!("* $XDG_HOME/syngestures.toml"); eprintln!("* $XDG_HOME/syngestures.d/*.toml"); eprintln!("* $HOME/.config/syngestures.toml"); eprintln!("* $HOME/.config/syngestures.d/*.toml"); } config } fn try_load_config_file(config: &mut Configuration, pat
fn try_load_config_dir(config: &mut Configuration, dir: &Path) { if let Err(e) = load_config_dir(config, &dir) { eprintln!( "Error reading from configuration directory {}: {}", dir.display(), e ); } } fn load_user_config(mut config: &mut Configuration) { use std::env::VarError; let config_home = match std::env::var("XDG_CONFIG_HOME") { Ok(xdg_config_home) => PathBuf::from(xdg_config_home), Err(VarError::NotPresent) => match get_user_config_dir() { Ok(dir) => PathBuf::from(dir), Err(e) => { eprintln!("{}", e); return; } }, Err(VarError::NotUnicode(_)) => { eprintln!("Invalid XDG_CONFIG_HOME"); return; } }; let user_config_file = config_home.join("syngestures.toml"); if user_config_file.exists() { try_load_config_file(&mut config, &user_config_file); } let user_config_dir = config_home.join("syngestures.d"); try_load_config_dir(&mut config, &user_config_dir); } fn get_user_config_dir() -> Result<PathBuf> { #[allow(deprecated)] let home = std::env::home_dir(); if home.is_none() || home.as_ref().unwrap() == &PathBuf::new() { return Err("Could not determine user home directory!".into()); } let config_home = home.unwrap().join(".config/"); Ok(config_home) } fn load_config_dir(mut config: &mut Configuration, dir: &Path) -> Result<()> { use std::fs::DirEntry; if !dir.exists() || !dir.is_dir() { return Ok(()); } let toml = OsStr::new("toml"); for item in dir.read_dir()? { let item = match item { Ok(item) => item, Err(e) => { eprintln!( "Error reading file from configuration directory {}: {}", dir.display(), e ); continue; } }; let mut process_item = |item: &DirEntry| -> Result<()> { if item.file_type()?.is_dir() { return Ok(()); } let item = item.path(); if item.extension() != Some(toml) { return Ok(()); } try_load_config_file(&mut config, &item); Ok(()) }; if let Err(e) = process_item(&item) { eprintln!("Error loading {}: {}", item.path().to_string_lossy(), e); } } Ok(()) } fn load_config_file(config: &mut Configuration, path: &Path) -> Result<()> { #[derive(Deserialize)] struct ConfigGestureAndAction { #[serde(flatten)] pub gesture: Gesture, #[serde(flatten)] pub action: Action, } #[derive(Deserialize)] struct ConfigDeviceGestures { pub device: Device, pub gestures: Vec<ConfigGestureAndAction>, } #[derive(Deserialize)] struct ConfigFile { #[serde(alias = "device")] pub devices: Vec<ConfigDeviceGestures>, } let bytes = std::fs::read(path)?; let config_file: ConfigFile = toml::from_slice(&bytes)?; for device_config in config_file.devices { let device = device_config.device; let device_gestures = config.devices.entry(device).or_default(); for gesture_action in device_config.gestures { device_gestures.insert(gesture_action.gesture, gesture_action.action); } } Ok(()) }
h: &Path) {
    if let Err(e) = load_config_file(config, &path) {
        eprintln!(
            "Error loading configuration file at {}: {}",
            path.display(),
            e
        );
    }
}
function_block-function_prefixed
[ { "content": "fn which(target: &str) -> Option<String> {\n\n let mut cmd = Command::new(\"which\");\n\n cmd.stdout(Stdio::piped());\n\n cmd.stderr(Stdio::null());\n\n cmd.args(&[target]);\n\n let output = match cmd.output() {\n\n Err(_) => {\n\n warn!(\"Failed to find/execute `which`\");\n\n return None;\n\n }\n\n Ok(output) => output,\n\n };\n\n\n\n if output.status.success() {\n\n let result = match String::from_utf8(output.stdout) {\n\n Ok(result) => result,\n\n Err(_) => {\n\n warn!(\"Path to {} cannot be converted to a UTF-8 string!\", target);\n\n return None;\n\n }\n", "file_path": "src/main.rs", "rank": 6, "score": 89960.4909390456 }, { "content": "fn swipe_handler(gestures: &config::GestureMap, gesture: Gesture) {\n\n info!(\"{:?}\", gesture);\n\n\n\n let action = match gestures.get(&gesture) {\n\n Some(action) => action,\n\n None => return,\n\n };\n\n\n\n match action {\n\n &Action::None => {}\n\n &Action::Execute(ref cmd) => {\n\n let mut shell = Command::new(\"sh\");\n\n shell.args(&[\"-c\", cmd]);\n\n let mut child = match shell.spawn() {\n\n Ok(child) => child,\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n return;\n\n }\n\n };\n\n\n\n // Spawn a thread to wait on the process to finish executing.\n\n // TODO: Just have one thread wait on all launched processes.\n\n std::thread::spawn(move || {\n\n let _ = child.wait();\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 61364.92448043797 }, { "content": "#[allow(non_camel_case_types, unused)]\n\n#[derive(Deserialize_repr, Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\nenum EventType {\n\n /// Unknown\n\n EV_SYN = 0,\n\n EV_KEY = 1,\n\n /// Absolute value pertaining to touchpad state (independent variable)\n\n EV_ABS = 3,\n\n}\n\n\n\n// Until it's proven that the different namespaces can collide (e.g. 
ABS_* and BTN_* sharing\n\n// values), just keep them in one enum for our own sanity.\n", "file_path": "src/events.rs", "rank": 9, "score": 58325.75102824673 }, { "content": "#[allow(non_camel_case_types, unused)]\n\n#[derive(Deserialize_repr, Clone, Copy, Debug, PartialEq, PartialOrd)]\n\n#[repr(u16)]\n\nenum EventCode {\n\n // Absolute Events (reported per-tool)\n\n /// The overall x location, not differentiated by slot.\n\n ABS_X = 0,\n\n /// The overall y location, not differentiated by slot.\n\n ABS_Y = 1,\n\n /// The overall pressure, not differentiated by slot.\n\n ABS_PRESSURE = 24,\n\n /// The slot identifier\n\n ABS_MT_SLOT = 47,\n\n /// The per-tool x location\n\n ABS_MT_POSITION_X = 53,\n\n /// The per-tool y location\n\n ABS_MT_POSITION_Y = 54,\n\n /// The id of the tool being tracked in this slot\n\n ABS_MT_TRACKING_ID = 57,\n\n /// The per-tool pressure\n\n ABS_MT_PRESSURE = 58,\n\n\n\n // Key Events (reported globally)\n", "file_path": "src/events.rs", "rank": 11, "score": 38536.20848506745 }, { "content": "#[derive(Debug, Default)]\n\nstruct Position {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 12, "score": 38438.804494406555 }, { "content": "#[derive(Debug, Default)]\n\nstruct SlotState {\n\n pub complete: bool,\n\n pub tool_id: Option<i32>,\n\n pub last_ts: f64,\n\n pub start_xy: Option<Position>,\n\n pub end_xy: Option<Position>,\n\n}\n\n\n\nimpl SlotState {\n\n pub fn push_position(&mut self, x: i32, y: i32) {\n\n if self.start_xy.is_none() {\n\n self.start_xy = Some(pos(x, y));\n\n } else {\n\n self.end_xy = Some(pos(x, y));\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn distance(&self) -> Option<f64> {\n\n if let (Some(start_xy), Some(end_xy)) = (&self.start_xy, &self.end_xy) {\n", "file_path": "src/events.rs", "rank": 13, "score": 37252.242487634954 }, { "content": "#[derive(Debug, Default)]\n\nstruct SynReport {\n\n events: Vec<SynEvent>,\n\n}\n\n\n\n/// A result derived from one or more [`SynReport`] instances in a stream.\n\n#[derive(Deserialize)]\n\n#[serde(tag = \"type\")]\n\n#[serde(rename_all = \"lowercase\")]\n\n#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n\npub(crate) enum Gesture {\n\n Tap {\n\n fingers: Fingers,\n\n },\n\n Swipe {\n\n fingers: Fingers,\n\n direction: Direction,\n\n },\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 14, "score": 37252.242487634954 }, { "content": "#[derive(Debug, Default)]\n\nstruct TouchpadState {\n\n pub slot_states: [Option<SlotState>; MAX_SLOTS],\n\n pub start_xy: Option<Position>,\n\n pub end_xy: Option<Position>,\n\n pub last_ts: f64,\n\n pub last_gesture_time: f64,\n\n pub max_fingers: Option<Fingers>,\n\n pub last_finger: Option<Fingers>,\n\n pub finger_start: Option<f64>,\n\n pub one_finger_duration: f64,\n\n pub two_finger_duration: f64,\n\n pub three_finger_duration: f64,\n\n pub four_finger_duration: f64,\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 15, "score": 37252.242487634954 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct SynEvent {\n\n time: f64,\n\n evt_type: EventType,\n\n code: EventCode,\n\n value: i32,\n\n}\n\n\n\n/// A grouping of [`SynEvent`] objects that arrive together in one report.\n\n/// Each individual `SynEvent` still has its own timestamp.\n", "file_path": "src/events.rs", "rank": 16, "score": 37249.78680893319 }, { "content": "fn main() {\n\n let config = config::load();\n\n\n\n if which(\"evtest\").is_none() {\n\n eprintln!(\"Cannot find `evtest` - make sure it is installed and try again!\");\n\n std::process::exit(-1);\n\n }\n\n\n\n 
if config.devices.is_empty() {\n\n eprintln!(\"No configured devices\");\n\n std::process::exit(-1);\n\n }\n\n\n\n // Event: time 1593656931.323635, type 3 (EV_ABS), code 47 (ABS_MT_SLOT), value 0\n\n let event_regex = std::sync::Arc::new(\n\n Regex::new(r#\"time (\\d+\\.\\d+), type (\\d+) .* code (\\d+) .* value (\\d+)\"#).unwrap(),\n\n );\n\n\n\n let mut threads = Vec::new();\n\n for (device, gestures) in config.devices {\n", "file_path": "src/main.rs", "rank": 17, "score": 34572.0169077996 }, { "content": "fn pos(x: i32, y: i32) -> Position {\n\n Position { x, y }\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 18, "score": 26516.583740599435 }, { "content": "fn get_direction(pos1: &Position, pos2: &Position) -> Direction {\n\n // It's much easier to scroll side-to-side than up-down, so include a bias\n\n if (pos2.x - pos1.x).abs() > ((1.05f64 * (pos2.y - pos1.y) as f64) as i32).abs() {\n\n // Interpret as movement along the x-axis only\n\n if pos2.x > pos1.x {\n\n Direction::Right\n\n } else {\n\n Direction::Left\n\n }\n\n } else {\n\n // Interpret as movement along the y-axis only\n\n if pos2.y > pos1.y {\n\n Direction::Down\n\n } else {\n\n Direction::Up\n\n }\n\n }\n\n}\n\n\n\n/// A multitouch trackpad driver tracks the location of each tool (read: finger) in a separate\n\n/// slot, and reports on all of them simultaneously. Each tool is independently tracked and does\n\n/// not affect the state of any other tool/slot.\n\n///\n\n/// `TouchpadState` tracks the status of all slots.\n", "file_path": "src/events.rs", "rank": 19, "score": 24269.78332982132 }, { "content": "/// Returns the Euclidean distance between two positions\n\nfn get_distance(pos1: &Position, pos2: &Position) -> f64 {\n\n (((pos2.x - pos1.x).pow(2) + (pos2.y - pos1.y).pow(2)) as f64).sqrt()\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 20, "score": 24269.78332982132 }, { "content": " };\n\n return Some(result);\n\n }\n\n\n\n return None;\n\n}\n\n\n\n// fn xdotool(command: &'static str, actions: &'static str) {\n\n// use std::thread;\n\n//\n\n// thread::spawn(move || {\n\n// Command::new(\"xdotool\")\n\n// .args(&[command, actions])\n\n// .output()\n\n// .expect(\"Failed to run xdotool\");\n\n// });\n\n// }\n", "file_path": "src/main.rs", "rank": 26, "score": 9.814947354329735 }, { "content": "## Installation\n\n\n\nsyngesture is written in rust and has no system build dependencies/requirements. It can be compiled\n\nand installed by checking out a copy of the source code and building with `cargo`, the rust package\n\nmanager:\n\n\n\n```\n\ngit clone https://github.com/mqudsi/syngesture.git\n\ncd syngesture\n\ncargo install --path .\n\n```\n\n\n\nalternatively, it may be installed directly via cargo:\n\n\n\n```\n\ncargo install syngesture\n\n```\n\n\n\n## Configuration\n\n\n\nsyngesture is configured via one or more TOML configuration files, a sample file [is included in this\n\nrepository](./syngestures.toml). Configuration files may be installed at a machine level to\n\n`/usr/local/etc/syngestures.toml` or with multiple per-device configuration files installed to\n\n`/usr/local/etc/syngestures.d/*.toml`, or at a user level with a configuration file at\n\n`$HOME/.config/syngestures.toml` or multiple per-device configuration files installed to\n\n`$HOME/.config/syngestures.d/*.toml`. 
Multiple files are supported and concatenated with user\n\nconfiguration files overriding the system configuration file.\n\n\n\nThe basic format of the configuration file is as follows, with a `[[device]]` node per input device\n\nimplementing the MT protocol:\n\n\n\n```toml\n\n[[device]]\n\ndevice = \"/dev/input/by-path/pci-0000:00:15.0-platform-i2c_designware.0-event-mouse\"\n\ngestures = [\n\n\t# Navigate next\n\n\t{ type = \"swipe\", direction = \"right\", fingers = 3, execute = \"xdotool key alt+Right\" },\n\n\t# Navigate previous\n\n\t{ type = \"swipe\", direction = \"left\", fingers = 3, execute = \"xdotool key alt+Left\" },\n\n\t# Next desktop/workspace\n\n\t{ type = \"swipe\", direction = \"right\", fingers = 4, execute = \"xdotool key Super_L+Right\" },\n\n\t# Previous desktop/workspace\n\n\t{ type = \"swipe\", direction = \"left\", fingers = 4, execute = \"xdotool key Super_L+Left\" },\n\n]\n\n```\n\n\n\nThe value of `device` should be a stable path to your touchpad, it can often be found by looking at\n\nthe output of `dmesg`. Wayland users may substitute the usage of `xdotool` for whatever alternative\n\nsupports their display server/compositor/window manager.\n\n\n\nThe value of each gesture's `type` may be either `swipe` or `tap`; a numeric `fingers` parameter\n\nfrom `1` to `5` is required in both cases, but an additional `direction` (being one of `right`,\n\n`left`, `up`, or `down`) is required in case of `swipe`.\n\n\n", "file_path": "README.md", "rank": 27, "score": 9.111856461891257 }, { "content": " Some(get_distance(start_xy, end_xy))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn direction(&self) -> Option<Direction> {\n\n if let (Some(start_xy), Some(end_xy)) = (&self.start_xy, &self.end_xy) {\n\n Some(get_direction(start_xy, end_xy))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl TouchpadState {\n\n pub fn reset(&mut self) {\n\n debug!(\"***RESET***\");\n\n self.slot_states = Default::default();\n", "file_path": "src/events.rs", "rank": 28, "score": 8.478589362540767 }, { "content": "use log::{debug, trace};\n\nuse serde::Deserialize;\n\nuse serde_repr::*;\n\n\n\n/// The maximum travel before a tap is considered a swipe, in millimeters.\n\nconst MAX_TAP_DISTANCE: f64 = 100f64;\n\n/// The maximum number of tools (fingers) that are tracked and reported on simultaneously.\n\nconst MAX_SLOTS: usize = 5;\n\n/// How long before the event state resets\n\nconst EVENT_TIMEOUT: f64 = 10_593_665_152f64;\n\n/// A new gesture (note: not a new report) will not be entertained in this timespan.\n\nconst DEBOUNCE_TIME: f64 = 0.2f64;\n\n\n\npub(crate) struct EventLoop {\n\n report: SynReport,\n\n state: TouchpadState,\n\n}\n\n\n\nimpl EventLoop {\n\n pub fn new() -> Self {\n", "file_path": "src/events.rs", "rank": 29, "score": 7.941897978248666 }, { "content": " let prev_finger_start = self.finger_start;\n\n let mut slot = &mut self.slot_states[0];\n\n #[allow(unused_assignments)]\n\n let mut slot_id = 0usize;\n\n // A slot id is only specified if more than one tool is detected.\n\n if slot.is_none() {\n\n *slot = Some(Default::default());\n\n }\n\n let mut slot_x = None;\n\n let mut slot_y = None;\n\n for event in &report.events {\n\n if event.time - self.last_ts >= EVENT_TIMEOUT {\n\n reset = true;\n\n break;\n\n }\n\n self.last_ts = event.time;\n\n\n\n match (&event.evt_type, &event.code) {\n\n (EventType::EV_ABS, EventCode::ABS_X) => {\n\n // Overall location, regardless of tool\n", "file_path": "src/events.rs", "rank": 30, "score": 7.903303010687654 
}, { "content": " Self {\n\n report: Default::default(),\n\n state: Default::default(),\n\n }\n\n }\n\n\n\n pub fn add_event(&mut self, time: f64, event_type: u8, event_code: u16, event_value: i32) {\n\n let event_type: EventType = match toml::Value::Integer(event_type as i64).try_into() {\n\n Ok(value) => value,\n\n Err(_) => {\n\n trace!(\"Unsupported event_type {}\", event_type);\n\n return;\n\n }\n\n };\n\n let event_code: EventCode = match toml::Value::Integer(event_code as i64).try_into() {\n\n Ok(value) => value,\n\n Err(_) => {\n\n trace!(\"Unsupported event_code {}\", event_code);\n\n return;\n\n }\n", "file_path": "src/events.rs", "rank": 31, "score": 7.380458511363214 }, { "content": "mod config;\n\nmod events;\n\n\n\nuse config::Action;\n\nuse events::{EventLoop, Gesture};\n\nuse log::{info, trace, warn};\n\nuse regex::Regex;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::process::{Command, Stdio};\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 6.699493376443318 }, { "content": " };\n\n\n\n self.report.events.push(SynEvent {\n\n time,\n\n evt_type: event_type,\n\n code: event_code,\n\n value: event_value,\n\n });\n\n }\n\n\n\n pub fn update(&mut self) -> Option<Gesture> {\n\n // eprintln!(\"Processing report with {} events\", self.report.events.len());\n\n let result = self.state.update(&mut self.report);\n\n self.report.events.clear();\n\n return result;\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types, unused)]\n\n#[derive(Deserialize_repr, Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n", "file_path": "src/events.rs", "rank": 33, "score": 6.2781483700502045 }, { "content": " self.start_xy = None;\n\n self.end_xy = None;\n\n // self.last_gesture_time should not be reset!\n\n // self.last_gesture_time = 0f64;\n\n self.max_fingers = None;\n\n self.last_finger = None;\n\n self.finger_start = None;\n\n self.one_finger_duration = 0f64;\n\n self.two_finger_duration = 0f64;\n\n self.three_finger_duration = 0f64;\n\n self.four_finger_duration = 0f64;\n\n }\n\n\n\n pub fn update(&mut self, report: &mut SynReport) -> Option<Gesture> {\n\n let mut reset = false;\n\n let mut overall_x = None;\n\n let mut overall_y = None;\n\n\n\n // Loop over events and handle each slot separately\n\n {\n", "file_path": "src/events.rs", "rank": 34, "score": 6.040337797252299 }, { "content": " overall_x = Some(event.value);\n\n }\n\n (EventType::EV_ABS, EventCode::ABS_Y) => {\n\n // Overall location, regardless of tool\n\n overall_y = Some(event.value);\n\n }\n\n (EventType::EV_ABS, EventCode::ABS_MT_SLOT) => {\n\n // This just tells us we're using a multitouch-capable trackpad and the\n\n // id of the slot that contains information about the tool (finger) being\n\n // tracked.\n\n slot_id = event.value as usize;\n\n self.slot_states[slot_id] = Some(Default::default());\n\n slot = &mut self.slot_states[slot_id];\n\n }\n\n (EventType::EV_ABS, EventCode::ABS_MT_POSITION_X) => {\n\n slot_x = Some(event.value);\n\n if slot_y.is_some() {\n\n slot.as_mut()\n\n .unwrap()\n\n .push_position(slot_x.take().unwrap(), slot_y.take().unwrap());\n", "file_path": "src/events.rs", "rank": 35, "score": 5.981962594111981 }, { "content": " pub fn push_position(&mut self, x: i32, y: i32) {\n\n if self.start_xy.is_none() {\n\n self.start_xy = Some(pos(x, y));\n\n } else {\n\n self.end_xy = Some(pos(x, y));\n\n }\n\n }\n\n\n\n fn process(&mut self) -> Option<Gesture> {\n\n if self.start_xy.is_none() {\n\n debug!(\"Received report but indeterminate start\");\n\n return None;\n\n }\n\n\n\n // What if we always assume 
that the maximum number of fingers detected\n\n // was the intended click?\n\n let fingers = match self.max_fingers {\n\n Some(finger) => finger,\n\n None => {\n\n debug!(\"Received report without any tools detected\");\n", "file_path": "src/events.rs", "rank": 36, "score": 5.4218601351171865 }, { "content": "\n\n#[repr(u8)]\n\n#[derive(Deserialize_repr, Clone, Debug, PartialEq, PartialOrd, Copy, Eq, Ord)]\n\npub(crate) enum Fingers {\n\n One = 1,\n\n Two = 2,\n\n Three = 3,\n\n Four = 4,\n\n}\n\n\n\n// Used to abstract away the event source. In the future, we can migrate from\n\n// using evtest to reading from the input device directly.\n", "file_path": "src/events.rs", "rank": 37, "score": 5.121680098199125 }, { "content": " let event_regex = event_regex.clone();\n\n let handle = std::thread::spawn(move || {\n\n let mut event_loop = EventLoop::new();\n\n\n\n let evtest = Command::new(\"evtest\")\n\n .args(&[&device])\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::inherit())\n\n .spawn()\n\n .unwrap();\n\n\n\n let reader = BufReader::new(evtest.stdout.unwrap());\n\n for line in reader.lines() {\n\n let line = match line {\n\n Ok(line) => line,\n\n Err(_) => break,\n\n };\n\n\n\n // Event: time 1593656931.306879, -------------- SYN_REPORT ------------\n\n if line.contains(\"SYN_REPORT\") {\n", "file_path": "src/main.rs", "rank": 38, "score": 4.540052121363546 }, { "content": "# Syngestures: Linux Multi-Touch Protocol Userland Daemon\n\n\n\nsyngestures is a utility providing multi-gesture support for various Linux touchpad/trackpad drivers\n\nimplementing the [Linux Multi-Touch\n\nProtocol](https://www.kernel.org/doc/Documentation/input/multi-touch-protocol.txt), such as\n\n`xf86-input-synaptics`. [Read more about syngesture, the impetus for its development, and how it fits\n\ninto the X11/Wayland evdev/libinput ecosystem in the release\n\nannouncement](http://neosmart.net/blog/2020/multi-touch-gestures-on-linux/)\n\n\n\n## Purpose and Design\n\n\n\nsyngestures is a daemon (background application) that listens for input events generated by your\n\ntouchpad or trackpad and detects when multi-touch gestures are performed. It can be configured\n\n(globally or on a per-user level) to carry out user-defined actions when specific gestures are\n\nrecognized (with support for unique configurations per-device if you have multiple touchpads\n\ninstalled).\n\n\n\nIt may be used alone or, more commonly, in conjunction with desktop environment/display server\n\nintegration/driver - we recommend using it with `xf86-input-synaptics` under X11 for the most\n\nresponsive and \"natural\" cursor movement and acceleration.\n\n\n\n## Dependencies\n\n\n\nsyngestures currently has a runtime dependency on the `evtest` binary that listens for and reports\n\nevents generated by input devices. `evtest` is often directly installable via your distribution's\n\npackage manager, e.g. `sudo apt install evtest`.\n\n\n\n### Security Considerations\n\n\n\nDepending on your system configuration and [at least until the `evtest` dependency is\n\ndropped](https://github.com/mqudsi/syngesture/issues/1), you may need to set the sticky bit on\n\n`evtest` to allow non-root users to invoke `evtest` without requiring the calling process to also be\n\nelevated. 
Since `syngestures` currently allows running arbitrary commands in response to multi-touch\n\ngestures via its configuration files, it is not recommended to work around permission errors by\n\ninvoking this application as `sudo syngestures`.\n\n\n", "file_path": "README.md", "rank": 39, "score": 4.2192809842439125 }, { "content": " );\n\n self.three_finger_duration += event.time - prev_finger_start.unwrap();\n\n }\n\n self.last_finger = None;\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_QUADTAP) if event.value == 0 => {\n\n if prev_finger_start.is_some() {\n\n debug!(\n\n \"four finger remove {}\",\n\n event.time - prev_finger_start.unwrap()\n\n );\n\n self.four_finger_duration += event.time - prev_finger_start.unwrap();\n\n }\n\n self.last_finger = None;\n\n }\n\n\n\n // Tracking complete event\n\n (EventType::EV_ABS, EventCode::ABS_MT_TRACKING_ID) if event.value == -1 => {\n\n slot.as_mut().unwrap().complete = true;\n\n }\n", "file_path": "src/events.rs", "rank": 40, "score": 4.143238719936747 }, { "content": " );\n\n self.one_finger_duration += event.time - prev_finger_start.unwrap();\n\n }\n\n self.last_finger = None;\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_DOUBLETAP) if event.value == 0 => {\n\n if prev_finger_start.is_some() {\n\n debug!(\n\n \"two finger remove {}\",\n\n event.time - prev_finger_start.unwrap()\n\n );\n\n self.two_finger_duration += event.time - prev_finger_start.unwrap();\n\n }\n\n self.last_finger = None;\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_TRIPLETAP) if event.value == 0 => {\n\n if prev_finger_start.is_some() {\n\n debug!(\n\n \"three finger remove {}\",\n\n event.time - prev_finger_start.unwrap()\n", "file_path": "src/events.rs", "rank": 41, "score": 3.0442488759761024 }, { "content": " }\n\n }\n\n (EventType::EV_ABS, EventCode::ABS_MT_POSITION_Y) => {\n\n slot_y = Some(event.value);\n\n if slot_x.is_some() {\n\n slot.as_mut()\n\n .unwrap()\n\n .push_position(slot_x.take().unwrap(), slot_y.take().unwrap());\n\n }\n\n }\n\n\n\n // Finger state applied\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_FINGER) if event.value == 1 => {\n\n debug!(\"one finger press\");\n\n self.finger_start = Some(event.time);\n\n self.last_finger.replace(Fingers::One);\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_DOUBLETAP) if event.value == 1 => {\n\n debug!(\"two finger press\");\n\n self.finger_start = Some(event.time);\n", "file_path": "src/events.rs", "rank": 42, "score": 2.8474473902888104 }, { "content": "\n\n // Catch-all\n\n _ => {}\n\n };\n\n }\n\n }\n\n\n\n if reset {\n\n self.reset();\n\n return None;\n\n }\n\n\n\n if self.max_fingers.is_none() || self.last_finger > self.max_fingers {\n\n // Reset start position because everything until now was presumably building to this\n\n self.start_xy = None;\n\n self.max_fingers = self.last_finger;\n\n }\n\n\n\n if let (Some(x), Some(y)) = (overall_x.take(), overall_y.take()) {\n\n // We always consider a decrease in tool count to be a tear-down and ignore the change\n", "file_path": "src/events.rs", "rank": 43, "score": 2.2805643219607528 }, { "content": " // in position.\n\n if self.max_fingers == self.last_finger {\n\n self.push_position(x, y);\n\n } else {\n\n debug!(\"Position ignored\");\n\n }\n\n }\n\n\n\n if self.last_finger.is_none() {\n\n if let Some(gesture) = self.process() {\n\n self.reset();\n\n return Some(gesture);\n\n }\n\n } else {\n\n debug!(\"Remaining finger(s): {:?}\", self.last_finger);\n\n }\n\n\n\n return None;\n\n }\n\n\n", "file_path": "src/events.rs", "rank": 44, "score": 
2.254834234673909 }, { "content": " Some(Gesture::Swipe {\n\n fingers,\n\n direction: get_direction(\n\n self.start_xy.as_ref().unwrap(),\n\n self.end_xy.as_ref().unwrap(),\n\n ),\n\n })\n\n }\n\n } else {\n\n debug!(\"Gesture ignored by debounce\");\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/events.rs", "rank": 45, "score": 1.837646918323612 }, { "content": " return None;\n\n }\n\n };\n\n\n\n let distance = match &self.end_xy {\n\n Some(end_xy) => get_distance(self.start_xy.as_ref().unwrap(), &end_xy),\n\n None => 0f64,\n\n };\n\n\n\n debug!(\"Distance: {}\", distance);\n\n\n\n trace!(\"self.last_ts: {}\", self.last_ts);\n\n trace!(\"self.last_gesture_time: {}\", self.last_gesture_time);\n\n if self.last_ts - self.last_gesture_time > DEBOUNCE_TIME {\n\n self.last_gesture_time = self.last_ts;\n\n if distance < MAX_TAP_DISTANCE {\n\n debug!(\"tap detected\");\n\n Some(Gesture::Tap { fingers })\n\n } else {\n\n debug!(\"gesture detected\");\n", "file_path": "src/events.rs", "rank": 46, "score": 1.7352592297505314 }, { "content": " if let Some(gesture) = event_loop.update() {\n\n swipe_handler(&gestures, gesture);\n\n }\n\n continue;\n\n }\n\n\n\n if let Some(captures) = event_regex.captures(&line) {\n\n let time: f64 = captures[1].parse().unwrap();\n\n let event_type: u8 = captures[2].parse().unwrap();\n\n let code: u16 = captures[3].parse().unwrap();\n\n let value: i32 = captures[4].parse().unwrap();\n\n\n\n trace!(\"{}\", line);\n\n event_loop.add_event(time, event_type, code, value);\n\n }\n\n }\n\n });\n\n threads.push(handle);\n\n }\n\n\n\n for thread in threads {\n\n thread.join().unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 47, "score": 1.716299199145183 }, { "content": " self.last_finger.replace(Fingers::Two);\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_TRIPLETAP) if event.value == 1 => {\n\n debug!(\"three finger press\");\n\n self.finger_start = Some(event.time);\n\n self.last_finger.replace(Fingers::Three);\n\n }\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_QUADTAP) if event.value == 1 => {\n\n debug!(\"four finger press\");\n\n self.finger_start = Some(event.time);\n\n self.last_finger.replace(Fingers::Four);\n\n }\n\n\n\n // Finger state removed\n\n // Assuming we never miss an event, the finger should always have started\n\n (EventType::EV_KEY, EventCode::BTN_TOOL_FINGER) if event.value == 0 => {\n\n if prev_finger_start.is_some() {\n\n debug!(\n\n \"one finger remove {}\",\n\n event.time - prev_finger_start.unwrap()\n", "file_path": "src/events.rs", "rank": 48, "score": 1.7099621166941241 }, { "content": " BTN_LEFT = 272,\n\n BTN_TOOL_FINGER = 325,\n\n BTN_TOUCH = 330,\n\n BTN_TOOL_DOUBLETAP = 333,\n\n BTN_TOOL_TRIPLETAP = 334,\n\n BTN_TOOL_QUADTAP = 335,\n\n BTN_TOOL_QUINTTAP = 328,\n\n}\n\n\n\n#[derive(Deserialize, Debug, PartialEq, PartialOrd, Eq, Ord)]\n\npub(crate) enum Direction {\n\n #[serde(alias = \"up\")]\n\n Up,\n\n #[serde(alias = \"down\")]\n\n Down,\n\n #[serde(alias = \"left\")]\n\n Left,\n\n #[serde(alias = \"right\")]\n\n Right,\n\n}\n", "file_path": "src/events.rs", "rank": 49, "score": 1.3309636323163057 } ]
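For readability, here is a minimal, self-contained sketch of the serde layout that `load_config_file` in src/config.rs (shown earlier in this row) relies on, fed with the `[[device]]` TOML shape documented in the syngestures README above. The struct and enum definitions are simplified stand-ins for the crate's private types (fingers and direction are reduced to `u8`/`String`), and the gesture bindings in the sample are purely illustrative, so read it as a sketch of the deserialization shape rather than the tool's actual code.

// Hedged sketch: simplified stand-ins for the private config structs in
// src/config.rs; the sample TOML mirrors the README's `[[device]]` layout.
// Requires the `serde` (with derive) and `toml` crates, as the real code does.
use serde::Deserialize;
use std::collections::BTreeMap;

#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum Gesture {
    // `fingers`/`direction` are plain primitives here, unlike the real enums.
    Tap { fingers: u8 },
    Swipe { fingers: u8, direction: String },
}

#[derive(Deserialize)]
#[serde(rename_all = "lowercase")]
enum Action {
    Execute(String),
}

#[derive(Deserialize)]
struct GestureAndAction {
    #[serde(flatten)]
    gesture: Gesture,
    #[serde(flatten)]
    action: Action,
}

#[derive(Deserialize)]
struct DeviceGestures {
    device: String,
    gestures: Vec<GestureAndAction>,
}

#[derive(Deserialize)]
struct ConfigFile {
    // `alias` lets the TOML use `[[device]]` while the field is `devices`.
    #[serde(alias = "device")]
    devices: Vec<DeviceGestures>,
}

fn main() {
    // Device path taken from the README sample; the tap binding is illustrative.
    let toml_src = r#"
        [[device]]
        device = "/dev/input/by-path/pci-0000:00:15.0-platform-i2c_designware.0-event-mouse"
        gestures = [
            { type = "swipe", direction = "right", fingers = 3, execute = "xdotool key alt+Right" },
            { type = "tap", fingers = 2, execute = "xdotool click 3" },
        ]
    "#;

    let parsed: ConfigFile = toml::from_str(toml_src).expect("invalid TOML");

    // Collect gestures per device, mirroring how `load_config_file` fills
    // `Configuration::devices`.
    let mut devices: BTreeMap<String, Vec<(Gesture, Action)>> = BTreeMap::new();
    for dev in parsed.devices {
        let entry = devices.entry(dev.device).or_default();
        for ga in dev.gestures {
            entry.push((ga.gesture, ga.action));
        }
    }
    println!("{} configured device(s)", devices.len());
}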
Rust
src/monadio.rs
TeaEntityLab/fpRust
5381203f823c3b0d0080d7070022379bbd525c02
/*! In this module there're implementations & tests of `MonadIO`. It's inspired by `Rx` & `MonadIO` in `Haskell` */ use std::sync::{Arc, Mutex}; #[cfg(feature = "for_futures")] use super::common::shared_thread_pool; #[cfg(feature = "for_futures")] use crate::futures::task::SpawnExt; #[cfg(feature = "for_futures")] use std::error::Error; use super::handler::Handler; use super::sync::CountDownLatch; use super::common::{RawFunc, Subscription, SubscriptionFunc}; /** `MonadIO` implements basic `Rx`/`MonadIO` APIs. The `observe` and `subscribe` actions could be sync/async, and `observe` & `subscribe` could be on other `thread`s (by setting up `observe_on` and `subscribe_on`). # Arguments * `Y` - The generic type of data # Remarks It's inspired by `Rx` & `MonadIO` in `Haskell` , and easily run it on sync/async scenaios. `` */ #[derive(Clone)] pub struct MonadIO<Y> { effect: Arc<Mutex<dyn FnMut() -> Y + Send + Sync + 'static>>, ob_handler: Option<Arc<Mutex<dyn Handler>>>, sub_handler: Option<Arc<Mutex<dyn Handler>>>, } pub fn of<Z: 'static + Send + Sync + Clone>(r: Z) -> impl FnMut() -> Z + Send + Sync + 'static { let _r = Box::new(r); move || *_r.clone() } impl<Y: 'static + Send + Sync + Clone> From<Y> for MonadIO<Y> { fn from(r: Y) -> Self { MonadIO::just(r) } } impl<Y: 'static + Send + Sync + Clone> MonadIO<Y> { pub fn just(r: Y) -> MonadIO<Y> { MonadIO::new(of(r)) } #[cfg(feature = "for_futures")] pub async fn to_future(&self) -> Result<Arc<Y>, Box<dyn Error>> { let mio = self.clone(); let future = { shared_thread_pool() .inner .lock() .unwrap() .spawn_with_handle(async move { mio.eval() })? }; let result = future.await; Ok(result) } } impl<Y: 'static + Send + Sync> MonadIO<Y> { pub fn new(effect: impl FnMut() -> Y + Send + Sync + 'static) -> MonadIO<Y> { MonadIO::new_with_handlers(effect, None, None) } pub fn new_with_handlers( effect: impl FnMut() -> Y + Send + Sync + 'static, ob: Option<Arc<Mutex<dyn Handler + 'static>>>, sub: Option<Arc<Mutex<dyn Handler + 'static>>>, ) -> MonadIO<Y> { MonadIO { effect: Arc::new(Mutex::new(effect)), ob_handler: ob, sub_handler: sub, } } pub fn observe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.ob_handler = h; } pub fn subscribe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.sub_handler = h; } pub fn map<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> Z + Send + Sync + 'static, ) -> MonadIO<Z> { let _func = Arc::new(Mutex::new(func)); let mut _effect = self.effect.clone(); MonadIO::new_with_handlers( move || (_func.lock().unwrap())((_effect.lock().unwrap())()), self.ob_handler.clone(), self.sub_handler.clone(), ) } pub fn fmap<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> MonadIO<Z> + Send + Sync + 'static, ) -> MonadIO<Z> { let mut _func = Arc::new(Mutex::new(func)); self.map(move |y: Y| ((_func.lock().unwrap())(y).effect.lock().unwrap())()) } pub fn subscribe(&self, s: Arc<impl Subscription<Y>>) { let mut _effect = self.effect.clone(); match &self.ob_handler { Some(ob_handler) => { let mut sub_handler_thread = Arc::new(self.sub_handler.clone()); ob_handler.lock().unwrap().post(RawFunc::new(move || { match Arc::make_mut(&mut sub_handler_thread) { Some(ref mut sub_handler) => { let effect = _effect.clone(); let s = s.clone(); sub_handler.lock().unwrap().post(RawFunc::new(move || { let result = { Arc::new(effect.lock().unwrap()()) }; s.on_next(result); })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } })); } None => { 
s.on_next(Arc::new(_effect.lock().unwrap()())); } } } pub fn subscribe_fn(&self, func: impl FnMut(Arc<Y>) + Send + Sync + 'static) { self.subscribe(Arc::new(SubscriptionFunc::new(func))) } pub fn eval(&self) -> Arc<Y> { let latch = CountDownLatch::new(1); let latch_thread = latch.clone(); let result = Arc::new(Mutex::new(None::<Arc<Y>>)); let result_thread = result.clone(); self.subscribe_fn(move |y| { result_thread.lock().unwrap().replace(y); latch_thread.countdown(); }); latch.wait(); let result = result.lock().as_mut().unwrap().to_owned(); result.unwrap() } } #[cfg(feature = "for_futures")] #[futures_test::test] async fn test_monadio_async() { assert_eq!(Arc::new(3), MonadIO::just(3).eval()); assert_eq!( Arc::new(3), MonadIO::just(3).to_future().await.ok().unwrap() ); assert_eq!( Arc::new(6), MonadIO::just(3) .map(|i| i * 2) .to_future() .await .ok() .unwrap() ); } #[test] fn test_monadio_new() { use super::common::SubscriptionFunc; use super::handler::HandlerThread; use std::sync::Arc; use std::{thread, time}; use super::sync::CountDownLatch; let monadio_simple = MonadIO::just(3); { assert_eq!(3, (monadio_simple.effect.lock().unwrap())()); } let monadio_simple_map = monadio_simple.map(|x| x * 3); monadio_simple_map.subscribe_fn(move |x| { println!("monadio_simple_map {:?}", x); assert_eq!(9, *Arc::make_mut(&mut x.clone())); }); let mut _subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<u16>| { println!("monadio_sync {:?}", x); assert_eq!(36, *Arc::make_mut(&mut x.clone())); })); let subscription = _subscription.clone(); let monadio_sync = MonadIO::just(1) .fmap(|x| MonadIO::new(move || x * 4)) .map(|x| x * 3) .map(|x| x * 3); monadio_sync.subscribe(subscription); let mut _handler_observe_on = HandlerThread::new_with_mutex(); let mut _handler_subscribe_on = HandlerThread::new_with_mutex(); let monadio_async = MonadIO::new_with_handlers( || { println!("In string"); String::from("ok") }, Some(_handler_observe_on.clone()), Some(_handler_subscribe_on.clone()), ); let latch = CountDownLatch::new(1); let latch2 = latch.clone(); thread::sleep(time::Duration::from_millis(1)); let subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async {:?}", x); latch2.countdown(); })); monadio_async.subscribe(subscription); monadio_async.subscribe(Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async sub2 {:?}", x); }))); { let mut handler_observe_on = _handler_observe_on.lock().unwrap(); let mut handler_subscribe_on = _handler_subscribe_on.lock().unwrap(); println!("hh2"); handler_observe_on.start(); handler_subscribe_on.start(); println!("hh2 running"); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); } thread::sleep(time::Duration::from_millis(1)); latch.clone().wait(); }
/*! In this module there're implementations & tests of `MonadIO`. It's inspired by `Rx` & `MonadIO` in `Haskell` */ use std::sync::{Arc, Mutex}; #[cfg(feature = "for_futures")] use super::common::shared_thread_pool; #[cfg(feature = "for_futures")] use crate::futures::task::SpawnExt; #[cfg(feature = "for_futures")] use std::error::Error; use super::handler::Handler; use super::sync::CountDownLatch; use super::common::{RawFunc, Subscription, SubscriptionFunc}; /** `MonadIO` implements basic `Rx`/`MonadIO` APIs. The `observe` and `subscribe` actions could be sync/async, and `observe` & `subscribe` could be on other `thread`s (by setting up `observe_on` and `subscribe_on`). # Arguments * `Y` - The generic type of data # Remarks It's inspired by `Rx` & `MonadIO` in `Haskell` , and easily run it on sync/async scenaios. `` */ #[derive(Clone)] pub struct MonadIO<Y> { effect: Arc<Mutex<dyn FnMut() -> Y + Send + Sync + 'static>>, ob_handler: Option<Arc<Mutex<dyn Handler>>>, sub_handler: Option<Arc<Mutex<dyn Handler>>>, } pub fn of<Z: 'static + Send + Sync + Clone>(r: Z) -> impl FnMut() -> Z + Send + Sync + 'static { let _r = Box::new(r); move || *_r.clone() } impl<Y: 'static + Send + Sync + Clone> From<Y> for MonadIO<Y> { fn from(r: Y) -> Self { MonadIO::just(r) } } impl<Y: 'static + Send + Sync + Clone> MonadIO<Y> { pub fn just(r: Y) -> MonadIO<Y> { MonadIO::new(of(r)) } #[cfg(feature = "for_futures")] pub async fn to_future(&self) -> Result<Arc<Y>, Box<dyn Error>> { let mio = self.clone(); let future = { shared_thread_pool() .inner .lock() .unwrap() .spawn_with_handle(async move { mio.eval() })? }; let result = future.await; Ok(result) } } impl<Y: 'static + Send + Sync> MonadIO<Y> { pub fn new(effect: impl FnMut() -> Y + Send + Sync + 'static) -> MonadIO<Y> { MonadIO::new_with_handlers(effect, None, None) } pub fn new_with_handlers( effect: impl FnMut() -> Y + Send + Sync + 'static, ob: Option<Arc<Mutex<dyn Handler + 'static>>>, sub: Option<Arc<Mutex<dyn Handler + 'static>>>, ) -> MonadIO<Y> { MonadIO { effect: Arc::new(Mutex::new(effect)), ob_handler: ob, sub_handler: sub, } } pub fn observe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.ob_handler = h; } pub fn subscribe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) { self.sub_handler = h; }
pub fn fmap<Z: 'static + Send + Sync + Clone>( &self, func: impl FnMut(Y) -> MonadIO<Z> + Send + Sync + 'static, ) -> MonadIO<Z> { let mut _func = Arc::new(Mutex::new(func)); self.map(move |y: Y| ((_func.lock().unwrap())(y).effect.lock().unwrap())()) } pub fn subscribe(&self, s: Arc<impl Subscription<Y>>) { let mut _effect = self.effect.clone(); match &self.ob_handler { Some(ob_handler) => { let mut sub_handler_thread = Arc::new(self.sub_handler.clone()); ob_handler.lock().unwrap().post(RawFunc::new(move || { match Arc::make_mut(&mut sub_handler_thread) { Some(ref mut sub_handler) => { let effect = _effect.clone(); let s = s.clone(); sub_handler.lock().unwrap().post(RawFunc::new(move || { let result = { Arc::new(effect.lock().unwrap()()) }; s.on_next(result); })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } })); } None => { s.on_next(Arc::new(_effect.lock().unwrap()())); } } } pub fn subscribe_fn(&self, func: impl FnMut(Arc<Y>) + Send + Sync + 'static) { self.subscribe(Arc::new(SubscriptionFunc::new(func))) } pub fn eval(&self) -> Arc<Y> { let latch = CountDownLatch::new(1); let latch_thread = latch.clone(); let result = Arc::new(Mutex::new(None::<Arc<Y>>)); let result_thread = result.clone(); self.subscribe_fn(move |y| { result_thread.lock().unwrap().replace(y); latch_thread.countdown(); }); latch.wait(); let result = result.lock().as_mut().unwrap().to_owned(); result.unwrap() } } #[cfg(feature = "for_futures")] #[futures_test::test] async fn test_monadio_async() { assert_eq!(Arc::new(3), MonadIO::just(3).eval()); assert_eq!( Arc::new(3), MonadIO::just(3).to_future().await.ok().unwrap() ); assert_eq!( Arc::new(6), MonadIO::just(3) .map(|i| i * 2) .to_future() .await .ok() .unwrap() ); } #[test] fn test_monadio_new() { use super::common::SubscriptionFunc; use super::handler::HandlerThread; use std::sync::Arc; use std::{thread, time}; use super::sync::CountDownLatch; let monadio_simple = MonadIO::just(3); { assert_eq!(3, (monadio_simple.effect.lock().unwrap())()); } let monadio_simple_map = monadio_simple.map(|x| x * 3); monadio_simple_map.subscribe_fn(move |x| { println!("monadio_simple_map {:?}", x); assert_eq!(9, *Arc::make_mut(&mut x.clone())); }); let mut _subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<u16>| { println!("monadio_sync {:?}", x); assert_eq!(36, *Arc::make_mut(&mut x.clone())); })); let subscription = _subscription.clone(); let monadio_sync = MonadIO::just(1) .fmap(|x| MonadIO::new(move || x * 4)) .map(|x| x * 3) .map(|x| x * 3); monadio_sync.subscribe(subscription); let mut _handler_observe_on = HandlerThread::new_with_mutex(); let mut _handler_subscribe_on = HandlerThread::new_with_mutex(); let monadio_async = MonadIO::new_with_handlers( || { println!("In string"); String::from("ok") }, Some(_handler_observe_on.clone()), Some(_handler_subscribe_on.clone()), ); let latch = CountDownLatch::new(1); let latch2 = latch.clone(); thread::sleep(time::Duration::from_millis(1)); let subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async {:?}", x); latch2.countdown(); })); monadio_async.subscribe(subscription); monadio_async.subscribe(Arc::new(SubscriptionFunc::new(move |x: Arc<String>| { println!("monadio_async sub2 {:?}", x); }))); { let mut handler_observe_on = _handler_observe_on.lock().unwrap(); let mut handler_subscribe_on = _handler_subscribe_on.lock().unwrap(); println!("hh2"); handler_observe_on.start(); handler_subscribe_on.start(); println!("hh2 running"); handler_observe_on.post(RawFunc::new(move || 
{})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); handler_observe_on.post(RawFunc::new(move || {})); } thread::sleep(time::Duration::from_millis(1)); latch.clone().wait(); }
pub fn map<Z: 'static + Send + Sync + Clone>(
    &self,
    func: impl FnMut(Y) -> Z + Send + Sync + 'static,
) -> MonadIO<Z> {
    let _func = Arc::new(Mutex::new(func));
    let mut _effect = self.effect.clone();

    MonadIO::new_with_handlers(
        move || (_func.lock().unwrap())((_effect.lock().unwrap())()),
        self.ob_handler.clone(),
        self.sub_handler.clone(),
    )
}
function_block-full_function
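For orientation, the following is a hedged usage sketch distilled from `test_monadio_new` in the monadio.rs source above. The crate-level import paths (`fp_rust::monadio`, `fp_rust::common`, `fp_rust::handler`, `fp_rust::sync`) and the `main` wrapper are assumptions of this sketch; inside the repository these modules are referenced via `super::`, so adjust the paths to however the crate is actually exposed.

// Hedged sketch based on the tests shown above; module paths are assumed.
use std::sync::Arc;

use fp_rust::common::SubscriptionFunc;
use fp_rust::handler::{Handler, HandlerThread};
use fp_rust::monadio::MonadIO;
use fp_rust::sync::CountDownLatch;

fn main() {
    // Lazy composition with `map`, blocking evaluation with `eval`.
    assert_eq!(Arc::new(9), MonadIO::just(3).map(|x| x * 3).eval());

    // Run the effect and the subscription on dedicated handler threads.
    let ob_handler = HandlerThread::new_with_mutex();
    let sub_handler = HandlerThread::new_with_mutex();
    ob_handler.lock().unwrap().start();
    sub_handler.lock().unwrap().start();

    let latch = CountDownLatch::new(1);
    let done = latch.clone();

    let mut mio = MonadIO::just(String::from("ok"));
    mio.observe_on(Some(ob_handler));
    mio.subscribe_on(Some(sub_handler));
    mio.subscribe(Arc::new(SubscriptionFunc::new(move |s: Arc<String>| {
        println!("received {:?}", s);
        done.countdown();
    })));

    // Block until the asynchronous subscription has fired.
    latch.wait();
}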
[ { "content": "pub trait Handler: Send + Sync + 'static {\n\n /**\n\n Did this `Handler` start?\n\n Return `true` when it did started (no matter it has stopped or not)\n\n\n\n */\n\n fn is_started(&mut self) -> bool;\n\n\n\n /**\n\n Is this `Handler` alive?\n\n Return `true` when it has started and not stopped yet.\n\n */\n\n fn is_alive(&mut self) -> bool;\n\n\n\n /**\n\n Start `Handler`.\n\n */\n\n fn start(&mut self);\n\n\n\n /**\n", "file_path": "src/handler.rs", "rank": 1, "score": 206654.75606180722 }, { "content": "pub trait Will<T>: Send + Sync + 'static {\n\n /**\n\n Did this `Will` start?\n\n Return `true` when it did started (no matter it has stopped or not)\n\n\n\n */\n\n fn is_started(&mut self) -> bool;\n\n\n\n /**\n\n Is this `Will` alive?\n\n Return `true` when it has started and not stopped yet.\n\n */\n\n fn is_alive(&mut self) -> bool;\n\n\n\n /**\n\n Start `Will`.\n\n */\n\n fn start(&mut self);\n\n\n\n /**\n", "file_path": "src/sync.rs", "rank": 2, "score": 157838.29212138115 }, { "content": "pub trait Subscription<X>: Send + Sync + 'static + UniqueId<String> {\n\n /**\n\n The callback when `Subscription` received the broadcasted value.\n\n\n\n # Arguments\n\n\n\n * `func` - The given `FnMut`.\n\n\n\n */\n\n fn on_next(&self, x: Arc<X>);\n\n}\n\n\n\n/**\n\n`UniqueId` trait defines the interface of an object with an unique id,\n\nfor general purposes crossing over many modules of fpRust.\n\n\n\n# Remarks\n\n\n\nThis is inspired by Java/Swift Hashable.\n\n\n\n*/\n", "file_path": "src/common.rs", "rank": 3, "score": 151527.6996067813 }, { "content": "#[derive(Clone)]\n\nstruct HandlerThreadInner {\n\n // this: Option<Arc<HandlerThreadInner>>,\n\n started: Arc<AtomicBool>,\n\n alive: Arc<AtomicBool>,\n\n q: Arc<fpSync::BlockingQueue<RawFunc>>,\n\n}\n\n\n\nimpl HandlerThreadInner {\n\n pub fn new() -> HandlerThreadInner {\n\n HandlerThreadInner {\n\n started: Arc::new(AtomicBool::new(false)),\n\n alive: Arc::new(AtomicBool::new(false)),\n\n q: Arc::new(<fpSync::BlockingQueue<RawFunc>>::new()),\n\n }\n\n }\n\n}\n\n\n\nimpl Handler for HandlerThreadInner {\n\n fn is_started(&mut self) -> bool {\n\n self.started.load(Ordering::SeqCst)\n", "file_path": "src/handler.rs", "rank": 4, "score": 136404.04622642018 }, { "content": "#[cfg(feature = \"for_futures\")]\n\npub fn shared_thread_pool() -> SharedThreadPoolReader {\n\n // Initialize it to a null value\n\n static mut SINGLETON: *const SharedThreadPoolReader = 0 as *const SharedThreadPoolReader;\n\n static ONCE: Once = Once::new();\n\n\n\n unsafe {\n\n ONCE.call_once(|| {\n\n // Make it\n\n let singleton = SharedThreadPoolReader {\n\n inner: Arc::new(Mutex::new(\n\n ThreadPool::new().expect(\"Unable to create threadpool\"),\n\n )),\n\n };\n\n\n\n // Put it in the heap so it can outlive this call\n\n SINGLETON = mem::transmute(Box::new(singleton));\n\n });\n\n\n\n // Now we give out a copy of the data that is safe to use concurrently.\n\n (*SINGLETON).clone()\n", "file_path": "src/common.rs", "rank": 5, "score": 119784.11017120612 }, { "content": "#[test]\n\nfn test_handler_new() {\n\n use super::sync::CountDownLatch;\n\n use std::time;\n\n\n\n let mut _h = HandlerThread::new_with_mutex();\n\n let mut h = _h.lock().unwrap();\n\n\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(false, h.is_started());\n\n\n\n h.stop();\n\n h.stop();\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(false, h.is_started());\n\n // let mut h1 = _h.clone();\n\n h.start();\n\n assert_eq!(true, h.is_alive());\n\n assert_eq!(true, 
h.is_started());\n\n\n\n let latch = CountDownLatch::new(1);\n", "file_path": "src/handler.rs", "rank": 7, "score": 101760.97234274652 }, { "content": "#[test]\n\nfn test_will_sync_new() {\n\n use std::thread;\n\n use std::time;\n\n\n\n let latch = CountDownLatch::new(1);\n\n let latch2 = latch.clone();\n\n let mut h = WillAsync::new(move || 1);\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(false, h.is_started());\n\n h.stop();\n\n h.stop();\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(false, h.is_started());\n\n h.add_callback(Arc::new(SubscriptionFunc::new(move |_v: Arc<i16>| {\n\n assert_eq!(1, *Arc::make_mut(&mut _v.clone()));\n\n latch2.countdown();\n\n })));\n\n h.start();\n\n latch.clone().wait();\n\n thread::sleep(time::Duration::from_millis(1));\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(true, h.is_started());\n\n assert_eq!(1, h.result().unwrap());\n\n}\n", "file_path": "src/sync.rs", "rank": 8, "score": 97383.3468593281 }, { "content": "#[inline]\n\npub fn filter<'r, T: 'r>(f: impl FnMut(&T) -> bool, v: Vec<T>) -> Vec<T> {\n\n v.into_iter().filter(f).into_iter().collect::<Vec<T>>()\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 9, "score": 95782.17010666036 }, { "content": "#[inline]\n\npub fn map<T, B>(f: impl FnMut(T) -> B, v: Vec<T>) -> Vec<B> {\n\n v.into_iter().map(f).collect::<Vec<B>>()\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 10, "score": 95376.88965181491 }, { "content": "#[test]\n\nfn test_maybe_unwrap() {\n\n assert_eq!(false, Maybe::just(None::<bool>).or(false));\n\n assert_eq!(true, Maybe::val(true).or(false));\n\n use std::panic;\n\n\n\n let none_unwrap = panic::catch_unwind(|| {\n\n Maybe::just(None::<bool>).unwrap();\n\n });\n\n assert_eq!(true, none_unwrap.is_err());\n\n assert_eq!(true, Maybe::val(true).unwrap());\n\n\n\n assert_eq!(\n\n true,\n\n match Maybe::val(true).option() {\n\n None => false,\n\n Some(_x) => true,\n\n }\n\n );\n\n assert_eq!(\n\n false,\n\n match Maybe::just(None::<bool>).option() {\n\n None => false,\n\n Some(_x) => true,\n\n }\n\n );\n\n}\n", "file_path": "src/maybe.rs", "rank": 11, "score": 93249.54609456196 }, { "content": "#[inline]\n\npub fn reduce<'r, T: 'r>(f: impl FnMut(T, T) -> T, v: Vec<T>) -> Option<T> {\n\n Reduce::reduce(v.into_iter(), f)\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 12, "score": 92558.19646841874 }, { "content": "#[inline]\n\npub fn foldl<T, B>(f: impl FnMut(B, T) -> B, initial: B, v: Vec<T>) -> B {\n\n v.into_iter().fold(initial, f)\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 13, "score": 89143.81324689848 }, { "content": "#[inline]\n\npub fn foldr<T, B>(f: impl FnMut(B, T) -> B, initial: B, v: Vec<T>) -> B {\n\n v.into_iter().rev().fold(initial, f)\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 14, "score": 89143.81324689848 }, { "content": "#[inline]\n\npub fn compose_two<A, B, C, G, F>(f: F, g: G) -> impl FnOnce(A) -> C\n\nwhere\n\n F: FnOnce(A) -> B,\n\n G: FnOnce(B) -> C,\n\n{\n\n move |x| g(f(x))\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 15, "score": 86042.48925708614 }, { "content": "pub trait Observable<X, T: Subscription<X>> {\n\n /**\n\n Add a `Subscription`.\n\n\n\n # Arguments\n\n\n\n * `observer` - The given `Subscription`.\n\n\n\n */\n\n fn add_observer(&mut self, observer: Arc<T>);\n\n\n\n /**\n\n Remove the observer.\n\n\n\n # Arguments\n\n\n\n * `observer` - The given `Subscription`.\n\n\n\n */\n\n fn delete_observer(&mut self, observer: Arc<T>);\n", "file_path": "src/common.rs", "rank": 16, "score": 80508.6648198418 }, { "content": "pub fn 
generate_id() -> String {\n\n let since_the_epoch = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\");\n\n\n\n format!(\"{:?}{:?}\", thread::current().id(), since_the_epoch)\n\n}\n\n\n\n/**\n\n`SubscriptionFunc` struct implements the interface of `Subscription`,\n\nfor general purposes crossing over many modules of fpRust.\n\n\n\n# Arguments\n\n\n\n* `T` - The generic type of broadcasted data\n\n\n\n# Remarks\n\n\n\nIt's enough to use for general cases of `Subscription`.\n\n\n", "file_path": "src/common.rs", "rank": 17, "score": 78237.12254339557 }, { "content": "#[test]\n\nfn test_contains() {\n\n assert_eq!(true, contains!(&4)(vec![1, 2, 3, 4]));\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 18, "score": 69914.27145290212 }, { "content": "#[test]\n\nfn test_compose() {\n\n let add = |x| x + 2;\n\n let multiply = |x| x * 3;\n\n let divide = |x| x / 2;\n\n\n\n let result = (compose!(add, multiply, divide))(10);\n\n assert_eq!(17, result);\n\n println!(\"Composed FnOnce Result is {}\", result);\n\n\n\n let result = (pipe!(add, multiply, divide))(10);\n\n assert_eq!(18, result);\n\n println!(\"Piped FnOnce Result is {}\", result);\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 19, "score": 69914.27145290212 }, { "content": "#[test]\n\nfn test_reverse() {\n\n assert_eq!(vec![4, 3, 2, 1], reverse!()(vec![1, 2, 3, 4]));\n\n}\n", "file_path": "src/fp.rs", "rank": 20, "score": 69914.27145290212 }, { "content": "#[test]\n\nfn test_cor_do_m() {\n\n let v = Arc::new(Mutex::new(String::from(\"\")));\n\n\n\n let _v = v.clone();\n\n do_m!(move |this| {\n\n println!(\"test_cor_do_m started\");\n\n\n\n let cor_inner1 = cor_newmutex_and_start!(\n\n |this| {\n\n let s = cor_yield!(this, Some(String::from(\"1\")));\n\n println!(\"cor_inner1 {:?}\", s);\n\n },\n\n String,\n\n i16\n\n );\n\n let cor_inner2 = cor_newmutex_and_start!(\n\n |this| {\n\n let s = cor_yield!(this, Some(String::from(\"2\")));\n\n println!(\"cor_inner2 {:?}\", s);\n\n },\n", "file_path": "src/cor.rs", "rank": 21, "score": 69914.27145290212 }, { "content": "type CorEffect<RETURN, RECEIVE> =\n\n dyn FnMut(Arc<Mutex<Cor<RETURN, RECEIVE>>>) + Send + Sync + 'static;\n\n\n\n/**\n\nThe `Cor` implements a *PythonicGenerator-like Coroutine*.\n\n\n\n# Arguments\n\n\n\n* `RETURN` - The generic type of returned data\n\n* `RECEIVE` - The generic type of received data\n\n\n\n# Remarks\n\n\n\nIt could be sync or async up to your usages,\n\nand it could use `yield_from` to send a value to another `Cor` object and get the response,\n\nand use `yield_ref`()/`yield_none`() to return my response to the callee of mine.\n\n\n\n*NOTE*: Beware the deadlock if it's sync(waiting for each other), except the entry point.\n\n\n\n*/\n", "file_path": "src/cor.rs", "rank": 22, "score": 69347.31156683978 }, { "content": "pub trait Queue<T> {\n\n fn offer(&mut self, v: T);\n\n fn poll(&mut self) -> Option<T>;\n\n fn put(&mut self, v: T);\n\n fn take(&mut self) -> Option<T>;\n\n}\n\n\n\n/**\n\n`BlockingQueue` implements `Queue` `trait` and provides `BlockingQueue` features.\n\n\n\n# Arguments\n\n\n\n* `T` - The generic type of data\n\n\n\n# Remarks\n\n\n\nIt's inspired by `BlockingQueue` in `Java`,\n\n, and easily use it on async scenaios.\n\n\n\n``\n", "file_path": "src/sync.rs", "rank": 23, "score": 67152.33996497828 }, { "content": "#[test]\n\nfn test_actor_common() {\n\n use std::time::Duration;\n\n\n\n use super::common::LinkedListAsync;\n\n\n\n #[derive(Clone, Debug)]\n\n enum Value {\n\n // Str(String),\n\n Int(i32),\n\n 
VecStr(Vec<String>),\n\n Spawn,\n\n Shutdown,\n\n }\n\n\n\n let result_i32 = LinkedListAsync::<i32>::new();\n\n let result_i32_thread = result_i32.clone();\n\n let result_string = LinkedListAsync::<Vec<String>>::new();\n\n let result_string_thread = result_string.clone();\n\n let mut root = ActorAsync::new(\n\n move |this: &mut ActorAsync<_, _>, msg: Value, context: &mut HashMap<String, Value>| {\n", "file_path": "src/actor.rs", "rank": 24, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_actor_ask() {\n\n use std::time::Duration;\n\n\n\n use super::common::LinkedListAsync;\n\n\n\n #[derive(Clone, Debug)]\n\n enum Value {\n\n AskIntByLinkedListAsync((i32, LinkedListAsync<i32>)),\n\n AskIntByBlockingQueue((i32, BlockingQueue<i32>)),\n\n }\n\n\n\n let mut root = ActorAsync::new(\n\n move |_: &mut ActorAsync<_, _>, msg: Value, _: &mut HashMap<String, Value>| match msg {\n\n Value::AskIntByLinkedListAsync(v) => {\n\n println!(\"Actor AskIntByLinkedListAsync\");\n\n v.1.push_back(v.0 * 10);\n\n }\n\n Value::AskIntByBlockingQueue(mut v) => {\n\n println!(\"Actor AskIntByBlockingQueue\");\n\n\n", "file_path": "src/actor.rs", "rank": 25, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_foldl_foldr() {\n\n // foldl!(f, initial)\n\n let result = (compose!(\n\n foldl!(\n\n |a, b| {\n\n if a < 4 {\n\n return a + b;\n\n }\n\n return a;\n\n },\n\n 0\n\n ),\n\n filter!(|x| *x < 6),\n\n map!(|x| x * 2)\n\n ))(vec![1, 2, 3, 4]);\n\n assert_eq!(6, result);\n\n println!(\"foldl Result is {:?}\", result);\n\n\n\n // foldr!(f, initial)\n\n let result = (compose!(\n", "file_path": "src/fp.rs", "rank": 26, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_map_insert() {\n\n use std::collections::HashMap;\n\n\n\n let expected_by_ident = &mut HashMap::new();\n\n expected_by_ident.insert(\"a\", \"2\");\n\n expected_by_ident.insert(\"b\", \"4\");\n\n expected_by_ident.insert(\"c\", \"6\");\n\n let actual = &mut HashMap::new();\n\n map_insert!(actual, [\n\n a : \"2\",\n\n b : \"4\",\n\n c : \"6\",\n\n ]);\n\n assert_eq!(expected_by_ident, actual);\n\n let actual = &mut HashMap::new();\n\n map_insert!(actual, {\n\n a : \"2\",\n\n b : \"4\",\n\n c : \"6\",\n\n });\n", "file_path": "src/common.rs", "rank": 27, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_cor_new() {\n\n use std::time;\n\n\n\n println!(\"test_cor_new\");\n\n\n\n let _cor1 = cor_newmutex!(\n\n |this| {\n\n println!(\"cor1 started\");\n\n\n\n let s = cor_yield!(this, Some(String::from(\"given_to_outside\")));\n\n println!(\"cor1 {:?}\", s);\n\n },\n\n String,\n\n i16\n\n );\n\n let cor1 = _cor1.clone();\n\n\n\n let _cor2 = cor_newmutex!(\n\n move |this| {\n\n println!(\"cor2 started\");\n", "file_path": "src/cor.rs", "rank": 28, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_maybe_present() {\n\n assert_eq!(false, Maybe::just(None::<bool>).present());\n\n assert_eq!(true, Maybe::val(true).present());\n\n\n\n assert_eq!(true, Maybe::just(None::<bool>).null());\n\n assert_eq!(false, Maybe::val(true).null());\n\n\n\n let mut val;\n\n\n\n val = false;\n\n Maybe::just(None::<bool>).let_do(|x| val = *x);\n\n assert_eq!(false, val);\n\n\n\n val = false;\n\n Maybe::val(true).let_do(|x| val = *x);\n\n assert_eq!(true, val);\n\n}\n", "file_path": "src/maybe.rs", "rank": 29, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_publisher_new() {\n\n use std::sync::Arc;\n\n\n\n use super::common::SubscriptionFunc;\n\n use super::handler::HandlerThread;\n\n\n\n use super::sync::CountDownLatch;\n\n\n\n 
let mut pub1 = Publisher::new();\n\n pub1.subscribe_fn(|x: Arc<u16>| {\n\n println!(\"pub1 {:?}\", x);\n\n assert_eq!(9, *Arc::make_mut(&mut x.clone()));\n\n });\n\n pub1.publish(9);\n\n\n\n let mut _h = HandlerThread::new_with_mutex();\n\n\n\n let mut pub2 = Publisher::new_with_handlers(Some(_h.clone()));\n\n\n\n let latch = CountDownLatch::new(1);\n", "file_path": "src/publisher.rs", "rank": 30, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_cor_do_m_pattern() {\n\n let _r = do_m_pattern! {\n\n let mut _v4 = String::from(\"\");\n\n _v4 = String::from(\"4\");\n\n\n\n exec {\n\n println!(\"do_m_pattern _v4:{:?}\", _v4)\n\n };\n\n\n\n _v1 String, Some(1), yield_from cor_newmutex_and_start!(\n\n |this| {\n\n let s = cor_yield!(this, Some(String::from(\"1\")));\n\n println!(\"cor_inner1 {:?}\", s);\n\n },\n\n String,\n\n i16\n\n );\n\n _v2 String, Some(2), yield_from cor_newmutex_and_start!(\n\n |this| {\n\n let s = cor_yield!(this, Some(String::from(\"2\")));\n", "file_path": "src/cor.rs", "rank": 31, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_maybe_flatmap() {\n\n assert_eq!(\n\n false,\n\n Maybe::val(true)\n\n .fmap(|x| return Maybe::val(!x.unwrap()))\n\n .unwrap()\n\n );\n\n assert_eq!(\n\n true,\n\n Maybe::val(false)\n\n .fmap(|x| return Maybe::val(!x.unwrap()))\n\n .unwrap()\n\n );\n\n\n\n assert_eq!(\n\n false,\n\n Maybe::val(true).map(|x| return Some(!x.unwrap())).unwrap()\n\n );\n\n assert_eq!(\n\n true,\n", "file_path": "src/maybe.rs", "rank": 32, "score": 67118.64878098 }, { "content": "#[test]\n\nfn test_map_reduce_filter() {\n\n let result =\n\n (compose!(reduce!(|a, b| a * b), filter!(|x| *x < 6), map!(|x| x * 2)))(vec![1, 2, 3, 4]);\n\n assert_eq!(Some(8), result);\n\n println!(\"test_map_reduce_filter Result is {:?}\", result);\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 33, "score": 64598.80620689965 }, { "content": "#[inline]\n\npub fn reverse<T>(v: Vec<T>) -> Vec<T> {\n\n v.into_iter().rev().collect::<Vec<T>>()\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 34, "score": 63552.07406995889 }, { "content": "#[inline]\n\npub fn contains<T: PartialEq>(x: &T, v: Vec<T>) -> bool {\n\n v.contains(x)\n\n}\n\n\n\n/**\n\nImplementations of `ECMASript`-like `reduce`()\n\n\n\n# Arguments\n\n\n\n* `T` - The generic type of data.\n\n\n\n*NOTE*: Credit https://github.com/dtolnay/reduce\n\n*/\n", "file_path": "src/fp.rs", "rank": 35, "score": 58038.49363137405 }, { "content": "pub fn get_mut<'a, T>(v: &'a mut Vec<T>, index: usize) -> Option<&'a mut T> {\n\n for (i, elem) in v.into_iter().enumerate() {\n\n if index == i {\n\n return Some(elem);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct LinkedListAsync<T> {\n\n inner: Arc<Mutex<LinkedList<T>>>,\n\n\n\n #[cfg(feature = \"for_futures\")]\n\n alive: Arc<Mutex<AtomicBool>>,\n\n #[cfg(feature = \"for_futures\")]\n\n waker: Arc<Mutex<Option<Waker>>>,\n\n\n\n _t: PhantomData<T>,\n", "file_path": "src/common.rs", "rank": 36, "score": 50026.27001290665 }, { "content": "pub trait Actor<Msg, ContextValue, HandleType, Functor>: UniqueId<String> {\n\n fn receive(\n\n &mut self,\n\n this: &mut Self,\n\n message: Msg,\n\n context: &mut HashMap<String, ContextValue>,\n\n );\n\n fn spawn_with_handle(&self, func: Functor) -> HandleType;\n\n\n\n fn get_handle(&self) -> HandleType;\n\n fn get_handle_child(&self, name: impl Into<String>) -> Option<HandleType>;\n\n fn get_handle_parent(&self) -> Option<HandleType>;\n\n\n\n fn for_each_child(&self, func: impl FnMut(&String, &mut 
HandleType));\n\n}\n\n\n", "file_path": "src/actor.rs", "rank": 37, "score": 48201.34317688905 }, { "content": "pub trait Reduce<T> {\n\n fn reduce<F>(self, f: F) -> Option<T>\n\n where\n\n Self: Sized,\n\n F: FnMut(T, T) -> T;\n\n}\n\n\n\nimpl<T, I> Reduce<T> for I\n\nwhere\n\n I: Iterator<Item = T>,\n\n{\n\n #[inline]\n\n fn reduce<F>(mut self, f: F) -> Option<T>\n\n where\n\n Self: Sized,\n\n F: FnMut(T, T) -> T,\n\n {\n\n self.next().map(|first| self.fold(first, f))\n\n }\n\n}\n\n\n", "file_path": "src/fp.rs", "rank": 38, "score": 41879.62977422565 }, { "content": "pub trait UniqueId<T> {\n\n /**\n\n The callback when `Subscription` received the broadcasted value.\n\n\n\n # Arguments\n\n\n\n * `func` - The given `FnMut`.\n\n\n\n */\n\n fn get_id(&self) -> T;\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 39, "score": 40615.67057108576 }, { "content": "pub trait Handle<Msg>: UniqueId<String> {\n\n fn send(&mut self, message: Msg);\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct HandleAsync<Msg>\n\nwhere\n\n Msg: Send + 'static,\n\n{\n\n id: String,\n\n queue: BlockingQueue<Msg>,\n\n}\n\n\n\nimpl<Msg> Handle<Msg> for HandleAsync<Msg>\n\nwhere\n\n Msg: Send + 'static,\n\n{\n\n fn send(&mut self, message: Msg) {\n\n self.queue.offer(message);\n\n }\n", "file_path": "src/actor.rs", "rank": 40, "score": 36206.592719020904 }, { "content": "/*!\n\nIn this module there're implementations & tests of `Handler`.\n\n(Inspired by `Android Handler`)\n\n*/\n\nuse std::sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Arc, Mutex,\n\n};\n\nuse std::thread;\n\n\n\nuse super::common::RawFunc;\n\nuse super::sync as fpSync;\n\nuse super::sync::Queue;\n\n\n\n/**\n\n`Handler` `trait` defines the interface which could receive `FnMut` and run them on its own `thread`.\n\n\n\n# Remarks\n\n\n\nThis is highly inspired by `Android Handler` concepts.\n\n\n\n*/\n", "file_path": "src/handler.rs", "rank": 52, "score": 30591.273670805724 }, { "content": "/**\n\n`HandlerThread` could receive `FnMut` and run them on its own `thread`.\n\nIt implements `Handler` `trait` simply and works well.\n\n\n\nThis is kind of facade which just handles `thread`,\n\nand bypasses jobs to `HandlerThreadInner`(private implementations).\n\n\n\n# Remarks\n\n\n\nThis is highly inspired by `Android Handler` concepts.\n\n\n\n*/\n\n#[derive(Clone)]\n\npub struct HandlerThread {\n\n started_alive: Arc<Mutex<(AtomicBool, AtomicBool)>>,\n\n\n\n inner: Arc<HandlerThreadInner>,\n\n\n\n handle: Arc<Mutex<Option<thread::JoinHandle<()>>>>,\n\n}\n", "file_path": "src/handler.rs", "rank": 53, "score": 30589.902618127526 }, { "content": "\n\nimpl Default for HandlerThread {\n\n fn default() -> Self {\n\n HandlerThread {\n\n started_alive: Arc::new(Mutex::new((AtomicBool::new(false), AtomicBool::new(false)))),\n\n inner: Arc::new(HandlerThreadInner::new()),\n\n\n\n handle: Arc::new(Mutex::new(None)),\n\n }\n\n }\n\n}\n\n\n\nimpl HandlerThread {\n\n pub fn new() -> HandlerThread {\n\n Default::default()\n\n }\n\n pub fn new_with_mutex() -> Arc<Mutex<HandlerThread>> {\n\n Arc::new(Mutex::new(HandlerThread::new()))\n\n }\n\n}\n", "file_path": "src/handler.rs", "rank": 54, "score": 30585.70190288052 }, { "content": " let latch2 = latch.clone();\n\n\n\n // /*\n\n h.post(RawFunc::new(move || {\n\n println!(\"Executed !\");\n\n\n\n let latch3 = latch2.clone();\n\n\n\n let mut _h2 = HandlerThread::new_with_mutex();\n\n let mut _h2_inside = _h2.clone();\n\n\n\n let mut h2 = _h2.lock().unwrap();\n\n h2.start();\n\n\n\n h2.post(RawFunc::new(move || {\n\n 
latch3.countdown();\n\n\n\n {\n\n _h2_inside.lock().unwrap().stop();\n\n }\n", "file_path": "src/handler.rs", "rank": 55, "score": 30580.476554423003 }, { "content": " return;\n\n }\n\n started.store(true, Ordering::SeqCst);\n\n if alive.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n alive.store(true, Ordering::SeqCst);\n\n }\n\n\n\n let mut _inner = self.inner.clone();\n\n let mut this = self.clone();\n\n self.handle = Arc::new(Mutex::new(Some(thread::spawn(move || {\n\n Arc::make_mut(&mut _inner).start();\n\n\n\n this.stop();\n\n }))));\n\n }\n\n\n\n fn stop(&mut self) {\n\n {\n", "file_path": "src/handler.rs", "rank": 56, "score": 30575.750159415653 }, { "content": "\n\nimpl Handler for HandlerThread {\n\n fn is_started(&mut self) -> bool {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, _) = &*started_alive;\n\n started.load(Ordering::SeqCst)\n\n }\n\n\n\n fn is_alive(&mut self) -> bool {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(_, ref alive) = &*started_alive;\n\n alive.load(Ordering::SeqCst)\n\n }\n\n\n\n fn start(&mut self) {\n\n {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, ref alive) = &*started_alive;\n\n\n\n if started.load(Ordering::SeqCst) {\n", "file_path": "src/handler.rs", "rank": 57, "score": 30574.902897859065 }, { "content": " let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, ref alive) = &*started_alive;\n\n\n\n if !started.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n if !alive.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n alive.store(false, Ordering::SeqCst);\n\n }\n\n\n\n Arc::make_mut(&mut self.inner).stop();\n\n\n\n // NOTE: Kill thread <- OS depending\n\n // let mut handle = self.handle.lock().unwrap();\n\n // handle\n\n // .take()\n\n // .expect(\"Called stop on non-running thread\")\n\n // .join()\n\n // .expect(\"Could not join spawned thread\");\n\n }\n\n\n\n fn post(&mut self, func: RawFunc) {\n\n Arc::make_mut(&mut self.inner).post(func);\n\n }\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 58, "score": 30574.887714223813 }, { "content": "\n\n Stop `Cor`.\n\n This will make self.`is_alive`() returns `false`,\n\n and all `FnMut` posted by self.`post`() will not be executed.\n\n (Because it has stopped :P, that's reasonable)\n\n\n\n */\n\n fn stop(&mut self);\n\n\n\n /**\n\n Post a job which will run on this `Handler`.\n\n\n\n # Arguments\n\n\n\n * `func` - The posted job.\n\n ``\n\n */\n\n fn post(&mut self, func: RawFunc);\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 59, "score": 30572.936596626103 }, { "content": " }));\n\n }));\n\n println!(\"Test\");\n\n\n\n thread::sleep(time::Duration::from_millis(1));\n\n\n\n assert_eq!(true, h.is_alive());\n\n assert_eq!(true, h.is_started());\n\n\n\n h.stop();\n\n thread::sleep(time::Duration::from_millis(1));\n\n\n\n assert_eq!(false, h.is_alive());\n\n assert_eq!(true, h.is_started());\n\n\n\n latch.clone().wait();\n\n}\n", "file_path": "src/handler.rs", "rank": 60, "score": 30570.751061957897 }, { "content": " Some(f) => {\n\n f.invoke();\n\n }\n\n None => {\n\n self.alive.store(false, Ordering::SeqCst);\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn stop(&mut self) {\n\n self.alive.store(false, Ordering::SeqCst);\n\n }\n\n\n\n fn post(&mut self, func: RawFunc) {\n\n let q = Arc::make_mut(&mut self.q);\n\n\n\n q.put(func);\n\n }\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 61, "score": 30565.807415667587 }, { "content": " }\n\n\n\n fn is_alive(&mut self) -> bool {\n\n 
self.alive.load(Ordering::SeqCst)\n\n }\n\n\n\n fn start(&mut self) {\n\n self.alive.store(true, Ordering::SeqCst);\n\n\n\n if self.is_started() {\n\n return;\n\n }\n\n self.started.store(true, Ordering::SeqCst);\n\n\n\n let q = Arc::make_mut(&mut self.q);\n\n\n\n while self.alive.load(Ordering::SeqCst) {\n\n let v = q.take();\n\n\n\n match v {\n", "file_path": "src/handler.rs", "rank": 62, "score": 30563.261088878033 }, { "content": " waker: Arc<Mutex<Option<Waker>>>,\n\n}\n\n\n\n#[cfg(all(feature = \"publisher\", feature = \"handler\"))]\n\nimpl<T> WillAsync<T> {\n\n pub fn new(effect: impl FnMut() -> T + Send + Sync + 'static) -> WillAsync<T> {\n\n Self::new_with_handler(effect, HandlerThread::new_with_mutex())\n\n }\n\n pub fn new_with_handler(\n\n effect: impl FnMut() -> T + Send + Sync + 'static,\n\n handler: Arc<Mutex<dyn Handler>>,\n\n ) -> WillAsync<T> {\n\n WillAsync {\n\n handler,\n\n effect: Arc::new(Mutex::new(effect)),\n\n started_alive: Arc::new(Mutex::new((AtomicBool::new(false), AtomicBool::new(false)))),\n\n publisher: Arc::new(Mutex::new(Publisher::default())),\n\n result: Arc::new(Mutex::new(None)),\n\n\n\n #[cfg(feature = \"for_futures\")]\n", "file_path": "src/sync.rs", "rank": 63, "score": 29963.26404123218 }, { "content": "\n\n let spawn_future_result = {\n\n shared_thread_pool()\n\n .inner\n\n .lock()\n\n .unwrap()\n\n .spawn_with_handle(async move { queue.poll_result() })\n\n };\n\n match spawn_future_result {\n\n Ok(future) => future.await,\n\n Err(e) => Err(Box::new(e)),\n\n }\n\n }\n\n pub async fn take_result_as_future(&mut self) -> Result<T, Box<dyn Error + Send>> {\n\n let mut queue = self.clone();\n\n\n\n let spawn_future_result = {\n\n shared_thread_pool()\n\n .inner\n\n .lock()\n", "file_path": "src/sync.rs", "rank": 64, "score": 29960.640364872186 }, { "content": " }\n\n None => {\n\n let result = { self.blocking_recever.lock().unwrap().recv() };\n\n\n\n match result {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(Box::new(e)),\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n#[cfg(feature = \"for_futures\")]\n\nimpl<T> BlockingQueue<T>\n\nwhere\n\n T: Send + 'static + Clone,\n\n{\n\n pub async fn poll_result_as_future(&mut self) -> Result<T, Box<dyn Error + Send>> {\n\n let mut queue = self.clone();\n", "file_path": "src/sync.rs", "rank": 65, "score": 29959.441381979024 }, { "content": " .unwrap()\n\n .spawn_with_handle(async move { queue.take_result() })\n\n };\n\n match spawn_future_result {\n\n Ok(future) => future.await,\n\n Err(e) => Err(Box::new(e)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"for_futures\")]\n\n#[futures_test::test]\n\nasync fn test_sync_future() {\n\n let mut wa = WillAsync::new(move || 1);\n\n wa.start();\n\n\n\n assert_eq!(Some(1), wa.await);\n\n\n\n let mut _h = HandlerThread::new_with_mutex();\n\n let mut pub1 = Publisher::new_with_handlers(Some(_h.clone()));\n", "file_path": "src/sync.rs", "rank": 66, "score": 29958.168205971477 }, { "content": " ``\n\n */\n\n fn remove_callback(&mut self, subscription: Arc<SubscriptionFunc<T>>);\n\n\n\n /**\n\n Get the result.\n\n */\n\n fn result(&mut self) -> Option<T>;\n\n}\n\n\n\n#[cfg(all(feature = \"publisher\", feature = \"handler\"))]\n\n#[derive(Clone)]\n\npub struct WillAsync<T> {\n\n effect: Arc<Mutex<dyn FnMut() -> T + Send + Sync + 'static>>,\n\n handler: Arc<Mutex<dyn Handler>>,\n\n publisher: Arc<Mutex<Publisher<T>>>,\n\n started_alive: Arc<Mutex<(AtomicBool, AtomicBool)>>,\n\n result: Arc<Mutex<Option<T>>>,\n\n\n\n #[cfg(feature = \"for_futures\")]\n", "file_path": "src/sync.rs", "rank": 67, 
"score": 29958.151040128316 }, { "content": " }\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n\n/**\n\n`CountDownLatch` implements a latch with a value(> 0),\n\nwaiting for the value counted down until <= 0\n\n(the countdown action would be in other threads).\n\n\n\n# Remarks\n\n\n\nIt's inspired by `CountDownLatch` in `Java`\n\n, and easily use it on async scenaios.\n\n\n\n``\n\n*/\n\n#[derive(Debug, Clone)]\n\npub struct CountDownLatch {\n", "file_path": "src/sync.rs", "rank": 68, "score": 29957.699136178162 }, { "content": "\n\n let latch = CountDownLatch::new(4);\n\n let latch2 = latch.clone();\n\n\n\n let _ = pub1.subscribe(Arc::new(SubscriptionFunc::new(move |y| {\n\n println!(\"test_sync_future {:?}\", y);\n\n latch2.countdown();\n\n })));\n\n\n\n println!(\"test_sync_future before Publisher.start()\");\n\n\n\n {\n\n let h = &mut _h.lock().unwrap();\n\n\n\n println!(\"test_sync_future hh2\");\n\n h.start();\n\n println!(\"test_sync_future hh2 running\");\n\n }\n\n std::thread::sleep(Duration::from_millis(1));\n\n\n", "file_path": "src/sync.rs", "rank": 69, "score": 29957.161687184944 }, { "content": " self.result.lock().unwrap().clone()\n\n }\n\n}\n\n\n\n#[cfg(all(feature = \"for_futures\", feature = \"publisher\", feature = \"handler\"))]\n\nimpl<T> Future for WillAsync<T>\n\nwhere\n\n T: Clone + Send + Sync + 'static,\n\n{\n\n type Output = Option<T>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, ref alive) = &*started_alive;\n\n\n\n if started.load(Ordering::SeqCst) && (!alive.load(Ordering::SeqCst)) {\n\n Poll::Ready(self.clone().result())\n\n } else {\n\n {\n\n self.waker.lock().unwrap().replace(cx.waker().clone());\n", "file_path": "src/sync.rs", "rank": 70, "score": 29956.389104754522 }, { "content": " match result {\n\n Ok(v) => Some(v),\n\n Err(_) => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> BlockingQueue<T>\n\nwhere\n\n T: Send + 'static,\n\n{\n\n pub fn poll_result(&mut self) -> Result<T, Box<dyn Error + Send>> {\n\n if !self.is_alive() {\n\n return Err(Box::new(RecvTimeoutError::Disconnected));\n\n }\n\n\n\n {\n\n let result = { self.blocking_recever.lock().unwrap().try_recv() };\n\n\n\n match result {\n", "file_path": "src/sync.rs", "rank": 71, "score": 29953.59397253345 }, { "content": "#[cfg(feature = \"for_futures\")]\n\nuse std::pin::Pin;\n\n#[cfg(feature = \"for_futures\")]\n\nuse std::task::{Context, Poll, Waker};\n\n\n\nuse super::common::{Observable, RawFunc, SubscriptionFunc};\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse super::handler::{Handler, HandlerThread};\n\n#[cfg(feature = \"publisher\")]\n\nuse super::publisher::Publisher;\n\n\n\n/**\n\n`Will` `trait` defines the interface which could do actions in its `Handler`.\n\n\n\n# Remarks\n\n\n\nThis is highly inspired by `Java Future` concepts.\n\n\n\n*/\n", "file_path": "src/sync.rs", "rank": 72, "score": 29953.344377675952 }, { "content": "/*!\n\nIn this module there're implementations & tests\n\nof general async handling features.\n\n*/\n\n\n\nuse std::error::Error;\n\nuse std::sync::{\n\n atomic::{AtomicBool, Ordering},\n\n mpsc,\n\n mpsc::RecvTimeoutError,\n\n Arc, Condvar, Mutex,\n\n};\n\nuse std::time::Duration;\n\n\n\n#[cfg(feature = \"for_futures\")]\n\nuse super::common::shared_thread_pool;\n\n#[cfg(feature = \"for_futures\")]\n\nuse crate::futures::task::SpawnExt;\n\n#[cfg(feature = \"for_futures\")]\n\nuse std::future::Future;\n", "file_path": "src/sync.rs", "rank": 73, "score": 
29952.86244670762 }, { "content": " waker: Arc::new(Mutex::new(None)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(all(feature = \"publisher\", feature = \"handler\"))]\n\nimpl<T> Will<T> for WillAsync<T>\n\nwhere\n\n T: Clone + Send + Sync + 'static,\n\n{\n\n fn is_started(&mut self) -> bool {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, _) = &*started_alive;\n\n started.load(Ordering::SeqCst)\n\n }\n\n\n\n fn is_alive(&mut self) -> bool {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(_, ref alive) = &*started_alive;\n\n alive.load(Ordering::SeqCst)\n", "file_path": "src/sync.rs", "rank": 74, "score": 29951.18917086523 }, { "content": " handler.post(RawFunc::new(move || {\n\n let result = { (_effect.lock().unwrap())() };\n\n {\n\n (*this.result.lock().unwrap()) = Some(result.clone());\n\n }\n\n {\n\n _publisher.lock().unwrap().publish(result);\n\n }\n\n this.stop();\n\n }));\n\n }\n\n\n\n fn stop(&mut self) {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, ref alive) = &*started_alive;\n\n if !started.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n if !alive.load(Ordering::SeqCst) {\n\n return;\n", "file_path": "src/sync.rs", "rank": 75, "score": 29949.72373548224 }, { "content": " pair: Arc<(Arc<Mutex<u64>>, Condvar)>,\n\n\n\n #[cfg(feature = \"for_futures\")]\n\n waker: Arc<Mutex<Option<Waker>>>,\n\n}\n\n\n\nimpl CountDownLatch {\n\n pub fn new(count: u64) -> CountDownLatch {\n\n CountDownLatch {\n\n pair: Arc::new((Arc::new(Mutex::new(count)), Condvar::new())),\n\n\n\n #[cfg(feature = \"for_futures\")]\n\n waker: Arc::new(Mutex::new(None)),\n\n }\n\n }\n\n\n\n pub fn countdown(&self) {\n\n {\n\n let &(ref lock, ref cvar) = &*self.pair.clone();\n\n let mut started = lock.lock().unwrap();\n", "file_path": "src/sync.rs", "rank": 76, "score": 29949.479060020658 }, { "content": "`Queue` `trait` defined the interface which perform basic `Queue` actions.\n\n\n\n# Arguments\n\n\n\n* `T` - The generic type of data\n\n\n\n# Remarks\n\n\n\nIt's inspired by `Queue` in `Java`.\n\n\n\n``\n\n*/\n", "file_path": "src/sync.rs", "rank": 77, "score": 29949.444099642264 }, { "content": " }\n\n alive.store(false, Ordering::SeqCst);\n\n\n\n #[cfg(feature = \"for_futures\")]\n\n {\n\n if let Some(waker) = self.waker.lock().unwrap().take() {\n\n waker.wake()\n\n }\n\n }\n\n }\n\n\n\n fn add_callback(&mut self, subscription: Arc<SubscriptionFunc<T>>) {\n\n self.publisher.lock().unwrap().subscribe(subscription);\n\n }\n\n\n\n fn remove_callback(&mut self, subscription: Arc<SubscriptionFunc<T>>) {\n\n self.publisher.lock().unwrap().delete_observer(subscription);\n\n }\n\n\n\n fn result(&mut self) -> Option<T> {\n", "file_path": "src/sync.rs", "rank": 78, "score": 29949.410957340362 }, { "content": "\n\n#[cfg(feature = \"for_futures\")]\n\nimpl Future for CountDownLatch {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let &(ref remaining, _) = &*self.pair;\n\n let count = remaining.lock().unwrap();\n\n if *count > 0 {\n\n {\n\n self.waker.lock().unwrap().replace(cx.waker().clone());\n\n }\n\n Poll::Pending\n\n } else {\n\n Poll::Ready(())\n\n }\n\n }\n\n}\n\n\n\n/**\n", "file_path": "src/sync.rs", "rank": 79, "score": 29947.42741762928 }, { "content": " let mut started;\n\n if result.is_err() {\n\n started = result.err().unwrap().into_inner();\n\n } else {\n\n started = result.unwrap();\n\n }\n\n */\n\n let mut started = lock.lock().unwrap();\n\n\n\n while *started > 0 
{\n\n let result = cvar.wait(started);\n\n\n\n if result.is_err() {\n\n started = result.err().unwrap().into_inner();\n\n } else {\n\n started = result.unwrap();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/sync.rs", "rank": 80, "score": 29945.89952208272 }, { "content": " Ok(v) => Ok(v),\n\n Err(e) => Err(Box::new(e)),\n\n }\n\n }\n\n }\n\n\n\n pub fn take_result(&mut self) -> Result<T, Box<dyn Error + Send>> {\n\n if !self.is_alive() {\n\n return Err(Box::new(RecvTimeoutError::Disconnected));\n\n }\n\n\n\n {\n\n match self.timeout {\n\n Some(duration) => {\n\n let result = { self.blocking_recever.lock().unwrap().recv_timeout(duration) };\n\n\n\n match result {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(Box::new(e)),\n\n }\n", "file_path": "src/sync.rs", "rank": 81, "score": 29945.771658390007 }, { "content": " }\n\n\n\n fn start(&mut self) {\n\n let started_alive = self.started_alive.lock().unwrap();\n\n let &(ref started, ref alive) = &*started_alive;\n\n if started.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n started.store(true, Ordering::SeqCst);\n\n if alive.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n alive.store(true, Ordering::SeqCst);\n\n\n\n let mut this = self.clone();\n\n let _effect = self.effect.clone();\n\n let _publisher = self.publisher.clone();\n\n\n\n let mut handler = self.handler.lock().unwrap();\n\n handler.start();\n", "file_path": "src/sync.rs", "rank": 82, "score": 29945.5146395212 }, { "content": " pub fn stop(&mut self) {\n\n {\n\n let alive = &self.alive.lock().unwrap();\n\n if !alive.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n alive.store(false, Ordering::SeqCst);\n\n\n\n let sender = self.blocking_sender.lock().unwrap();\n\n drop(sender);\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Queue<T> for BlockingQueue<T>\n\nwhere\n\n T: 'static + Send,\n\n{\n\n fn offer(&mut self, v: T) {\n\n {\n", "file_path": "src/sync.rs", "rank": 83, "score": 29945.459067767 }, { "content": " let alive = &self.alive.lock().unwrap();\n\n if !alive.load(Ordering::SeqCst) {\n\n return;\n\n }\n\n\n\n let result = self.blocking_sender.lock().unwrap().send(v);\n\n if self.panic && result.is_err() {\n\n std::panic::panic_any(result.err());\n\n }\n\n }\n\n }\n\n\n\n fn poll(&mut self) -> Option<T> {\n\n let result = self.poll_result();\n\n\n\n if self.panic && result.is_err() {\n\n std::panic::panic_any(result.err());\n\n // return None;\n\n }\n\n\n", "file_path": "src/sync.rs", "rank": 84, "score": 29945.455238569906 }, { "content": " if *started > 0 {\n\n *started -= 1;\n\n }\n\n cvar.notify_one();\n\n\n\n #[cfg(feature = \"for_futures\")]\n\n {\n\n let mut waker = self.waker.lock().unwrap();\n\n if let Some(waker) = waker.take() {\n\n waker.wake()\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn wait(&self) {\n\n let &(ref lock, ref cvar) = &*self.pair;\n\n\n\n /*\n\n let mut result = lock.lock();\n", "file_path": "src/sync.rs", "rank": 85, "score": 29944.557854427938 }, { "content": "*/\n\n#[derive(Debug, Clone)]\n\npub struct BlockingQueue<T> {\n\n pub timeout: Option<Duration>,\n\n pub panic: bool,\n\n alive: Arc<Mutex<AtomicBool>>,\n\n blocking_sender: Arc<Mutex<mpsc::Sender<T>>>,\n\n blocking_recever: Arc<Mutex<mpsc::Receiver<T>>>,\n\n}\n\n\n\n// impl <T> Copy for BlockingQueue<T> {\n\n// fn clone(&self) -> BlockingQueue<T> {\n\n// *self\n\n// }\n\n// }\n\n\n\nimpl<T> Default for BlockingQueue<T> {\n\n fn default() -> Self {\n\n let (blocking_sender, blocking_recever) = mpsc::channel();\n\n\n", "file_path": "src/sync.rs", "rank": 86, "score": 29944.171019238907 }, { "content": " BlockingQueue {\n\n 
alive: Arc::new(Mutex::new(AtomicBool::new(true))),\n\n timeout: None,\n\n panic: false,\n\n blocking_sender: Arc::new(Mutex::new(blocking_sender)),\n\n blocking_recever: Arc::new(Mutex::new(blocking_recever)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> BlockingQueue<T> {\n\n pub fn new() -> BlockingQueue<T> {\n\n Default::default()\n\n }\n\n\n\n pub fn is_alive(&self) -> bool {\n\n let alive = &self.alive.lock().unwrap();\n\n alive.load(Ordering::SeqCst)\n\n }\n\n\n", "file_path": "src/sync.rs", "rank": 87, "score": 29943.872558843483 }, { "content": " pub1.publish(1);\n\n println!(\"test_sync_future pub1.publish\");\n\n pub1.publish(2);\n\n println!(\"test_sync_future pub1.publish\");\n\n pub1.publish(3);\n\n println!(\"test_sync_future pub1.publish\");\n\n pub1.publish(4);\n\n println!(\"test_sync_future pub1.publish\");\n\n\n\n let _ = latch.await;\n\n println!(\"test_sync_future done\");\n\n}\n\n\n", "file_path": "src/sync.rs", "rank": 88, "score": 29943.209613268456 }, { "content": " Stop `Will`.\n\n */\n\n fn stop(&mut self);\n\n\n\n /**\n\n Add a callback called when it has completed.\n\n\n\n # Arguments\n\n\n\n * `subscription` - The callback.\n\n ``\n\n */\n\n fn add_callback(&mut self, subscription: Arc<SubscriptionFunc<T>>);\n\n\n\n /**\n\n Remove a callback called when it has completed.\n\n\n\n # Arguments\n\n\n\n * `subscription` - The callback.\n", "file_path": "src/sync.rs", "rank": 89, "score": 29938.8652366587 }, { "content": " match result {\n\n Ok(v) => Some(v),\n\n Err(_) => None,\n\n }\n\n }\n\n\n\n fn put(&mut self, v: T) {\n\n // NOTE Currently there's no maximum size of BlockingQueue.\n\n\n\n self.offer(v);\n\n }\n\n\n\n fn take(&mut self) -> Option<T> {\n\n let result = self.take_result();\n\n\n\n if self.panic && result.is_err() {\n\n std::panic::panic_any(result.err());\n\n // return None;\n\n }\n\n\n", "file_path": "src/sync.rs", "rank": 90, "score": 29938.74922672405 }, { "content": "## MonadIO (RxObserver-like)\n\n\n\nExample:\n\n```rust\n\n\n\nextern crate fp_rust;\n\n\n\nuse std::{\n\n thread,\n\n time,\n\n sync::{\n\n Arc,\n\n Mutex,\n\n Condvar,\n\n }\n\n};\n\n\n\nuse fp_rust::handler::{\n\n Handler,\n\n HandlerThread,\n\n};\n\nuse fp_rust::common::SubscriptionFunc;\n\nuse fp_rust::monadio::{\n\n MonadIO,\n\n of,\n\n};\n\nuse fp_rust::sync::CountDownLatch;\n\n\n\n// fmap & map (sync)\n\nlet mut _subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<u16>| {\n\n println!(\"monadio_sync {:?}\", x); // monadio_sync 36\n\n assert_eq!(36, *Arc::make_mut(&mut x.clone()));\n\n}));\n\nlet subscription = _subscription.clone();\n\nlet monadio_sync = MonadIO::just(1)\n\n .fmap(|x| MonadIO::new(move || x * 4))\n\n .map(|x| x * 3)\n\n .map(|x| x * 3);\n\nmonadio_sync.subscribe(subscription);\n\n\n\n// fmap & map (async)\n\nlet mut _handler_observe_on = HandlerThread::new_with_mutex();\n\nlet mut _handler_subscribe_on = HandlerThread::new_with_mutex();\n\nlet monadio_async = MonadIO::new_with_handlers(\n\n || {\n\n println!(\"In string\");\n\n String::from(\"ok\")\n\n },\n\n Some(_handler_observe_on.clone()),\n\n Some(_handler_subscribe_on.clone()),\n\n);\n\n\n\nlet latch = CountDownLatch::new(1);\n\nlet latch2 = latch.clone();\n\n\n\nthread::sleep(time::Duration::from_millis(1));\n\n\n\nlet subscription = Arc::new(SubscriptionFunc::new(move |x: Arc<String>| {\n\n println!(\"monadio_async {:?}\", x); // monadio_async ok\n\n\n\n latch2.countdown(); // Unlock here\n\n}));\n\nmonadio_async.subscribe(subscription);\n\nmonadio_async.subscribe(Arc::new(SubscriptionFunc::new(move 
|x: Arc<String>| {\n\n println!(\"monadio_async sub2 {:?}\", x); // monadio_async sub2 ok\n\n})));\n\n{\n\n let mut handler_observe_on = _handler_observe_on.lock().unwrap();\n\n let mut handler_subscribe_on = _handler_subscribe_on.lock().unwrap();\n\n\n\n println!(\"hh2\");\n\n handler_observe_on.start();\n\n handler_subscribe_on.start();\n", "file_path": "README.md", "rank": 91, "score": 38.04694645467793 }, { "content": "\n\n# Remarks\n\n\n\nIt could be sync or async up to your usages,\n\nand it could be mapped just as Rx-like APIs.\n\n\n\n*/\n\n#[derive(Clone)]\n\npub struct Publisher<X> {\n\n // observers: Vec<Arc<dyn Subscription<X>>>,\n\n observers: Vec<Arc<SubscriptionFunc<X>>>,\n\n\n\n sub_handler: Option<Arc<Mutex<dyn Handler>>>,\n\n\n\n _x: PhantomData<X>,\n\n}\n\n\n\nimpl<X> Default for Publisher<X> {\n\n fn default() -> Self {\n\n Publisher {\n", "file_path": "src/publisher.rs", "rank": 92, "score": 30.191732658654633 }, { "content": "}\n\nimpl<X> Publisher<X>\n\nwhere\n\n X: Send + Sync + 'static,\n\n{\n\n pub fn publish(&mut self, val: X) {\n\n self.notify_observers(Arc::new(val));\n\n }\n\n\n\n pub fn subscribe(&mut self, s: Arc<SubscriptionFunc<X>>) -> Arc<SubscriptionFunc<X>> {\n\n self.add_observer(s.clone());\n\n s\n\n }\n\n pub fn subscribe_fn(\n\n &mut self,\n\n func: impl FnMut(Arc<X>) + Send + Sync + 'static,\n\n ) -> Arc<SubscriptionFunc<X>> {\n\n self.subscribe(Arc::new(SubscriptionFunc::new(func)))\n\n }\n\n pub fn map<Z: Send + Sync + 'static>(\n", "file_path": "src/publisher.rs", "rank": 93, "score": 29.47524898501989 }, { "content": "/*!\n\nIn this module, there're implementations & tests of `Publisher`\n\n*/\n\nuse std::marker::PhantomData;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[cfg(feature = \"for_futures\")]\n\nuse super::common::LinkedListAsync;\n\n\n\nuse super::common::{Observable, RawFunc, Subscription, SubscriptionFunc, UniqueId};\n\nuse super::handler::Handler;\n\nuse super::sync::{BlockingQueue, Queue};\n\n\n\n/**\n\nThe `Publisher` implements *PubSub-like* features.\n\n\n\n# Arguments\n\n\n\n* `X` - The generic type of yielded/yielding data\n\n* `T` - In order to pass compilations, `T` must be `SubscriptionFunc<X>` (for instantiation)\n", "file_path": "src/publisher.rs", "rank": 94, "score": 28.986442403377374 }, { "content": " }));\n\n }\n\n None => {\n\n let sub = Arc::make_mut(&mut _do_sub);\n\n (sub)();\n\n }\n\n };\n\n }\n\n}\n\n\n\n#[cfg(feature = \"for_futures\")]\n\n#[futures_test::test]\n\nasync fn test_publisher_stream() {\n\n use std::sync::Arc;\n\n\n\n use futures::StreamExt;\n\n\n\n use super::common::SubscriptionFunc;\n\n use super::handler::HandlerThread;\n\n\n", "file_path": "src/publisher.rs", "rank": 95, "score": 28.47469878031867 }, { "content": " &mut self,\n\n func: impl FnMut(Arc<X>) -> Z + Send + Sync + 'static,\n\n ) -> Arc<SubscriptionFunc<X>> {\n\n let _func = Arc::new(Mutex::new(func));\n\n self.subscribe_fn(move |x: Arc<X>| {\n\n (_func.lock().unwrap())(x);\n\n })\n\n }\n\n pub fn unsubscribe(&mut self, s: Arc<SubscriptionFunc<X>>) {\n\n self.delete_observer(s);\n\n }\n\n\n\n pub fn subscribe_blocking_queue(\n\n &mut self,\n\n queue: &BlockingQueue<Arc<X>>,\n\n ) -> Arc<SubscriptionFunc<X>> {\n\n let mut queue_new = queue.clone();\n\n self.subscribe_fn(move |v| queue_new.put(v))\n\n }\n\n pub fn as_blocking_queue(&mut self) -> (Arc<SubscriptionFunc<X>>, BlockingQueue<Arc<X>>) {\n", "file_path": "src/publisher.rs", "rank": 96, "score": 28.4657633940368 }, { "content": " let queue = BlockingQueue::new();\n\n let 
subscription = self.subscribe_blocking_queue(&queue);\n\n\n\n (subscription, queue)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"for_futures\")]\n\nimpl<X: Send + Sync + 'static + Unpin> Publisher<X> {\n\n pub fn subscribe_as_stream(&mut self, s: Arc<SubscriptionFunc<X>>) -> LinkedListAsync<Arc<X>> {\n\n self.subscribe(s).as_ref().clone().as_stream()\n\n }\n\n pub fn subscribe_fn_as_stream(\n\n &mut self,\n\n func: impl FnMut(Arc<X>) + Send + Sync + 'static,\n\n ) -> LinkedListAsync<Arc<X>> {\n\n self.subscribe_fn(func).as_ref().clone().as_stream()\n\n }\n\n pub fn as_stream(&mut self) -> LinkedListAsync<Arc<X>> {\n\n self.subscribe_fn_as_stream(|_| {})\n", "file_path": "src/publisher.rs", "rank": 97, "score": 27.849908584807583 }, { "content": "#[cfg(feature = \"for_futures\")]\n\nuse std::sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Once,\n\n};\n\n#[cfg(feature = \"for_futures\")]\n\nuse std::task::{Context, Poll, Waker};\n\n\n\n// pub trait FnMutReceiveThreadSafe<X>: FnMut(Arc<X>) + Send + Sync + 'static {}\n\n// pub trait FnMutReturnThreadSafe<X>: FnMut() -> X + Send + Sync + 'static {}\n\n\n\n#[cfg(feature = \"for_futures\")]\n\n#[derive(Clone)]\n\npub struct SharedThreadPoolReader {\n\n // Since we will be used in many threads, we need to protect\n\n // concurrent access\n\n pub inner: Arc<Mutex<ThreadPool>>,\n\n}\n\n#[cfg(feature = \"for_futures\")]\n", "file_path": "src/common.rs", "rank": 98, "score": 26.800659967456646 }, { "content": " observers: vec![],\n\n sub_handler: None,\n\n _x: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<X> Publisher<X> {\n\n pub fn new() -> Publisher<X> {\n\n Default::default()\n\n }\n\n pub fn new_with_handlers(h: Option<Arc<Mutex<dyn Handler + 'static>>>) -> Publisher<X> {\n\n let mut new_one = Publisher::new();\n\n new_one.subscribe_on(h);\n\n new_one\n\n }\n\n\n\n pub fn subscribe_on(&mut self, h: Option<Arc<Mutex<dyn Handler + 'static>>>) {\n\n self.sub_handler = h;\n\n }\n", "file_path": "src/publisher.rs", "rank": 99, "score": 25.387230192732513 } ]
Rust
xv7-kernel/src/memory/buddy.rs
imtsuki/xv7
edab461a6a7ceab2236e24ca726598107d346467
use crate::config::*;
use crate::pretty::Pretty;
use bitvec::prelude::*;
use boot::PhysAddr;
use core::mem;
use core::ptr;
use lazy_static::lazy_static;
use spin::Mutex;
pub use x86_64::structures::paging::{FrameAllocator, FrameDeallocator};
use x86_64::structures::paging::{PageSize, PhysFrame, Size4KiB};

pub struct BuddyFrameAllocator<'zone> {
    zone: &'zone mut BuddyZone,
    frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED],
}

const MAX_ORDER: u8 = 11;

macro_rules! align_to_upper {
    ($val: expr, $align:expr) => {
        (($val + $align - 1) / $align * $align)
    };
}

macro_rules! align_to_lower {
    ($val: expr, $align:expr) => {
        ($val / $align * $align)
    };
}

macro_rules! get_buddy {
    ($frame_index: expr, $order:expr) => {
        if ($frame_index) & (1 << ($order + 1) - 1) == 0 {
            $frame_index + (1 << $order)
        } else {
            $frame_index - (1 << $order)
        }
    };
}

#[repr(u8)]
#[derive(Copy, Clone, PartialEq)]
enum BuddyFrameStatus {
    UNCHECKED = 0,
    USED = 1,
    NOTUSED = 2,
}

#[derive(Copy, Clone)]
struct BuddyFrame {
    next: *mut BuddyFrame,
    use_status: BuddyFrameStatus,
    order: u8,
}

#[derive(Copy, Clone)]
struct BuddyFreeArea {
    head: *mut BuddyFrame,
    length: usize,
}

#[derive(Copy, Clone)]
struct BuddyZone {
    head: *mut BuddyFrame,
    free_area: [BuddyFreeArea; MAX_ORDER as usize],
}

impl BuddyFreeArea {
    unsafe fn drop_frame(&mut self, frame: *mut BuddyFrame) -> *mut BuddyFrame {
        let mut next = self.head;
        let mut pre = ptr::null_mut();
        while !next.is_null() && next != frame {
            pre = next;
            next = (*next).next;
        }
        if next.is_null() {
            return ptr::null_mut();
        }
        self.length -= 1;
        let next = (*frame).next;
        if !pre.is_null() {
            (*pre).next = next;
        } else {
            self.head = next;
        }
        (*frame).next = ptr::null_mut();
        (*frame).use_status = BuddyFrameStatus::NOTUSED;
        return frame;
    }

    unsafe fn push_frame(&mut self, frame: *mut BuddyFrame) {
        self.length += 1;
        (*frame).next = self.head;
        self.head = frame;
        (*frame).use_status = BuddyFrameStatus::NOTUSED;
    }

    unsafe fn pop_frame(&mut self) -> *mut BuddyFrame {
        if self.length == 0 {
            return ptr::null_mut();
        }
        self.length -= 1;
        let head = self.head;
        self.head = (*head).next;
        (*head).next = ptr::null_mut();
        (*head).use_status = BuddyFrameStatus::USED;
        return head;
    }
}

impl BuddyZone {
    fn count_free_mem(&self) -> usize {
        let mut mem_count = 0usize;
        for i in 0..MAX_ORDER {
            mem_count += self.free_area[i as usize].length * (1 << i);
        }
        return mem_count;
    }
}

unsafe impl<'zone> Send for BuddyFrameAllocator<'zone> {}

impl<'zone> BuddyFrameAllocator<'zone> {
    fn new(
        zone: &'zone mut BuddyZone,
        frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED],
    ) -> Self {
        Self {
            zone: zone,
            frames: frames,
        }
    }

    fn index_of_frame(&mut self, frame: *mut BuddyFrame) -> usize {
        (frame as usize - (&mut self.frames[0] as *mut BuddyFrame) as usize)
            / mem::size_of::<BuddyFrame>()
    }

    pub fn install_memory_region(&mut self, phys_start: PhysAddr, page_count: usize) {
        assert!(phys_start.is_aligned(Size4KiB::SIZE));
        let frame_start = (phys_start.as_u64() / Size4KiB::SIZE) as usize;
        unsafe {
            self.free_frame_range(frame_start, frame_start + page_count);
        }
    }

    unsafe fn free_frame_range(&mut self, index_l: usize, index_r: usize) {
        self.free_frame_range_top_down(index_l, index_r, MAX_ORDER - 1)
    }

    unsafe fn free_frame_range_top_down(&mut self, index_l: usize, index_r: usize, order: u8) {
        if index_l >= index_r || order >= MAX_ORDER {
            return;
        }
        let block_size: usize = 1 << order;
        let align_index_l: usize = align_to_upper!(index_l, block_size);
        let align_index_r: usize = align_to_lower!(index_r, block_size);
        if align_index_l <= align_index_r {
            self.free_frame_range_top_down(index_l, align_index_l, order.wrapping_sub(1));
            for frame_index in (align_index_l..align_index_r).step_by(1 << order) {
                self.free_frame_specific_order(frame_index, order);
            }
            self.free_frame_range_top_down(align_index_r, index_r, order.wrapping_sub(1));
        } else {
            self.free_frame_range_top_down(index_l, index_r, order.wrapping_sub(1));
        }
    }

    unsafe fn free_frame_specific_order(&mut self, mut frame_index: usize, mut order: u8) {
        if order >= MAX_ORDER {
            return;
        }
        if self.frames[frame_index].use_status == BuddyFrameStatus::NOTUSED {
            println!(
                "BuddyFrameAllocator: free twice on frame({}) detected",
                frame_index
            );
            return;
        }
        while order < MAX_ORDER {
            if order == MAX_ORDER - 1 {
                break;
            }
            let area = &mut self.zone.free_area[order as usize];
            let buddy_index = get_buddy!(frame_index, order);
            let buddy_frame = area.drop_frame(&mut self.frames[buddy_index]);
            if !buddy_frame.is_null() {
                frame_index = if frame_index < buddy_index {
                    frame_index
                } else {
                    buddy_index
                };
                order += 1;
            } else {
                break;
            }
        }
        assert_eq!(
            frame_index,
            align_to_lower!(frame_index, (1 << order) as usize),
            "frame_index {} cannot match order {}",
            frame_index,
            order
        );
        self.frames[frame_index].order = order;
        self.zone.free_area[order as usize].push_frame(&mut self.frames[frame_index]);
    }

    unsafe fn alloc_frame_specific_order(&mut self, order: u8) -> *mut BuddyFrame {
        let mut upper_order = order;
        while upper_order < MAX_ORDER && self.zone.free_area[upper_order as usize].length <= 0 {
            upper_order += 1;
        }
        if upper_order >= MAX_ORDER {
            return ptr::null_mut();
        }
        let large_frame = self.zone.free_area[upper_order as usize].pop_frame();
        while upper_order > order {
            let offset = (1 << (upper_order - 1)) + self.index_of_frame(large_frame);
            self.frames[offset].order = upper_order - 1;
            self.zone.free_area[(upper_order - 1) as usize]
                .push_frame(&mut self.frames[offset] as *mut BuddyFrame);
            upper_order -= 1;
        }
        (*large_frame).use_status = BuddyFrameStatus::USED;
        (*large_frame).order = order;
        return large_frame;
    }

    pub unsafe fn check_bugs(&mut self) {
        for i in 0..MAX_ORDER {
            let area = self.zone.free_area[i as usize];
            let mut j = 0;
            let mut cur = area.head;
            while !cur.is_null() {
                let next = (*cur).next;
                let offset = self.index_of_frame(cur);
                assert_eq!(
                    offset,
                    align_to_lower!(offset, (1 << i) as usize),
                    "area({})'s frame at index({}) has offset({}), cannot match order",
                    i,
                    j,
                    offset
                );
                j += 1;
                cur = next;
            }
            assert_eq!(
                j, area.length,
                "area({})'s length was not equals to it's link length",
                i
            );
        }
    }

    #[allow(unused)]
    pub unsafe fn print_statistics(&mut self) {
        self.check_bugs();
        let free_mem_count = self.zone.count_free_mem();
        println!(
            "BuddyFrameAllocator: {} frames available, which is {} of memory",
            free_mem_count,
            (free_mem_count * Size4KiB::SIZE as usize).pretty(),
        );
        print!("default zone:\t");
        for i in 0..MAX_ORDER {
            print!("{:>8}", self.zone.free_area[i as usize].length);
        }
        println!();
    }
}

unsafe impl<'zone> FrameAllocator<Size4KiB> for BuddyFrameAllocator<'zone> {
    fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> {
        let frame = unsafe { self.alloc_frame_specific_order(0) };
        if !frame.is_null() {
            Some(PhysFrame::containing_address(PhysAddr::new(
                self.index_of_frame(frame) as u64 * Size4KiB::SIZE,
            )))
        } else {
            None
        }
    }
}

impl<'zone> FrameDeallocator<Size4KiB> for BuddyFrameAllocator<'zone> {
    unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) {
        let index = frame.start_address().as_u64() / Size4KiB::SIZE;
        self.free_frame_specific_order(index as usize, 0);
    }
}

lazy_static! {
    pub static ref FRAME_ALLOCATOR: Mutex<BuddyFrameAllocator<'static>> = {
        unsafe {
            static mut FRAMES: [BuddyFrame; MAX_FRAMES_SUPPORTED] = [BuddyFrame {
                next: ptr::null_mut(),
                use_status: BuddyFrameStatus::UNCHECKED,
                order: 0,
            }; MAX_FRAMES_SUPPORTED];
            static mut DEFAULT_ZONE: BuddyZone = BuddyZone {
                head: ptr::null_mut(),
                free_area: [BuddyFreeArea {
                    head: ptr::null_mut(),
                    length: 0,
                }; MAX_ORDER as usize],
            };
            DEFAULT_ZONE.head = &mut FRAMES[0];
            Mutex::new(BuddyFrameAllocator::new(&mut DEFAULT_ZONE, &mut FRAMES))
        }
    };
}
use crate::config::*; use crate::pretty::Pretty; use bitvec::prelude::*; use boot::PhysAddr; use core::mem; use core::ptr; use lazy_static::lazy_static; use spin::Mutex; pub use x86_64::structures::paging::{FrameAllocator, FrameDeallocator}; use x86_64::structures::paging::{PageSize, PhysFrame, Size4KiB}; pub struct BuddyFrameAllocator<'zone> { zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], } const MAX_ORDER: u8 = 11; macro_rules! align_to_upper { ($val: expr, $align:expr) => { (($val + $align - 1) / $align * $align) }; } macro_rules! align_to_lower { ($val: expr, $align:expr) => { ($val / $align * $align) }; } macro_rules! get_buddy { ($frame_index: expr, $order:expr) => { if ($frame_index) & (1 << ($order + 1) - 1) == 0 { $frame_index + (1 << $order) } else { $frame_index - (1 << $order) } }; } #[repr(u8)] #[derive(Copy, Clone, PartialEq)] enum BuddyFrameStatus { UNCHECKED = 0, USED = 1, NOTUSED = 2, } #[derive(Copy, Clone)] struct BuddyFrame { next: *mut BuddyFrame, use_status: BuddyFrameStatus, order: u8, } #[derive(Copy, Clone)] struct BuddyFreeArea { head: *mut BuddyFrame, length: usize, } #[derive(Copy, Clone)] struct BuddyZone { head: *mut BuddyFrame, free_area: [BuddyFreeArea; MAX_ORDER as usize], } impl BuddyFreeArea { unsafe fn drop_frame(&mut self, frame: *mut BuddyFrame) -> *mut BuddyFrame { let mut next = self.head; let mut pre = ptr::null_mut(); while !next.is_null() && next != frame { pre = next; next = (*next).next; } if next.is_null() { return ptr::null_mut(); } self.length -= 1; let next = (*frame).next; if !pre.is_null() { (*pre).next = next; } else { self.head = next; } (*frame).next = ptr::null_mut(); (*frame).use_status = BuddyFrameStatus::NOTUSED; return frame; } unsafe fn push_frame(&mut self, frame: *mut BuddyFrame) { self.length += 1; (*frame).next = self.head; self.head = frame; (*frame).use_status = BuddyFrameStatus::NOTUSED; } unsafe fn pop_frame(&mut self) -> *mut BuddyFrame { if self.length == 0 { return ptr::null_mut(); } self.length -= 1; let head = self.head; self.head = (*head).next; (*head).next = ptr::null_mut(); (*head).use_status = BuddyFrameStatus::USED; return head; } } impl BuddyZone { fn count_free_mem(&self) -> usize { let mut mem_count = 0usize; for i in 0..MAX_ORDER { mem_count += self.free_area[i as usize].length * (1 << i); } return mem_count; } } unsafe impl<'zone> Send for BuddyFrameAllocator<'zone> {} impl<'zone> BuddyFrameAllocator<'zone> { fn new( zone: &'zone mut BuddyZone, frames: &'zone mut [BuddyFrame; MAX_FRAMES_SUPPORTED], ) -> Self { Self { zone: zone, frames: frames, } } fn index_of_frame(&mut self, frame: *mut BuddyFrame) -> usize { (frame as usize - (&mut self.frames[0] as *mut BuddyFrame) as usize) / mem::size_of::<BuddyFrame>() } pub fn install_memory_region(&mut self, phys_start: PhysAddr, page_count: usize) { assert!(phys_start.is_aligned(Size4KiB::SIZE)); let frame_start = (phys_start.as_u64() / Size4KiB::SIZE) as usize; unsafe { self.free_frame_range(frame_start, frame_start + page_count); } } unsafe fn free_frame_range(&mut self, index_l: usize, index_r: usize) { self.free_frame_range_top_down(index_l, index_r, MAX_ORDER - 1) } unsafe fn free_frame_range_top_down(&mut self, index_l: usize, index_r: usize, order: u8) { if index_l >= index_r || order >= MAX_ORDER { return; } let block_size: usize = 1 << order; let align_index_l: usize = align_to_upper!(index_l, block_size); let align_index_r: usize = align_to_lower!(index_r, block_size);
} unsafe fn free_frame_specific_order(&mut self, mut frame_index: usize, mut order: u8) { if order >= MAX_ORDER { return; } if self.frames[frame_index].use_status == BuddyFrameStatus::NOTUSED { println!( "BuddyFrameAllocator: free twice on frame({}) detected", frame_index ); return; } while order < MAX_ORDER { if order == MAX_ORDER - 1 { break; } let area = &mut self.zone.free_area[order as usize]; let buddy_index = get_buddy!(frame_index, order); let buddy_frame = area.drop_frame(&mut self.frames[buddy_index]); if !buddy_frame.is_null() { frame_index = if frame_index < buddy_index { frame_index } else { buddy_index }; order += 1; } else { break; } } assert_eq!( frame_index, align_to_lower!(frame_index, (1 << order) as usize), "frame_index {} cannot match order {}", frame_index, order ); self.frames[frame_index].order = order; self.zone.free_area[order as usize].push_frame(&mut self.frames[frame_index]); } unsafe fn alloc_frame_specific_order(&mut self, order: u8) -> *mut BuddyFrame { let mut upper_order = order; while upper_order < MAX_ORDER && self.zone.free_area[upper_order as usize].length <= 0 { upper_order += 1; } if upper_order >= MAX_ORDER { return ptr::null_mut(); } let large_frame = self.zone.free_area[upper_order as usize].pop_frame(); while upper_order > order { let offset = (1 << (upper_order - 1)) + self.index_of_frame(large_frame); self.frames[offset].order = upper_order - 1; self.zone.free_area[(upper_order - 1) as usize] .push_frame(&mut self.frames[offset] as *mut BuddyFrame); upper_order -= 1; } (*large_frame).use_status = BuddyFrameStatus::USED; (*large_frame).order = order; return large_frame; } pub unsafe fn check_bugs(&mut self) { for i in 0..MAX_ORDER { let area = self.zone.free_area[i as usize]; let mut j = 0; let mut cur = area.head; while !cur.is_null() { let next = (*cur).next; let offset = self.index_of_frame(cur); assert_eq!( offset, align_to_lower!(offset, (1 << i) as usize), "area({})'s frame at index({}) has offset({}), cannot match order", i, j, offset ); j += 1; cur = next; } assert_eq!( j, area.length, "area({})'s length was not equals to it's link length", i ); } } #[allow(unused)] pub unsafe fn print_statistics(&mut self) { self.check_bugs(); let free_mem_count = self.zone.count_free_mem(); println!( "BuddyFrameAllocator: {} frames available, which is {} of memory", free_mem_count, (free_mem_count * Size4KiB::SIZE as usize).pretty(), ); print!("default zone:\t"); for i in 0..MAX_ORDER { print!("{:>8}", self.zone.free_area[i as usize].length); } println!(); } } unsafe impl<'zone> FrameAllocator<Size4KiB> for BuddyFrameAllocator<'zone> { fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> { let frame = unsafe { self.alloc_frame_specific_order(0) }; if !frame.is_null() { Some(PhysFrame::containing_address(PhysAddr::new( self.index_of_frame(frame) as u64 * Size4KiB::SIZE, ))) } else { None } } } impl<'zone> FrameDeallocator<Size4KiB> for BuddyFrameAllocator<'zone> { unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) { let index = frame.start_address().as_u64() / Size4KiB::SIZE; self.free_frame_specific_order(index as usize, 0); } } lazy_static! 
{ pub static ref FRAME_ALLOCATOR: Mutex<BuddyFrameAllocator<'static>> = { unsafe { static mut FRAMES: [BuddyFrame; MAX_FRAMES_SUPPORTED] = [BuddyFrame { next: ptr::null_mut(), use_status: BuddyFrameStatus::UNCHECKED, order: 0, }; MAX_FRAMES_SUPPORTED]; static mut DEFAULT_ZONE: BuddyZone = BuddyZone { head: ptr::null_mut(), free_area: [BuddyFreeArea { head: ptr::null_mut(), length: 0, }; MAX_ORDER as usize], }; DEFAULT_ZONE.head = &mut FRAMES[0]; Mutex::new(BuddyFrameAllocator::new(&mut DEFAULT_ZONE, &mut FRAMES)) } }; }
if align_index_l <= align_index_r {
    self.free_frame_range_top_down(index_l, align_index_l, order.wrapping_sub(1));
    for frame_index in (align_index_l..align_index_r).step_by(1 << order) {
        self.free_frame_specific_order(frame_index, order);
    }
    self.free_frame_range_top_down(align_index_r, index_r, order.wrapping_sub(1));
} else {
    self.free_frame_range_top_down(index_l, index_r, order.wrapping_sub(1));
}
if_condition
[ { "content": "pub fn read(fd: usize, buf: &mut [u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_READ, fd, buf.as_mut_ptr() as usize, buf.len()) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 0, "score": 186518.74317084477 }, { "content": "pub fn read(fd: usize, buf: &mut [u8]) -> Result<usize> {\n\n let proc = my_proc();\n\n\n\n match proc.fds.get_mut(fd) {\n\n Some(f) => f.read(buf).map_err(|_| Error::new(EFAULT)),\n\n None => Err(Error::new(ENOENT)),\n\n }\n\n}\n", "file_path": "xv7-kernel/src/syscall/fs.rs", "rank": 1, "score": 183239.36693757467 }, { "content": "pub fn validate_str(ptr: *const u8, len: usize) -> Result<&'static str> {\n\n let slice = validate_slice(ptr, len)?;\n\n str::from_utf8(slice).map_err(|_| Error::new(EINVAL))\n\n}\n", "file_path": "xv7-kernel/src/syscall.rs", "rank": 2, "score": 168050.4824752632 }, { "content": "pub fn write(fd: usize, buf: &[u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_WRITE, fd, buf.as_ptr() as usize, buf.len()) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 3, "score": 151367.45977202992 }, { "content": "pub fn write(fd: usize, buf: &[u8]) -> Result<usize> {\n\n let proc = my_proc();\n\n\n\n match proc.fds.get_mut(fd) {\n\n Some(f) => f.write(buf).map_err(|_| Error::new(EFAULT)),\n\n None => Err(Error::new(ENOENT)),\n\n }\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall/fs.rs", "rank": 4, "score": 148626.72314657015 }, { "content": "pub fn validate_slice_mut<T>(ptr: *mut T, len: usize) -> Result<&'static mut [T]> {\n\n Ok(unsafe { slice::from_raw_parts_mut(ptr, len) })\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall.rs", "rank": 5, "score": 141538.13990290067 }, { "content": "pub fn my_proc() -> &'static mut Process {\n\n let cpu = my_cpu();\n\n cpu.current_process.as_mut().unwrap()\n\n}\n", "file_path": "xv7-kernel/src/process.rs", "rank": 6, "score": 131985.63552259447 }, { "content": "pub fn my_cpu() -> &'static mut Cpu {\n\n unsafe { &mut CPUS[0] }\n\n}\n", "file_path": "xv7-kernel/src/cpu.rs", "rank": 7, "score": 131985.63552259447 }, { "content": "pub fn validate_slice<T>(ptr: *const T, len: usize) -> Result<&'static [T]> {\n\n Ok(unsafe { slice::from_raw_parts(ptr, len) })\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall.rs", "rank": 8, "score": 125887.48287600276 }, { "content": "pub fn read_file(services: &BootServices, path: &str) -> Result<(usize, Vec<u8>)> {\n\n let fatfs = services\n\n .locate_protocol::<SimpleFileSystem>()\n\n .log_warning()?;\n\n let fatfs = unsafe { &mut *fatfs.get() };\n\n\n\n let mut volume = fatfs.open_volume().log_warning()?;\n\n\n\n let file_handle = volume\n\n .open(path, FileMode::Read, FileAttribute::empty())\n\n .log_warning()?;\n\n\n\n let mut file = match file_handle.into_type().log_warning()? 
{\n\n FileType::Regular(file) => file,\n\n FileType::Dir(_) => unreachable!(),\n\n };\n\n\n\n // Use an empty buffer to retrieve the actual FileInfo size\n\n let mut empty_buf = Vec::new();\n\n let len = match *file\n", "file_path": "xv7-bootloader-uefi/src/io.rs", "rank": 9, "score": 125209.5721174931 }, { "content": "pub fn fork() -> Result<usize> {\n\n unsafe { syscall0(SYS_FORK) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 10, "score": 117543.07763371573 }, { "content": "pub fn getpid() -> Result<usize> {\n\n unsafe { syscall0(SYS_GETPID) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 11, "score": 117543.07763371573 }, { "content": "pub fn close(fd: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_CLOSE, fd) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 12, "score": 117376.19211431587 }, { "content": "pub fn r#yield() -> Result<usize> {\n\n unsafe { syscall0(SYS_YIELD) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 13, "score": 115235.18377615474 }, { "content": "pub fn getpid() -> Result<usize> {\n\n Ok(process::my_proc().pid)\n\n}\n\n\n\nmod images {\n\n pub const INIT: &'static [u8] = include_bytes!(\"../../../target/x86_64/debug/init\");\n\n}\n\n\n\npub(crate) fn r#yield() -> Result<usize> {\n\n let cpu = my_cpu();\n\n if cpu.current_process.is_some() {\n\n unsafe {\n\n cpu.switch_to_kernel();\n\n }\n\n }\n\n Ok(0)\n\n}\n", "file_path": "xv7-kernel/src/syscall/process.rs", "rank": 14, "score": 114920.64377881531 }, { "content": "pub fn fork() -> Result<usize> {\n\n let _proc = process::my_proc();\n\n\n\n Ok(0)\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall/process.rs", "rank": 15, "score": 114920.64377881531 }, { "content": "pub fn syscall(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize) -> usize {\n\n fn inner(a: usize, b: usize, c: usize, d: usize, _e: usize, _f: usize) -> Result<usize> {\n\n match a {\n\n SYS_EXIT => process::exit(b as isize),\n\n SYS_WRITE => fs::write(b, validate_slice(c as *const u8, d)?),\n\n SYS_READ => fs::read(b, validate_slice_mut(c as *mut u8, d)?),\n\n SYS_FORK => process::fork(),\n\n SYS_GETPID => process::getpid(),\n\n SYS_YIELD => process::r#yield(),\n\n SYS_MKNOD => fs::mknod(validate_str(b as *const u8, c)?, d),\n\n SYS_OPEN => fs::open(validate_str(b as *const u8, c)?),\n\n _ => Err(Error::new(ENOSYS)),\n\n }\n\n }\n\n\n\n let result = inner(a, b, c, d, e, f);\n\n\n\n Error::mux(result)\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall.rs", "rank": 16, "score": 113149.51563749177 }, { "content": "pub fn mknod(path: &str, dev: usize) -> Result<usize> {\n\n unsafe { syscall3(SYS_MKNOD, path.as_ptr() as usize, path.len(), dev) }\n\n}\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 17, "score": 109710.35444795362 }, { "content": "pub fn open(path: &str) -> Result<usize> {\n\n unsafe { syscall2(SYS_OPEN, path.as_ptr() as usize, path.len()) }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 18, "score": 108149.21366355257 }, { "content": "pub fn mknod(path: &str, dev: usize) -> Result<usize> {\n\n if dev == 1 {\n\n FILE_SYSTEM\n\n .lock()\n\n .insert(String::from(path), Arc::new(Console));\n\n Ok(0)\n\n } else {\n\n Err(Error::new(ENODEV))\n\n }\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall/fs.rs", "rank": 19, "score": 107680.47813433905 }, { "content": "pub fn init_frame_allocator(args: &BootArgs) {\n\n let mut allocator = crate::memory::FRAME_ALLOCATOR.lock();\n\n for descriptor in args.memory_map.clone().iter {\n\n if 
descriptor.ty == MemoryType::CONVENTIONAL {\n\n allocator.install_memory_region(\n\n PhysAddr::new(descriptor.phys_start),\n\n descriptor.page_count as usize,\n\n );\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn active_page_table() -> &'static mut PageTable {\n\n let (level_4_table_frame, _) = Cr3::read();\n\n\n\n let phys = level_4_table_frame.start_address();\n\n let virt = PAGE_OFFSET_BASE + phys.as_u64();\n\n let page_table_ptr: *mut PageTable = virt as *mut _;\n\n\n\n &mut *page_table_ptr\n", "file_path": "xv7-kernel/src/arch/x86_64/paging.rs", "rank": 20, "score": 106181.65074952267 }, { "content": "pub fn exit(code: isize) -> Result<usize> {\n\n let proc = process::my_proc();\n\n if proc.pid == 0 {\n\n panic!(\"pid 0 exited with code {}\", code);\n\n }\n\n Ok(0)\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall/process.rs", "rank": 21, "score": 105825.1474301136 }, { "content": "pub fn open(path: &str) -> Result<usize> {\n\n let proc = my_proc();\n\n match FILE_SYSTEM.lock().get(path) {\n\n None => dbg!(Err(Error::new(ENOENT))),\n\n Some(inode) => {\n\n let inode = inode.clone();\n\n let file = File::new(inode, true, true);\n\n proc.fds.push(file);\n\n dbg!(Ok(proc.fds.len() - 1))\n\n }\n\n }\n\n}\n\n\n", "file_path": "xv7-kernel/src/syscall/fs.rs", "rank": 22, "score": 105825.1474301136 }, { "content": "pub fn exec(fd: usize, args: &[&str], envs: &[&str]) -> Result<usize> {\n\n unsafe {\n\n syscall5(\n\n SYS_EXEC,\n\n fd,\n\n args.as_ptr() as usize,\n\n args.len(),\n\n envs.as_ptr() as usize,\n\n envs.len(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 23, "score": 103161.50363744279 }, { "content": "pub fn kmain() -> ! {\n\n println!(\"We are alive!\");\n\n scheduler::scheduler();\n\n}\n", "file_path": "xv7-kernel/src/main.rs", "rank": 24, "score": 98226.08582595007 }, { "content": "pub fn scheduler() -> ! 
{\n\n let cpu = my_cpu();\n\n let p = Process::initcode();\n\n\n\n cpu.current_process = Some(p);\n\n\n\n loop {\n\n println!(\"[scheduler] pick next process to run\");\n\n unsafe {\n\n cpu.switch_to_process();\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/scheduler.rs", "rank": 25, "score": 98226.08582595007 }, { "content": "#[allow(unused)]\n\npub fn splash_screen() {\n\n if let Some(display) = &mut *GOP_DISPLAY.lock() {\n\n display.clear(RgbColor::WHITE).unwrap();\n\n\n\n let img = Bmp::from_slice(include_bytes!(\"../resources/logo.bmp\")).unwrap();\n\n let logo = Image::new(&img, Point::zero());\n\n\n\n logo.translate(\n\n (\n\n (display.size().width - img.width()) as i32 / 2,\n\n (display.size().height - img.height()) as i32 / 2,\n\n )\n\n .into(),\n\n )\n\n .draw(display)\n\n .unwrap();\n\n\n\n egtext!(\n\n text = \"XV7: Yet Another Operating System by imtsuki\",\n\n top_left = (100, 100),\n\n style = text_style!(font = Font8x16, text_color = RgbColor::BLACK)\n\n )\n\n .draw(display)\n\n .unwrap();\n\n }\n\n}\n", "file_path": "xv7-kernel/src/video.rs", "rank": 26, "score": 96255.34694255577 }, { "content": "pub fn init_heap() {\n\n crate::arch::allocator::init_heap();\n\n}\n\n\n\n#[global_allocator]\n\npub static ALLOCATOR: LockedHeap = LockedHeap::empty();\n", "file_path": "xv7-kernel/src/allocator.rs", "rank": 27, "score": 96255.34694255577 }, { "content": "/// Loads kernel image to `KERNEL_BASE`.\n\n/// Returns entry's virtual address.\n\npub fn load_elf(\n\n services: &BootServices,\n\n page_table: &mut impl Mapper<Size4KiB>,\n\n allocator: &mut impl FrameAllocator<Size4KiB>,\n\n path: &str,\n\n) -> KernelEntry {\n\n let (len, kernel_image) =\n\n read_file(services, path).expect_success(\"Could not load kernel image\");\n\n\n\n dbg!(len);\n\n\n\n let kernel_elf = elf::Elf::parse(&kernel_image).expect(\"Failed to parse ELF file\");\n\n\n\n dbg!(KERNEL_BASE);\n\n\n\n for ph in kernel_elf.program_headers {\n\n if ph.p_type == elf::program_header::PT_LOAD {\n\n info!(\n\n \"PT_LOAD range = {:#x?}, to address {:#x} + {:#x?}\",\n\n ph.file_range(),\n", "file_path": "xv7-bootloader-uefi/src/loader.rs", "rank": 28, "score": 94412.73392658573 }, { "content": "/// Set up a basic recursive page table.\n\npub fn init_recursive(\n\n allocator: &mut impl FrameAllocator<Size4KiB>,\n\n) -> RecursivePageTable<'static> {\n\n // First we do a copy for the level 4 table here, because the old table\n\n // has memory type `BOOT_SERVICES_DATA`. 
Level 3 ~ level 1 tables will\n\n // be discarded eventually so we can ignore them.\n\n let old_l4_table_addr = Cr3::read().0.start_address().as_u64();\n\n let l4_table_frame = allocator.allocate_frame().unwrap();\n\n let l4_table_addr = l4_table_frame.start_address().as_u64();\n\n\n\n // Safety: newly allocated frame is guaranteed to be valid and unused\n\n unsafe {\n\n core::ptr::copy(\n\n old_l4_table_addr as *const u8,\n\n l4_table_addr as *mut u8,\n\n l4_table_frame.size() as usize,\n\n )\n\n };\n\n\n\n // Safety: same as above\n", "file_path": "xv7-bootloader-uefi/src/paging.rs", "rank": 29, "score": 94409.20254221206 }, { "content": "pub fn init() {\n\n GDT.0.load();\n\n unsafe {\n\n load_ss(GDT.1.kernel_data_selector);\n\n load_ds(GDT.1.kernel_data_selector);\n\n load_es(GDT.1.kernel_data_selector);\n\n load_gs(GDT.1.kernel_data_selector);\n\n\n\n set_cs(GDT.1.kernel_code_selector);\n\n load_tss(GDT.1.tss_selector);\n\n }\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/gdt.rs", "rank": 30, "score": 94409.20254221206 }, { "content": "pub fn init() {\n\n println!(\"{:x}\", unsafe {\n\n x86_64::registers::model_specific::Msr::new(0x1b).read()\n\n });\n\n\n\n let cpuid = CpuId::new();\n\n\n\n println!(\n\n \"CPU Vendor: {}\",\n\n cpuid\n\n .get_vendor_info()\n\n .as_ref()\n\n .map_or_else(|| \"unknown\", |vf| vf.as_string(),)\n\n );\n\n\n\n println!(\n\n \"CPU Model: {}\",\n\n cpuid.get_extended_function_info().as_ref().map_or_else(\n\n || \"n/a\",\n\n |extfuninfo| extfuninfo.processor_brand_string().unwrap_or(\"unreadable\"),\n", "file_path": "xv7-kernel/src/arch/x86_64/cpuid.rs", "rank": 31, "score": 94409.20254221206 }, { "content": "#[inline(always)]\n\npub fn idle() -> ! {\n\n loop {\n\n unsafe {\n\n asm!(\"wfe\");\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/arch/aarch64/mod.rs", "rank": 32, "score": 94409.20254221206 }, { "content": "pub fn init() {\n\n IDT.load();\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt.rs", "rank": 33, "score": 94409.20254221206 }, { "content": "pub fn init() {\n\n let mut console_drivers = crate::device::console::CONSOLE_DRIVERS.lock();\n\n\n\n console_drivers.register(box MonitorConsole::new());\n\n console_drivers.register(box SerialConsole::new());\n\n\n\n drop(console_drivers);\n\n\n\n print!(\n\n \"{}{}{}\",\n\n CtrlSeq::EraseDisplay(Some(EraseParam::Entire)),\n\n CtrlSeq::CursorPosition(None, None),\n\n CtrlSeq::SelectGraphicRendition(None),\n\n );\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/console.rs", "rank": 34, "score": 94409.20254221206 }, { "content": "/// Map kernel stack under `KERNEL_STACK_TOP`.\n\npub fn map_stack(\n\n stack_top: VirtAddr,\n\n size: usize,\n\n page_table: &mut impl Mapper<Size4KiB>,\n\n allocator: &mut impl FrameAllocator<Size4KiB>,\n\n) {\n\n let page_count = align_up(size as u64, Size4KiB::SIZE) / Size4KiB::SIZE;\n\n let stack_top = Page::containing_address(stack_top);\n\n let stack_bottom = stack_top - page_count;\n\n for page in Page::range(stack_bottom, stack_top) {\n\n let frame = allocator.allocate_frame().unwrap();\n\n unsafe {\n\n page_table\n\n .map_to(\n\n page,\n\n frame,\n\n PageTableFlags::PRESENT | PageTableFlags::WRITABLE | PageTableFlags::NO_EXECUTE,\n\n allocator,\n\n )\n\n .expect(\"Error occured while mapping kernel stack\")\n\n .flush();\n\n }\n\n }\n\n}\n", "file_path": "xv7-bootloader-uefi/src/paging.rs", "rank": 35, "score": 94409.20254221206 }, { "content": "pub fn init() {\n\n // Setup syscall/sysret cs/ss\n\n Star::write(\n\n gdt::GDT.1.user_code_selector,\n\n 
gdt::GDT.1.user_data_selector,\n\n gdt::GDT.1.kernel_code_selector,\n\n gdt::GDT.1.kernel_data_selector,\n\n )\n\n .unwrap();\n\n\n\n // Setup syscall target rip\n\n LStar::write(VirtAddr::from_ptr(syscall_entry as *const u8));\n\n\n\n // Setup flags to clear\n\n let mask = RFlags::TRAP_FLAG\n\n | RFlags::DIRECTION_FLAG\n\n | RFlags::INTERRUPT_FLAG\n\n | RFlags::IOPL_HIGH\n\n | RFlags::IOPL_LOW\n\n | RFlags::ALIGNMENT_CHECK\n", "file_path": "xv7-kernel/src/arch/x86_64/syscall.rs", "rank": 36, "score": 94409.20254221206 }, { "content": "#[inline(always)]\n\npub fn idle() -> ! {\n\n loop {\n\n unsafe {\n\n llvm_asm!(\"hlt\");\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/mod.rs", "rank": 37, "score": 94409.20254221206 }, { "content": "pub fn stdin() -> Stdin {\n\n Stdin\n\n}\n", "file_path": "xv7-user/src/io.rs", "rank": 38, "score": 93060.60856549183 }, { "content": "/// Map complete pyhsical memory to `offset`, which is `PAGE_OFFSET_BASE`.\n\npub fn map_physical_memory(\n\n offset: VirtAddr,\n\n max_addr: PhysAddr,\n\n page_table: &mut impl Mapper<Size2MiB>,\n\n allocator: &mut impl FrameAllocator<Size4KiB>,\n\n) {\n\n let start_frame = PhysFrame::containing_address(PhysAddr::new(0));\n\n let end_frame = PhysFrame::containing_address(max_addr);\n\n for frame in PhysFrame::range_inclusive(start_frame, end_frame) {\n\n let page = Page::containing_address(offset + frame.start_address().as_u64());\n\n unsafe {\n\n page_table\n\n .map_to(\n\n page,\n\n frame,\n\n PageTableFlags::PRESENT | PageTableFlags::WRITABLE | PageTableFlags::NO_EXECUTE,\n\n allocator,\n\n )\n\n .expect(\"Error occured while mapping complete pyhsical memory\")\n\n .flush();\n\n }\n\n }\n\n}\n\n\n", "file_path": "xv7-bootloader-uefi/src/paging.rs", "rank": 39, "score": 92676.19897806803 }, { "content": "pub fn init() {\n\n unsafe {\n\n pic::disable_8259_pic();\n\n }\n\n\n\n lapic::LOCAL_APIC.lock().init();\n\n\n\n let mut ioapic = ioapic::IoApic::default();\n\n\n\n ioapic.write_irq(IRQ_KEYBOARD, 0, 0);\n\n ioapic.write_irq(IRQ_COM1, 0, 0);\n\n}\n\n\n\npub const T_IRQ0: u8 = 0x20;\n\n\n\npub const IRQ_TIMER: u8 = 0;\n\npub const IRQ_KEYBOARD: u8 = 1;\n\npub const IRQ_COM1: u8 = 4;\n\npub const IRQ_SPURIOUS: u8 = 31;\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt/controller.rs", "rank": 40, "score": 92676.19897806803 }, { "content": "pub fn init_heap() {\n\n let mut frame_allocator = FRAME_ALLOCATOR.lock();\n\n let page_table = unsafe { paging::active_page_table() };\n\n let mut mapper = unsafe { OffsetPageTable::new(page_table, VirtAddr::new(PAGE_OFFSET_BASE)) };\n\n\n\n let page_range = {\n\n let heap_start = VirtAddr::new(KERNEL_HEAP_BASE as u64);\n\n let heap_end = heap_start + KERNEL_HEAP_SIZE - 1u64;\n\n let heap_start_page = Page::containing_address(heap_start);\n\n let heap_end_page = Page::containing_address(heap_end);\n\n Page::range_inclusive(heap_start_page, heap_end_page)\n\n };\n\n\n\n for page in page_range {\n\n let frame = frame_allocator.allocate_frame().unwrap();\n\n let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE | PageTableFlags::NO_EXECUTE;\n\n unsafe {\n\n mapper\n\n .map_to(page, frame, flags, &mut *frame_allocator)\n\n .unwrap()\n", "file_path": "xv7-kernel/src/arch/x86_64/allocator.rs", "rank": 41, "score": 92676.19897806803 }, { "content": "pub fn disable_identity_mapping() {\n\n let page_table = unsafe { active_page_table() };\n\n\n\n for i in 0..256 {\n\n page_table[i].set_unused();\n\n }\n\n\n\n x86_64::instructions::tlb::flush_all();\n\n}\n\n\n", 
"file_path": "xv7-kernel/src/arch/x86_64/paging.rs", "rank": 42, "score": 91046.24473612601 }, { "content": "pub trait Inode: Any + Sync + Send {\n\n fn read_at(&self, offset: usize, buf: &mut [u8]) -> Result<usize>;\n\n fn write_at(&self, offset: usize, buf: &[u8]) -> Result<usize>;\n\n fn metadata(&self) -> Result<Metadata> {\n\n Err(FsError::NotSupported)\n\n }\n\n fn set_metadata(&self, _metadata: &Metadata) -> Result<()> {\n\n Err(FsError::NotSupported)\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Metadata {\n\n /// Device ID\n\n pub dev: usize, // (major << 8) | minor\n\n /// Inode number\n\n pub inode: usize,\n\n /// Size in bytes\n\n ///\n\n /// SFS Note: for normal file size is the actuate file size\n", "file_path": "xv7-kernel/src/fs/vfs.rs", "rank": 43, "score": 90225.30051818282 }, { "content": "pub fn exit(code: isize) -> ! {\n\n unsafe {\n\n syscall1(SYS_EXIT, code as usize).unwrap();\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "xv7-usyscall/src/syscall.rs", "rank": 44, "score": 88517.0105490066 }, { "content": "pub fn exec(path: &str) {\n\n let proc = process::my_proc();\n\n\n\n let image = match path {\n\n \"/init\" => images::INIT,\n\n _ => panic!(\"We have no filesystem yet; executables are hardcoded\"),\n\n };\n\n\n\n let image_elf = elf::Elf::parse(image).expect(\"Failed to parse ELF file\");\n\n\n\n let mut frame_allocator = FRAME_ALLOCATOR.lock();\n\n // FIXME: we should free the previous vm and set up a new vm\n\n let page_table = unsafe { proc.vm.page_table() };\n\n let mut mapper = unsafe { OffsetPageTable::new(page_table, VirtAddr::new(PAGE_OFFSET_BASE)) };\n\n\n\n for ph in image_elf.program_headers {\n\n if ph.p_type == elf::program_header::PT_LOAD {\n\n let page_range = {\n\n let start_addr = VirtAddr::new(ph.p_vaddr);\n\n let end_addr = start_addr + ph.p_memsz - 1u64;\n", "file_path": "xv7-kernel/src/syscall/process.rs", "rank": 47, "score": 86784.00698486257 }, { "content": "pub fn init(args: &BootArgs) {\n\n GOP_DISPLAY.lock().replace(GopDisplay(\n\n args.frame_buffer.base.as_u64(),\n\n args.frame_buffer.resolution,\n\n ));\n\n}\n\n\n", "file_path": "xv7-kernel/src/video.rs", "rank": 48, "score": 86784.00698486257 }, { "content": "#[doc(hidden)]\n\npub fn _print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\npub struct Stdin;\n\n\n\nimpl Stdin {\n\n pub fn read_line<'a>(&self, buf: &'a mut [u8]) -> &'a str {\n\n let mut idx = 0;\n\n loop {\n\n if idx >= buf.len() {\n\n break;\n\n }\n\n if let Ok(n) = syscall::read(0, &mut buf[idx..idx + 1]) {\n\n if n > 0 {\n\n if buf[idx] == b'\\n' || buf[idx] == b'\\r' {\n\n break;\n\n }\n\n idx += n;\n\n }\n\n } else {\n\n panic!(\"read_line\");\n\n }\n\n }\n\n core::str::from_utf8(&buf[0..idx]).unwrap()\n\n }\n\n}\n\n\n", "file_path": "xv7-user/src/io.rs", "rank": 50, "score": 84476.11312730156 }, { "content": "#[doc(hidden)]\n\npub fn _print(args: fmt::Arguments) {\n\n without_interrupts(|| {\n\n crate::device::console::CONSOLE_DRIVERS\n\n .lock()\n\n .write_fmt(args)\n\n .unwrap();\n\n })\n\n}\n\n\n\n/// Prints and returns the value of a given expression for quick and dirty\n\n/// debugging.\n\n///\n\n/// Copied from standard library with slight modifications.\n\n#[macro_export]\n\nmacro_rules! 
dbg {\n\n () => {\n\n $crate::println!(\"[{}:{}]\", file!(), line!());\n\n };\n\n ($val:expr) => {\n\n match $val {\n", "file_path": "xv7-kernel/src/macros.rs", "rank": 51, "score": 84476.11312730156 }, { "content": "pub fn print_memory_map(mmap: &MemoryMap) {\n\n println!(\"Mem phys map:\");\n\n for descriptor in mmap.clone().iter {\n\n println!(\n\n \"[mem {:#016x}-{:#016x} {:>8}] {:?}\",\n\n descriptor.phys_start,\n\n descriptor.phys_start + descriptor.page_count * Size4KiB::SIZE - 1,\n\n descriptor.page_count,\n\n descriptor.ty\n\n );\n\n }\n\n}\n", "file_path": "xv7-kernel/src/memory/mod.rs", "rank": 53, "score": 82168.59007881887 }, { "content": "struct Stdout;\n\n\n\nimpl fmt::Write for Stdout {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n syscall::write(1, s.as_bytes()).map_err(|_| fmt::Error)?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "xv7-user/src/io.rs", "rank": 54, "score": 59223.20067104359 }, { "content": "fn main() {\n\n println!(\"This is a WIP compose script\");\n\n}\n", "file_path": "compose.rs", "rank": 55, "score": 55510.60833427185 }, { "content": "struct Performer {\n\n pos: (usize, usize),\n\n size: (usize, usize),\n\n}\n\n\n\npub struct MonitorConsole {\n\n parser: Parser,\n\n performer: Performer,\n\n}\n\n\n\nimpl MonitorConsole {\n\n pub fn new() -> MonitorConsole {\n\n if let Some(display) = &mut *GOP_DISPLAY.lock() {\n\n let display_size = display.size();\n\n let (width, height) = (display_size.width as usize, display_size.height as usize);\n\n let rows = height / 16;\n\n let columns = width / 8;\n\n MonitorConsole {\n\n parser: Parser::new(),\n\n performer: Performer {\n", "file_path": "xv7-kernel/src/arch/x86_64/device/monitor_console.rs", "rank": 56, "score": 55248.19975693909 }, { "content": "/// A trait for implementing arbitrary return types in the `main` function.\n\n///\n\n/// The C-main function only supports to return integers as return type.\n\n/// So, every type implementing the `Termination` trait has to be converted\n\n/// to an integer.\n\n///\n\n/// The default implementations are returning `libc::EXIT_SUCCESS` to indicate\n\n/// a successful execution. 
In case of a failure, `libc::EXIT_FAILURE` is returned.\n\npub trait Termination {\n\n /// Is called to get the representation of the value as status code.\n\n /// This status code is returned to the operating system.\n\n fn report(self) -> i32;\n\n}\n\n\n\nimpl Termination for () {\n\n #[inline]\n\n fn report(self) -> i32 {\n\n 0\n\n }\n\n}\n", "file_path": "xv7-user/src/process.rs", "rank": 57, "score": 49328.91908159682 }, { "content": "/// A console device.\n\npub trait Console {\n\n fn write(&mut self, buf: &[u8]);\n\n}\n\n\n\npub struct ConsoleDrivers {\n\n consoles: Vec<Box<dyn Console + Send>>,\n\n}\n\n\n\nimpl ConsoleDrivers {\n\n pub fn new() -> Self {\n\n Self {\n\n consoles: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn register(&mut self, console: Box<dyn Console + Send>) {\n\n self.consoles.push(console)\n\n }\n\n\n\n pub fn write(&mut self, buf: &[u8]) {\n", "file_path": "xv7-kernel/src/device/console.rs", "rank": 58, "score": 48338.37517664951 }, { "content": "pub trait Pretty<T> {\n\n fn pretty(&self) -> PrettyRef<T>;\n\n}\n\n\n\nimpl<T> Pretty<T> for T {\n\n fn pretty(&self) -> PrettyRef<T> {\n\n PrettyRef(self)\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for PrettyRef<'a, usize> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if *self.0 >= 1 << 40 {\n\n write!(f, \"{}TiB\", self.0 >> 40)\n\n } else if *self.0 >= 1 << 30 {\n\n write!(f, \"{}GiB\", self.0 >> 30)\n\n } else if *self.0 >= 1 << 20 {\n\n write!(f, \"{}MiB\", self.0 >> 20)\n\n } else if *self.0 >= 1 << 10 {\n\n write!(f, \"{}KiB\", self.0 >> 10)\n\n } else {\n\n write!(f, \"{}B\", self.0)\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/pretty.rs", "rank": 59, "score": 46725.346221382744 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n crate::arch::interrupt::disable();\n\n crate::println!(\"kernel {}\", info);\n\n crate::arch::idle();\n\n}\n\n\n", "file_path": "xv7-kernel/src/rt.rs", "rank": 60, "score": 45412.98527032834 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error_handler(layout: Layout) -> ! {\n\n panic!(\"Failed to allocate memory: {:?}\", layout)\n\n}\n", "file_path": "xv7-kernel/src/rt.rs", "rank": 61, "score": 44547.229885379704 }, { "content": "#[cfg(not(test))]\n\n#[panic_handler]\n\nfn panic(info: &core::panic::PanicInfo) -> ! {\n\n println!(\"{}\", info);\n\n crate::syscall::exit(-1);\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn _start() -> ! 
{\n\n extern \"C\" {\n\n fn main(argc: isize, argv: *const *const u8) -> isize;\n\n }\n\n // TODO: setup argc and argv\n\n let exit_code = main(0, 0 as *const *const u8);\n\n crate::syscall::exit(exit_code);\n\n}\n\n\n\n#[cfg(not(test))]\n\n#[lang = \"start\"]\n\nextern \"C\" fn lang_start<T: crate::process::Termination + 'static>(\n\n main: fn() -> T,\n\n _argc: isize,\n\n _argv: *const *const u8,\n\n) -> isize {\n\n let exit_code = main().report();\n\n exit_code as isize\n\n}\n", "file_path": "xv7-user/src/rt.rs", "rank": 62, "score": 41254.815937795756 }, { "content": "fn print_system_information(system_table: &SystemTable<Boot>) -> uefi::Result {\n\n info!(\n\n \"{} v{}\",\n\n env!(\"CARGO_PKG_DESCRIPTION\"),\n\n env!(\"CARGO_PKG_VERSION\")\n\n );\n\n info!(\"By {}\", env!(\"CARGO_PKG_AUTHORS\"));\n\n\n\n info!(\n\n \"UEFI Firmware {} {:#?}\",\n\n system_table.firmware_vendor(),\n\n system_table.firmware_revision()\n\n );\n\n\n\n let now = system_table.runtime_services().get_time().log_warning()?;\n\n let now = Utc\n\n .ymd(now.year() as i32, now.month() as u32, now.day() as u32)\n\n .and_hms(now.hour() as u32, now.minute() as u32, now.second() as u32);\n\n info!(\"TimeZone Bupt/Jwxt: {}\", now);\n\n\n", "file_path": "xv7-bootloader-uefi/src/main.rs", "rank": 63, "score": 36827.00148876027 }, { "content": "#[entry]\n\nfn efi_main(image_handle: Handle, system_table: SystemTable<Boot>) -> Status {\n\n uefi_services::init(&system_table).expect_success(\"Failed to initialize UEFI environment\");\n\n let _ = system_table.stdout().clear().unwrap();\n\n\n\n let boot_services = system_table.boot_services();\n\n\n\n boot_services\n\n .set_watchdog_timer(0, 0x10000, None)\n\n .expect_success(\"Could not set watchdog timer\");\n\n\n\n print_system_information(&system_table).expect_success(\"Failed to print system information\");\n\n\n\n // Initialize our \"kernel\" frame allocator which marks frames as `MEMORY_TYPE_KERNEL`.\n\n let mut frame_allocator = paging::KernelFrameAllocator::new(boot_services);\n\n\n\n let mut page_table = paging::init_recursive(&mut frame_allocator);\n\n // load kernel ELF image.\n\n let kernel_entry = loader::load_elf(\n\n boot_services,\n\n &mut page_table,\n", "file_path": "xv7-bootloader-uefi/src/main.rs", "rank": 64, "score": 35379.34894221662 }, { "content": "use crate::arch::device::uart;\n\nuse crate::device::console::Console;\n\n\n\npub struct SerialConsole;\n\n\n\nimpl SerialConsole {\n\n pub fn new() -> Self {\n\n Self\n\n }\n\n}\n\n\n\nimpl Console for SerialConsole {\n\n fn write(&mut self, buf: &[u8]) {\n\n let mut port = uart::COM1.lock();\n\n for &c in buf {\n\n port.send(c);\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/device/serial_console.rs", "rank": 72, "score": 23.134291635445443 }, { "content": " }\n\n\n\n pub fn initcode() -> Process {\n\n let mut p = Process::new();\n\n p.set_context_switch_return_address(VirtAddr::new(initcode as *const u8 as u64));\n\n p\n\n }\n\n\n\n pub fn intr_stack_frame(&mut self) -> &mut InterruptStackFrameValue {\n\n unsafe {\n\n &mut *((self.kstack.as_u64() + 4096\n\n - core::mem::size_of::<InterruptStackFrameValue>() as u64)\n\n as *mut InterruptStackFrameValue)\n\n }\n\n }\n\n\n\n fn set_context_switch_return_address(&mut self, addr: VirtAddr) {\n\n let stack_pointer =\n\n self.kstack + 4096usize - core::mem::size_of::<InterruptStackFrameValue>() - 8usize;\n\n unsafe {\n", "file_path": "xv7-kernel/src/process.rs", "rank": 75, "score": 22.322446114092756 }, { "content": "use crate::config::*;\n\nuse 
crate::pretty::Pretty;\n\nuse bitvec::prelude::*;\n\nuse boot::PhysAddr;\n\nuse lazy_static::lazy_static;\n\nuse spin::Mutex;\n\npub use x86_64::structures::paging::{FrameAllocator, FrameDeallocator};\n\nuse x86_64::structures::paging::{PageSize, PhysFrame, Size4KiB};\n\n\n\npub struct BitmapFrameAllocator<'map> {\n\n #[allow(unused)]\n\n inner: &'map mut BitSlice<Lsb0, u8>,\n\n}\n\n\n\nimpl<'map> BitmapFrameAllocator<'map> {\n\n pub fn new(map: &'map mut [u8]) -> Self {\n\n Self {\n\n inner: BitSlice::from_slice_mut(map),\n\n }\n\n }\n", "file_path": "xv7-kernel/src/memory/bitmap.rs", "rank": 77, "score": 21.512750279465305 }, { "content": "\n\n MASTER.ack();\n\n SLAVE.ack();\n\n}\n\n\n\npub struct Pic {\n\n cmd: Port<u8>,\n\n data: Port<u8>,\n\n}\n\n\n\nimpl Pic {\n\n pub const fn new(port: u16) -> Self {\n\n Self {\n\n cmd: Port::new(port),\n\n data: Port::new(port + 1),\n\n }\n\n }\n\n\n\n pub fn ack(&mut self) {\n\n unsafe {\n\n self.cmd.write(0x20);\n\n }\n\n }\n\n}\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt/controller/pic.rs", "rank": 78, "score": 20.980826789012376 }, { "content": " None\n\n }\n\n })\n\n .next();\n\n\n\n if let Some((index, frame)) = frame {\n\n self.inner.set(index, false);\n\n Some(frame)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'map> FrameDeallocator<Size4KiB> for BitmapFrameAllocator<'map> {\n\n unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) {\n\n let index = frame.start_address().as_u64() / Size4KiB::SIZE;\n\n self.inner.set(index as usize, true);\n\n }\n\n}\n\n\n\nlazy_static! {\n\n pub static ref FRAME_ALLOCATOR: Mutex<BitmapFrameAllocator<'static>> = {\n\n static mut MAP: [u8; MAX_FRAMES_SUPPORTED / 8] = [0; MAX_FRAMES_SUPPORTED / 8];\n\n Mutex::new(BitmapFrameAllocator::new(unsafe { &mut MAP }))\n\n };\n\n}\n", "file_path": "xv7-kernel/src/memory/bitmap.rs", "rank": 79, "score": 20.28654623176509 }, { "content": "use x86_64::VirtAddr;\n\n\n\nuse super::*;\n\nuse crate::config::*;\n\n\n\npub const IOAPIC_BASE: u64 = 0xFEC0_0000;\n\n\n\npub struct IoApic {\n\n sel: *mut u32,\n\n data: *mut u32,\n\n}\n\n\n\nimpl IoApic {\n\n #[allow(unused)]\n\n pub unsafe fn new(addr: VirtAddr) -> Self {\n\n Self {\n\n sel: addr.as_mut_ptr(),\n\n data: (addr as VirtAddr + 0x10u64).as_mut_ptr(),\n\n }\n\n }\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt/controller/ioapic.rs", "rank": 81, "score": 19.654848481278584 }, { "content": " let mut context = Context::user(stack_pointer);\n\n\n\n let vm = AddressSpace::new();\n\n\n\n context.cr3 = vm.cr3.start_address().as_u64() as usize;\n\n\n\n unsafe {\n\n stack_pointer\n\n .as_mut_ptr::<u64>()\n\n .write(interrupt_return as *const u8 as u64);\n\n };\n\n\n\n Process {\n\n pid: NEXT_PID.fetch_add(1, Ordering::Relaxed),\n\n context,\n\n vm,\n\n state: ProcessState::Spawn,\n\n kstack,\n\n fds: Vec::new(),\n\n }\n", "file_path": "xv7-kernel/src/process.rs", "rank": 83, "score": 18.751195748903022 }, { "content": "use crate::context::Context;\n\nuse crate::cpu::my_cpu;\n\nuse crate::fs::file::File;\n\nuse crate::paging::{AddressSpace, VirtAddr};\n\nuse crate::{\n\n config::*,\n\n memory::{FrameAllocator, FRAME_ALLOCATOR},\n\n};\n\nuse alloc::vec::Vec;\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\nuse x86_64::structures::idt::InterruptStackFrameValue;\n\n\n\npub enum ProcessState {\n\n Spawn,\n\n Runnable,\n\n Running,\n\n Zombie,\n\n}\n\n\n\nstatic NEXT_PID: AtomicUsize = AtomicUsize::new(0);\n", "file_path": "xv7-kernel/src/process.rs", "rank": 85, "score": 17.75401104475956 }, 
{ "content": " Reserved,\n\n}\n\n\n\n/// Describe video frame buffer.\n\n#[derive(Clone, Copy, Debug)]\n\n#[repr(C)]\n\npub struct FrameBufferDescriptor {\n\n /// Base address\n\n pub base: PhysAddr,\n\n /// buffer length\n\n pub len: usize,\n\n /// resolution\n\n pub resolution: (usize, usize),\n\n}\n\n\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Copy, Debug)]\n\n#[repr(C, packed)]\n\npub struct RsdpDescriptor {\n\n signature: [u8; 8],\n", "file_path": "xv7-boot/src/lib.rs", "rank": 86, "score": 17.657739367529764 }, { "content": "}\n\n\n\npub struct AddressSpace {\n\n pub cr3: PhysFrame,\n\n}\n\n\n\nimpl AddressSpace {\n\n pub fn new() -> AddressSpace {\n\n let table_frame = crate::memory::FRAME_ALLOCATOR\n\n .lock()\n\n .allocate_frame()\n\n .unwrap();\n\n let table = unsafe {\n\n &mut *{ (PAGE_OFFSET_BASE + table_frame.start_address().as_u64()) as *mut PageTable }\n\n };\n\n let current_table = unsafe { active_page_table() };\n\n for i in 0..256 {\n\n table[i].set_unused();\n\n }\n\n for i in 256..512 {\n", "file_path": "xv7-kernel/src/arch/x86_64/paging.rs", "rank": 87, "score": 17.27246321903602 }, { "content": "use lazy_static::lazy_static;\n\nuse spin::Mutex;\n\nuse x86_64::VirtAddr;\n\n\n\nuse super::*;\n\nuse crate::config::*;\n\n\n\npub const LOCAL_APIC_BASE: u64 = 0xFEE0_0000;\n\n\n\npub struct LocalApic {\n\n base: VirtAddr,\n\n}\n\n\n\nimpl LocalApic {\n\n pub unsafe fn new(addr: VirtAddr) -> Self {\n\n Self { base: addr }\n\n }\n\n\n\n pub unsafe fn read(&self, reg: u32) -> u32 {\n\n (self.base + reg as u64).as_ptr::<u32>().read_volatile()\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt/controller/lapic.rs", "rank": 88, "score": 16.791950625666086 }, { "content": " stack_pointer.as_mut_ptr::<u64>().write(addr.as_u64());\n\n };\n\n }\n\n\n\n pub fn set_userspace_return_address(\n\n &mut self,\n\n instruction_pointer: VirtAddr,\n\n stack_pointer: VirtAddr,\n\n ) {\n\n self.intr_stack_frame().instruction_pointer = instruction_pointer;\n\n self.intr_stack_frame().stack_pointer = stack_pointer;\n\n // FIXME: magic number\n\n self.intr_stack_frame().code_segment = 0x23;\n\n self.intr_stack_frame().stack_segment = 0x1b;\n\n self.intr_stack_frame().cpu_flags = 0x282;\n\n }\n\n}\n\n\n\nimpl Clone for Process {\n\n fn clone(&self) -> Self {\n", "file_path": "xv7-kernel/src/process.rs", "rank": 89, "score": 16.488003136010224 }, { "content": "use crate::arch::gdt;\n\nuse crate::context::Context;\n\nuse crate::process::Process;\n\nuse x86_64::structures::tss::TaskStateSegment;\n\n\n\n#[repr(C)]\n\npub struct Cpu {\n\n pub kernel_context: Context,\n\n pub current_process: Option<Process>,\n\n}\n\n\n\nimpl Cpu {\n\n pub const fn new() -> Cpu {\n\n Cpu {\n\n kernel_context: Context::new(),\n\n current_process: None,\n\n }\n\n }\n\n\n\n pub unsafe fn switch_to_process(&mut self) {\n", "file_path": "xv7-kernel/src/cpu.rs", "rank": 90, "score": 16.267143265049487 }, { "content": "use uefi::prelude::*;\n\nuse uefi::table::boot::{AllocateType, MemoryType};\n\nuse x86_64::registers::control::{Cr3, Cr3Flags, Cr4, Cr4Flags};\n\nuse x86_64::registers::model_specific::{Efer, EferFlags};\n\nuse x86_64::structures::paging::{\n\n FrameAllocator, Mapper, Page, PageSize, PageTable, PageTableFlags, PhysFrame,\n\n RecursivePageTable, Size2MiB, Size4KiB,\n\n};\n\nuse x86_64::{align_up, PhysAddr, VirtAddr};\n\n\n\n/// UEFI allows us to introduce new memory types\n\n/// in the 0x70000000..0xFFFFFFFF range.\n\n#[allow(unused)]\n\npub const MEMORY_TYPE_KERNEL: u32 = 0x80000000;\n\n\n\n/// This frame 
allocator marks frames as `MEMORY_TYPE_KERNEL`.\n\npub struct KernelFrameAllocator<'a>(&'a BootServices);\n\n\n\nimpl<'a> KernelFrameAllocator<'a> {\n\n pub fn new(services: &'a BootServices) -> Self {\n", "file_path": "xv7-bootloader-uefi/src/paging.rs", "rank": 92, "score": 15.685520520595801 }, { "content": "\n\npub struct Process {\n\n pub pid: usize,\n\n pub state: ProcessState,\n\n pub vm: AddressSpace,\n\n pub context: Context,\n\n pub kstack: VirtAddr,\n\n pub fds: Vec<File>,\n\n}\n\n\n\nimpl Process {\n\n pub fn new() -> Process {\n\n let kstack = {\n\n let frame = FRAME_ALLOCATOR.lock().allocate_frame().unwrap();\n\n VirtAddr::new(frame.start_address().as_u64() + PAGE_OFFSET_BASE)\n\n };\n\n\n\n let stack_pointer =\n\n kstack + 4096usize - core::mem::size_of::<InterruptStackFrameValue>() - 8usize;\n\n\n", "file_path": "xv7-kernel/src/process.rs", "rank": 93, "score": 15.631208796279365 }, { "content": "use crate::{Error, Result};\n\n\n\npub unsafe fn syscall0(mut a: usize) -> Result<usize> {\n\n llvm_asm!(\n\n \"syscall\"\n\n : \"={rax}\"(a)\n\n : \"{rax}\"(a)\n\n : \"rcx\", \"r11\", \"memory\"\n\n : \"volatile\"\n\n );\n\n Error::demux(a)\n\n}\n\n\n\npub unsafe fn syscall1(mut a: usize, b: usize) -> Result<usize> {\n\n llvm_asm!(\n\n \"syscall\"\n\n : \"={rax}\"(a)\n\n : \"{rax}\"(a), \"{rdi}\"(b)\n\n : \"rcx\", \"r11\", \"memory\"\n\n : \"volatile\"\n", "file_path": "xv7-usyscall/src/arch/x86_64.rs", "rank": 94, "score": 15.362960074331353 }, { "content": "\n\nuse alloc::boxed::Box;\n\nuse core::mem::MaybeUninit;\n\n\n\nuse boot::MemoryMapIter;\n\nuse boot::BOOT_ARGS_MAGIC;\n\nuse boot::{BootArgs, FrameBufferDescriptor, KernelEntry, KernelEntryFn, MemoryMap};\n\n\n\nuse chrono::prelude::*;\n\nuse uefi::prelude::*;\n\nuse x86_64::{\n\n structures::paging::{PageSize, Size4KiB},\n\n PhysAddr, VirtAddr,\n\n};\n\n\n\nuse config::*;\n\n\n\nstatic mut KERNEL_ENTRY: KernelEntry = KernelEntry(VirtAddr::new_truncate(0x0));\n\nstatic mut FRAME_BUFFER_BASE: u64 = 0x0;\n\nstatic mut FRAME_BUFFER_LEN: usize = 0x0;\n\nstatic mut RESOLUTION: (usize, usize) = (0, 0);\n\nstatic mut MMAP_ITER: MaybeUninit<MemoryMapIter> = MaybeUninit::uninit();\n\n\n\n#[entry]\n", "file_path": "xv7-bootloader-uefi/src/main.rs", "rank": 95, "score": 15.249858801794947 }, { "content": "use super::controller::LOCAL_APIC;\n\nuse lazy_static::lazy_static;\n\nuse pc_keyboard::{layouts, DecodedKey, HandleControl, Keyboard, ScancodeSet1};\n\nuse spin::Mutex;\n\nuse x86_64::instructions::port::Port;\n\nuse x86_64::structures::idt::InterruptStackFrame;\n\n\n\nlazy_static! 
{\n\n static ref KEYBOARD: Mutex<Keyboard<layouts::Us104Key, ScancodeSet1>> = Mutex::new(\n\n Keyboard::new(layouts::Us104Key, ScancodeSet1, HandleControl::Ignore)\n\n );\n\n}\n\n\n\npub extern \"x86-interrupt\" fn handler(_stack_frame: InterruptStackFrame) {\n\n let mut keyboard = KEYBOARD.lock();\n\n let mut port = Port::<u8>::new(0x60);\n\n let scancode = unsafe { port.read() };\n\n\n\n if let Ok(Some(key_event)) = keyboard.add_byte(scancode) {\n\n if let Some(key) = keyboard.process_keyevent(key_event) {\n", "file_path": "xv7-kernel/src/arch/x86_64/interrupt/keyboard.rs", "rank": 96, "score": 15.108761618961053 }, { "content": "use crate::paging::VirtAddr;\n\n\n\n#[derive(Debug)]\n\n#[repr(C)]\n\npub struct Context {\n\n pub cr3: usize,\n\n pub rsp: usize,\n\n pub rflags: usize,\n\n pub r15: usize,\n\n pub r14: usize,\n\n pub r13: usize,\n\n pub r12: usize,\n\n pub rbp: usize,\n\n pub rbx: usize,\n\n}\n\n\n\nimpl Context {\n\n pub const fn new() -> Context {\n\n Context {\n\n cr3: 0,\n", "file_path": "xv7-kernel/src/arch/x86_64/context.rs", "rank": 97, "score": 15.100699225839094 }, { "content": "use super::vfs::Inode;\n\nuse crate::arch::interrupt::without_interrupts;\n\npub struct Console;\n\n\n\nimpl Inode for Console {\n\n fn read_at(&self, _offset: usize, buf: &mut [u8]) -> super::Result<usize> {\n\n let mut cnt = 0;\n\n while let Ok(b) = crate::device::console::KEYBOARD_BUFFER.pop() {\n\n if cnt >= buf.len() {\n\n break;\n\n }\n\n buf[cnt] = b;\n\n cnt += 1;\n\n }\n\n Ok(cnt)\n\n }\n\n\n\n fn write_at(&self, _offset: usize, buf: &[u8]) -> super::Result<usize> {\n\n without_interrupts(|| crate::device::console::CONSOLE_DRIVERS.lock().write(buf));\n\n Ok(buf.len())\n\n }\n\n}\n", "file_path": "xv7-kernel/src/fs/dev.rs", "rank": 98, "score": 14.94803738811537 }, { "content": "use crate::fs::vfs::Inode;\n\nuse crate::fs::Result;\n\nuse alloc::sync::Arc;\n\n\n\nuse super::vfs::Metadata;\n\n\n\npub struct File {\n\n inode: Arc<dyn Inode>,\n\n offset: usize,\n\n readable: bool,\n\n writable: bool,\n\n}\n\n\n\nimpl File {\n\n pub fn new(inode: Arc<dyn Inode>, readable: bool, writable: bool) -> Self {\n\n File {\n\n inode,\n\n offset: 0,\n\n readable,\n\n writable,\n", "file_path": "xv7-kernel/src/fs/file.rs", "rank": 99, "score": 14.943458830397446 } ]
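Several of the snippets above (init_heap, Process::new, AddressSpace::new) share one usage pattern: a global, Mutex-guarded allocator that implements the x86_64 crate's FrameAllocator trait and is driven through FRAME_ALLOCATOR.lock().allocate_frame(). A minimal sketch of that pattern, under the assumption that a const-constructible stand-in allocator is acceptable; DummyAllocator and its bump-style policy are illustrative only, not the BitmapFrameAllocator/BuddyFrameAllocator types in the quoted code.

// Sketch only: DummyAllocator is a placeholder, not a type from the quoted repo.
use spin::Mutex;
use x86_64::structures::paging::{FrameAllocator, PhysFrame, Size4KiB};
use x86_64::PhysAddr;

struct DummyAllocator {
    next: u64,
}

// The trait is unsafe because callers rely on returned frames being valid and unused.
unsafe impl FrameAllocator<Size4KiB> for DummyAllocator {
    fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> {
        // Hand out consecutive 4 KiB frames starting at 1 MiB (illustration only).
        let frame = PhysFrame::containing_address(PhysAddr::new(0x10_0000 + self.next * 4096));
        self.next += 1;
        Some(frame)
    }
}

static FRAME_ALLOCATOR: Mutex<DummyAllocator> = Mutex::new(DummyAllocator { next: 0 });

fn grab_one_frame() -> Option<PhysFrame<Size4KiB>> {
    // Same call shape as FRAME_ALLOCATOR.lock().allocate_frame() in the snippets.
    FRAME_ALLOCATOR.lock().allocate_frame()
}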
Rust
src/sysctrl/sysctrl_lpdsp32_debug_cfg.rs
ldicocco/rsl10-pac
007871e940fe30f83de1da0f15fd25b052d1f340
#[doc = "Reader of register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type R = crate::R<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Writer for register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type W = crate::W<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Register SYSCTRL_LPDSP32_DEBUG_CFG `reset()`'s with value 0"] impl crate::ResetValue for super::SYSCTRL_LPDSP32_DEBUG_CFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "LPDSP32 exit powerdown mode configuration when halted\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { #[doc = "0: LPDSP32 exit powerdown when halted disabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED = 0, #[doc = "1: LPDSP32 exit powerdown when halted enabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED = 1, } impl From<LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A> for bool { #[inline(always)] fn from(variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub type LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R = crate::R<bool, LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A>; impl LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { match self.bits { false => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } true => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_disabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_enabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } #[doc = "Write proxy for field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub struct LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 exit powerdown when halted disabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_disabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED, ) } #[doc = "LPDSP32 exit powerdown when halted enabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_enabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED, ) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "LPDSP32 debug port enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_DEBUG_ENABLE_A { #[doc = "0: LPDSP32 debug port disabled"] LPDSP32_DEBUG_DISABLED = 0, 
#[doc = "1: LPDSP32 debug port enabled"] LPDSP32_DEBUG_ENABLED = 1, } impl From<LPDSP32_DEBUG_ENABLE_A> for bool { #[inline(always)] fn from(variant: LPDSP32_DEBUG_ENABLE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_DEBUG_ENABLE`"] pub type LPDSP32_DEBUG_ENABLE_R = crate::R<bool, LPDSP32_DEBUG_ENABLE_A>; impl LPDSP32_DEBUG_ENABLE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_DEBUG_ENABLE_A { match self.bits { false => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED, true => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED, } } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_disabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_enabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED } } #[doc = "Write proxy for field `LPDSP32_DEBUG_ENABLE`"] pub struct LPDSP32_DEBUG_ENABLE_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_DEBUG_ENABLE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_DEBUG_ENABLE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 debug port disabled"] #[inline(always)] pub fn lpdsp32_debug_disabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED) } #[doc = "LPDSP32 debug port enabled"] #[inline(always)] pub fn lpdsp32_debug_enabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&self) -> LPDSP32_DEBUG_ENABLE_R { LPDSP32_DEBUG_ENABLE_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&mut self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { w: self } } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&mut self) -> LPDSP32_DEBUG_ENABLE_W { LPDSP32_DEBUG_ENABLE_W { w: self } } }
#[doc = "Reader of register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type R = crate::R<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Writer for register SYSCTRL_LPDSP32_DEBUG_CFG"] pub type W = crate::W<u32, super::SYSCTRL_LPDSP32_DEBUG_CFG>; #[doc = "Register SYSCTRL_LPDSP32_DEBUG_CFG `reset()`'s with value 0"] impl crate::ResetValue for super::SYSCTRL_LPDSP32_DEBUG_CFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "LPDSP32 exit powerdown mode configuration when halted\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { #[doc = "0: LPDSP32 exit powerdown when halted disabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED = 0, #[doc = "1: LPDSP32 exit powerdown when halted enabled"] LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED = 1, } impl From<LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A> for bool { #[inline(always)] fn from(variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub type LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R = crate::R<bool, LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A>; impl LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A { match self.bits { false => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } true => { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_disabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_exit_powerdown_when_halted_enabled(&self) -> bool { *self == LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED } } #[doc = "Write proxy for field `LPDSP32_EXIT_POWERDOWN_WHEN_HALTED`"] pub struct LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 exit powerdown when halted disabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_disabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_DISABLED, ) } #[doc = "LPDSP32 exit powerdown when halted enabled"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted_enabled(self) -> &'a mut W { self.variant( LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_A::LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_ENABLED, ) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "LPDSP32 debug port enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LPDSP32_DEBUG_ENABLE_A { #[doc = "0: LPDSP32 debug port disabled"] LPDSP32_DEBUG_DISABLED = 0, 
#[doc = "1: LPDSP32 debug port enabled"] LPDSP32_DEBUG_ENABLED = 1, } impl From<LPDSP32_DEBUG_ENABLE_A> for bool { #[inline(always)] fn from(variant: LPDSP32_DEBUG_ENABLE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LPDSP32_DEBUG_ENABLE`"] pub type LPDSP32_DEBUG_ENABLE_R = crate::R<bool, LPDSP32_DEBUG_ENABLE_A>; impl LPDSP32_DEBUG_ENABLE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LPDSP32_DEBUG_ENABLE_A {
} #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_DISABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_disabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED } #[doc = "Checks if the value of the field is `LPDSP32_DEBUG_ENABLED`"] #[inline(always)] pub fn is_lpdsp32_debug_enabled(&self) -> bool { *self == LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED } } #[doc = "Write proxy for field `LPDSP32_DEBUG_ENABLE`"] pub struct LPDSP32_DEBUG_ENABLE_W<'a> { w: &'a mut W, } impl<'a> LPDSP32_DEBUG_ENABLE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPDSP32_DEBUG_ENABLE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "LPDSP32 debug port disabled"] #[inline(always)] pub fn lpdsp32_debug_disabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED) } #[doc = "LPDSP32 debug port enabled"] #[inline(always)] pub fn lpdsp32_debug_enabled(self) -> &'a mut W { self.variant(LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&self) -> LPDSP32_DEBUG_ENABLE_R { LPDSP32_DEBUG_ENABLE_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 1 - LPDSP32 exit powerdown mode configuration when halted"] #[inline(always)] pub fn lpdsp32_exit_powerdown_when_halted(&mut self) -> LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { LPDSP32_EXIT_POWERDOWN_WHEN_HALTED_W { w: self } } #[doc = "Bit 0 - LPDSP32 debug port enable"] #[inline(always)] pub fn lpdsp32_debug_enable(&mut self) -> LPDSP32_DEBUG_ENABLE_W { LPDSP32_DEBUG_ENABLE_W { w: self } } }
match self.bits { false => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_DISABLED, true => LPDSP32_DEBUG_ENABLE_A::LPDSP32_DEBUG_ENABLED, }
if_condition
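The generated module above follows the standard svd2rust access pattern: the register's R type exposes typed readers such as is_lpdsp32_debug_enabled(), and the W type exposes builder proxies such as lpdsp32_debug_enabled() and set_bit(). A minimal usage sketch follows, assuming the register is declared both readable and writable (the presence of R, W and ResetValue suggests so); the crate, peripheral and field names are inferred from the module path and are not confirmed by the row above.

// Sketch only: crate/peripheral/field names are assumptions inferred from
// src/sysctrl/sysctrl_lpdsp32_debug_cfg.rs in the ldicocco/rsl10-pac repo.
use rsl10_pac as pac;

fn configure_lpdsp32_debug(sysctrl: &pac::SYSCTRL) {
    // Whole-register write: enable the debug port, leave the other bit at its reset value (0).
    sysctrl
        .sysctrl_lpdsp32_debug_cfg
        .write(|w| w.lpdsp32_debug_enable().lpdsp32_debug_enabled());

    // Read-modify-write: additionally allow LPDSP32 to exit powerdown when halted.
    sysctrl
        .sysctrl_lpdsp32_debug_cfg
        .modify(|_, w| w.lpdsp32_exit_powerdown_when_halted().set_bit());

    // Typed read-back through the generated enum helpers.
    let debug_on = sysctrl
        .sysctrl_lpdsp32_debug_cfg
        .read()
        .lpdsp32_debug_enable()
        .is_lpdsp32_debug_enabled();
    let _ = debug_on;
}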
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 148936.91922934874 }, { "content": "#[doc = \"Reader of register DEBUG_DEMCR\"]\n\npub type R = crate::R<u32, super::DEBUG_DEMCR>;\n\n#[doc = \"Writer for register DEBUG_DEMCR\"]\n\npub type W = crate::W<u32, super::DEBUG_DEMCR>;\n\n#[doc = \"Register DEBUG_DEMCR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DEBUG_DEMCR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRCENA`\"]\n\npub type TRCENA_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TRCENA`\"]\n\npub struct TRCENA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRCENA_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 11, "score": 63444.531671470504 }, { "content": "#[doc = \"Reader of register DEBUG_DCRDR\"]\n\npub type R = crate::R<u32, super::DEBUG_DCRDR>;\n\n#[doc = \"Writer for register DEBUG_DCRDR\"]\n\npub type W = crate::W<u32, super::DEBUG_DCRDR>;\n\n#[doc = \"Register DEBUG_DCRDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DEBUG_DCRDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DEBUG_REGDATA`\"]\n\npub type DEBUG_REGDATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DEBUG_REGDATA`\"]\n\npub struct DEBUG_REGDATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEBUG_REGDATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/debug/debug_dcrdr.rs", "rank": 12, "score": 63444.10497968408 }, { "content": "#[doc = \"Reader of register DEBUG_DHCSR\"]\n\npub type R = crate::R<u32, super::DEBUG_DHCSR>;\n\n#[doc = \"Writer for register DEBUG_DHCSR\"]\n\npub type W = crate::W<u32, super::DEBUG_DHCSR>;\n\n#[doc = \"Register DEBUG_DHCSR `reset()`'s with value 0x01\"]\n\nimpl crate::ResetValue for super::DEBUG_DHCSR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x01\n\n }\n\n}\n\n#[doc = \"Debug key must be written to this field in order to write the rest of the register\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u16)]\n\npub enum DBGKEY_AW {\n\n #[doc = \"41055: DEBUG_HALT_KEY\"]\n\n DEBUG_HALT_KEY = 41055,\n\n}\n\nimpl From<DBGKEY_AW> for u16 {\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 13, "score": 63433.210430306826 }, { "content": "#[doc = \"Reader of register DEBUG_DCRSR\"]\n\npub type R = crate::R<u32, super::DEBUG_DCRSR>;\n\n#[doc = \"Writer for register DEBUG_DCRSR\"]\n\npub type W = crate::W<u32, super::DEBUG_DCRSR>;\n\n#[doc = \"Register DEBUG_DCRSR `reset()`'s with value 
0\"]\n\nimpl crate::ResetValue for super::DEBUG_DCRSR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Indicates direction of register transfer\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum REGWNR_AW {\n\n #[doc = \"0: Indicates register read\"]\n\n REGWNR_READ = 0,\n\n #[doc = \"1: Indicates register write\"]\n\n REGWNR_WRITE = 1,\n\n}\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 14, "score": 63431.132044465696 }, { "content": " #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `C_HALT`\"]\n\npub type C_HALT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `C_HALT`\"]\n\npub struct C_HALT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> C_HALT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 15, "score": 63427.361164220005 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `C_STEP`\"]\n\npub type C_STEP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `C_STEP`\"]\n\npub struct C_STEP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> C_STEP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 16, "score": 63424.001671310565 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_NOCPERR`\"]\n\npub type VC_NOCPERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_NOCPERR`\"]\n\npub struct VC_NOCPERR_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/debug/debug_demcr.rs", "rank": 17, "score": 63421.723512044126 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_HARDERR`\"]\n\npub type VC_HARDERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_HARDERR`\"]\n\npub struct VC_HARDERR_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/debug/debug_demcr.rs", "rank": 18, "score": 63421.723512044126 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, 
value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `C_DEBUGEN`\"]\n\npub type C_DEBUGEN_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `C_DEBUGEN`\"]\n\npub struct C_DEBUGEN_W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 19, "score": 63421.278549203686 }, { "content": " false => MON_EN_A::DEBUG_MON_DISABLE,\n\n true => MON_EN_A::DEBUG_MON_ENABLE,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `DEBUG_MON_DISABLE`\"]\n\n #[inline(always)]\n\n pub fn is_debug_mon_disable(&self) -> bool {\n\n *self == MON_EN_A::DEBUG_MON_DISABLE\n\n }\n\n #[doc = \"Checks if the value of the field is `DEBUG_MON_ENABLE`\"]\n\n #[inline(always)]\n\n pub fn is_debug_mon_enable(&self) -> bool {\n\n *self == MON_EN_A::DEBUG_MON_ENABLE\n\n }\n\n}\n\n#[doc = \"Write proxy for field `MON_EN`\"]\n\npub struct MON_EN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MON_EN_W<'a> {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 20, "score": 63420.81490972604 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MON_REQ`\"]\n\npub type MON_REQ_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MON_REQ`\"]\n\npub struct MON_REQ_W<'a> {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 21, "score": 63420.54939656758 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_CHKERR`\"]\n\npub type VC_CHKERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_CHKERR`\"]\n\npub struct VC_CHKERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_CHKERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 22, "score": 63420.48967397259 }, { "content": "pub type C_SNAPSTALL_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `C_SNAPSTALL`\"]\n\npub struct C_SNAPSTALL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> C_SNAPSTALL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 23, "score": 63419.94098503938 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MON_EN_A {\n\n #[doc = \"0: Disable debug monitor exceptions\"]\n\n DEBUG_MON_DISABLE = 0,\n\n #[doc = \"1: Enable debug monitor exceptions\"]\n\n DEBUG_MON_ENABLE = 1,\n\n}\n\nimpl From<MON_EN_A> for bool {\n\n 
#[inline(always)]\n\n fn from(variant: MON_EN_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MON_EN`\"]\n\npub type MON_EN_R = crate::R<bool, MON_EN_A>;\n\nimpl MON_EN_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> MON_EN_A {\n\n match self.bits {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 24, "score": 63419.84908825094 }, { "content": "impl<'a> VC_NOCPERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_MMERR`\"]\n\npub type VC_MMERR_R = crate::R<bool, bool>;\n", "file_path": "src/debug/debug_demcr.rs", "rank": 25, "score": 63419.26534138519 }, { "content": "impl<'a> VC_HARDERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_INTERR`\"]\n\npub type VC_INTERR_R = crate::R<bool, bool>;\n", "file_path": "src/debug/debug_demcr.rs", "rank": 26, "score": 63419.26534138519 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `VC_STATERR`\"]\n\npub type VC_STATERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_STATERR`\"]\n\npub struct VC_STATERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_STATERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 27, "score": 63419.262657911895 }, { "content": "#[doc = \"Reader of field `MON_STEP`\"]\n\npub type MON_STEP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MON_STEP`\"]\n\npub struct MON_STEP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MON_STEP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 28, "score": 63418.97782596534 }, { "content": " self.variant(MON_PEND_A::DEBUG_MON_SETPEND)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the 
field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Enable the debug monitor exception\\n\\nValue on reset: 0\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 29, "score": 63418.9391716268 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - Register read/write data for debugging\"]\n\n #[inline(always)]\n\n pub fn debug_regdata(&self) -> DEBUG_REGDATA_R {\n\n DEBUG_REGDATA_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - Register read/write data for debugging\"]\n\n #[inline(always)]\n\n pub fn debug_regdata(&mut self) -> DEBUG_REGDATA_W {\n\n DEBUG_REGDATA_W { w: self }\n\n }\n\n}\n", "file_path": "src/debug/debug_dcrdr.rs", "rank": 30, "score": 63418.221872157024 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xffff << 16)) | (((value as u32) & 0xffff) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `S_RESET_ST`\"]\n\npub type S_RESET_ST_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `S_RETIRE_ST`\"]\n\npub type S_RETIRE_ST_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `S_LOCKUP`\"]\n\npub type S_LOCKUP_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `S_SLEEP`\"]\n\npub type S_SLEEP_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `S_HALT`\"]\n\npub type S_HALT_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `S_REGRDY`\"]\n\npub type S_REGRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `C_SNAPSTALL`\"]\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 31, "score": 63416.0436166973 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `VC_CORERESET`\"]\n\npub type VC_CORERESET_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_CORERESET`\"]\n\npub struct VC_CORERESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_CORERESET_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 32, "score": 63416.03369388947 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `VC_BUSERR`\"]\n\npub type VC_BUSERR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `VC_BUSERR`\"]\n\npub struct VC_BUSERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_BUSERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 33, "score": 63416.03369388947 }, { "content": "}\n\nimpl<'a> C_DEBUGEN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 34, "score": 63415.843297903324 }, { "content": "#[doc = \"Write proxy for field `VC_MMERR`\"]\n\npub struct VC_MMERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_MMERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n", "file_path": "src/debug/debug_demcr.rs", "rank": 35, "score": 63414.689099405834 }, { "content": "#[doc = \"Write proxy for field `VC_INTERR`\"]\n\npub struct VC_INTERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VC_INTERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n", "file_path": "src/debug/debug_demcr.rs", "rank": 36, "score": 63414.689099405834 }, { "content": " }\n\n #[doc = \"Access other registers including control, FAULTMASK, BASEPRI and PRIMASK\"]\n\n #[inline(always)]\n\n pub fn regsel_specreg(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_SPECREG)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f);\n\n self.w\n\n }\n\n}\n\nimpl R {}\n\nimpl W {\n\n #[doc = \"Bit 16 - Indicates direction of register transfer\"]\n\n #[inline(always)]\n\n pub fn regwn_r(&mut self) -> REGWNR_W {\n\n REGWNR_W { w: self }\n\n }\n\n #[doc = \"Bits 0:4 - Indicates register to be accessed\"]\n\n #[inline(always)]\n\n pub fn regsel(&mut self) -> REGSEL_W {\n\n REGSEL_W { w: self }\n\n }\n\n}\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 37, "score": 63413.618209190965 }, { "content": "impl From<REGWNR_AW> for bool {\n\n #[inline(always)]\n\n fn from(variant: REGWNR_AW) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `REGWnR`\"]\n\npub struct REGWNR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REGWNR_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: REGWNR_AW) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Indicates register read\"]\n\n #[inline(always)]\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 38, "score": 63413.13556184115 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Pend the monitor exception request\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MON_PEND_A {\n\n #[doc = \"0: Clear the pending status of the debug monitor exception\"]\n\n DEBUG_MON_CLRPEND = 
0,\n\n #[doc = \"1: Set pending status of the debug monitor exception\"]\n\n DEBUG_MON_SETPEND = 1,\n\n}\n\nimpl From<MON_PEND_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MON_PEND_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MON_PEND`\"]\n\npub type MON_PEND_R = crate::R<bool, MON_PEND_A>;\n", "file_path": "src/debug/debug_demcr.rs", "rank": 39, "score": 63413.120486069056 }, { "content": " #[doc = \"Bit 2 - Single step the processor\"]\n\n #[inline(always)]\n\n pub fn c_step(&self) -> C_STEP_R {\n\n C_STEP_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Halt the processor\"]\n\n #[inline(always)]\n\n pub fn c_halt(&self) -> C_HALT_R {\n\n C_HALT_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 0 - Enable halt mode debugging\"]\n\n #[inline(always)]\n\n pub fn c_debugen(&self) -> C_DEBUGEN_R {\n\n C_DEBUGEN_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 16:31 - Debug key must be written to this field in order to write the rest of the register\"]\n\n #[inline(always)]\n\n pub fn dbgkey(&mut self) -> DBGKEY_W {\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 40, "score": 63412.7230938817 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `C_MASKINTS`\"]\n\npub type C_MASKINTS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `C_MASKINTS`\"]\n\npub struct C_MASKINTS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> C_MASKINTS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 41, "score": 63411.59459666164 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> MON_REQ_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n", "file_path": "src/debug/debug_demcr.rs", "rank": 42, "score": 63411.22537473328 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Indicates register to be accessed\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum REGSEL_AW {\n\n #[doc = \"0: Select R0\"]\n\n REGSEL_R0 = 0,\n\n #[doc = \"1: Select R1\"]\n\n REGSEL_R1 = 1,\n\n #[doc = \"2: Select R2\"]\n\n REGSEL_R2 = 2,\n\n #[doc = \"3: Select R3\"]\n\n REGSEL_R3 = 3,\n\n #[doc = \"4: Select R4\"]\n\n REGSEL_R4 = 4,\n\n #[doc = \"5: Select R5\"]\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 43, "score": 63409.162029741245 }, { "content": " #[inline(always)]\n\n fn from(variant: DBGKEY_AW) -> Self {\n\n variant as _\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DBGKEY`\"]\n\npub struct DBGKEY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBGKEY_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DBGKEY_AW) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n\n }\n\n #[doc = 
\"DEBUG_HALT_KEY\"]\n\n #[inline(always)]\n\n pub fn debug_halt_key(self) -> &'a mut W {\n\n self.variant(DBGKEY_AW::DEBUG_HALT_KEY)\n\n }\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 44, "score": 63408.00487001657 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 24 - Trace system enable\"]\n\n #[inline(always)]\n\n pub fn trcena(&self) -> TRCENA_R {\n\n TRCENA_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 19 - Indicates that the debug monitor is caused by a manual pending request rather than a hardware event\"]\n\n #[inline(always)]\n\n pub fn mon_req(&self) -> MON_REQ_R {\n\n MON_REQ_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18 - Single step the processor\"]\n\n #[inline(always)]\n\n pub fn mon_step(&self) -> MON_STEP_R {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 45, "score": 63404.47097527032 }, { "content": " #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: MON_EN_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Disable debug monitor exceptions\"]\n\n #[inline(always)]\n\n pub fn debug_mon_disable(self) -> &'a mut W {\n\n self.variant(MON_EN_A::DEBUG_MON_DISABLE)\n\n }\n\n #[doc = \"Enable debug monitor exceptions\"]\n\n #[inline(always)]\n\n pub fn debug_mon_enable(self) -> &'a mut W {\n\n self.variant(MON_EN_A::DEBUG_MON_ENABLE)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 46, "score": 63402.90077075782 }, { "content": "impl MON_PEND_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> MON_PEND_A {\n\n match self.bits {\n\n false => MON_PEND_A::DEBUG_MON_CLRPEND,\n\n true => MON_PEND_A::DEBUG_MON_SETPEND,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `DEBUG_MON_CLRPEND`\"]\n\n #[inline(always)]\n\n pub fn is_debug_mon_clrpend(&self) -> bool {\n\n *self == MON_PEND_A::DEBUG_MON_CLRPEND\n\n }\n\n #[doc = \"Checks if the value of the field is `DEBUG_MON_SETPEND`\"]\n\n #[inline(always)]\n\n pub fn is_debug_mon_setpend(&self) -> bool {\n\n *self == MON_PEND_A::DEBUG_MON_SETPEND\n\n }\n\n}\n", "file_path": "src/debug/debug_demcr.rs", "rank": 47, "score": 63402.74687156529 }, { "content": " pub fn regwnr_read(self) -> &'a mut W {\n\n self.variant(REGWNR_AW::REGWNR_READ)\n\n }\n\n #[doc = \"Indicates register write\"]\n\n #[inline(always)]\n\n pub fn regwnr_write(self) -> &'a mut W {\n\n self.variant(REGWNR_AW::REGWNR_WRITE)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 48, "score": 63399.62907566033 }, { "content": " VC_NOCPERR_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 4 - Debug trap on memory management fault\"]\n\n #[inline(always)]\n\n pub fn vc_mmerr(&self) -> VC_MMERR_R {\n\n VC_MMERR_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 0 - Debug trap on core reset\"]\n\n #[inline(always)]\n\n pub fn vc_corereset(&self) -> VC_CORERESET_R {\n\n 
VC_CORERESET_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 24 - Trace system enable\"]\n\n #[inline(always)]\n\n pub fn trcena(&mut self) -> TRCENA_W {\n\n TRCENA_W { w: self }\n\n }\n\n #[doc = \"Bit 19 - Indicates that the debug monitor is caused by a manual pending request rather than a hardware event\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 49, "score": 63399.62237858475 }, { "content": " DBGKEY_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Set to break a stalled memory access\"]\n\n #[inline(always)]\n\n pub fn c_snapstall(&mut self) -> C_SNAPSTALL_W {\n\n C_SNAPSTALL_W { w: self }\n\n }\n\n #[doc = \"Bit 3 - Mask interrupts while stepping\"]\n\n #[inline(always)]\n\n pub fn c_maskints(&mut self) -> C_MASKINTS_W {\n\n C_MASKINTS_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - Single step the processor\"]\n\n #[inline(always)]\n\n pub fn c_step(&mut self) -> C_STEP_W {\n\n C_STEP_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Halt the processor\"]\n\n #[inline(always)]\n\n pub fn c_halt(&mut self) -> C_HALT_W {\n\n C_HALT_W { w: self }\n\n }\n\n #[doc = \"Bit 0 - Enable halt mode debugging\"]\n\n #[inline(always)]\n\n pub fn c_debugen(&mut self) -> C_DEBUGEN_W {\n\n C_DEBUGEN_W { w: self }\n\n }\n\n}\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 50, "score": 63398.23382615315 }, { "content": "#[doc = \"Write proxy for field `MON_PEND`\"]\n\npub struct MON_PEND_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MON_PEND_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: MON_PEND_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Clear the pending status of the debug monitor exception\"]\n\n #[inline(always)]\n\n pub fn debug_mon_clrpend(self) -> &'a mut W {\n\n self.variant(MON_PEND_A::DEBUG_MON_CLRPEND)\n\n }\n\n #[doc = \"Set pending status of the debug monitor exception\"]\n\n #[inline(always)]\n\n pub fn debug_mon_setpend(self) -> &'a mut W {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 51, "score": 63397.268034504414 }, { "content": " REGSEL_R15 = 15,\n\n #[doc = \"16: Select xPSR/flags\"]\n\n REGSEL_PSRFLGS = 16,\n\n #[doc = \"17: Select main stack pointer\"]\n\n REGSEL_MSP = 17,\n\n #[doc = \"18: Select process stack pointer\"]\n\n REGSEL_PSP = 18,\n\n #[doc = \"20: Access other registers including control, FAULTMASK, BASEPRI and PRIMASK\"]\n\n REGSEL_SPECREG = 20,\n\n}\n\nimpl From<REGSEL_AW> for u8 {\n\n #[inline(always)]\n\n fn from(variant: REGSEL_AW) -> Self {\n\n variant as _\n\n }\n\n}\n\n#[doc = \"Write proxy for field `REGSEL`\"]\n\npub struct REGSEL_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 52, "score": 63394.725348687105 }, { "content": " #[inline(always)]\n\n pub fn vc_harderr(&mut self) -> VC_HARDERR_W {\n\n VC_HARDERR_W { w: self }\n\n }\n\n #[doc = \"Bit 9 - Debug trap on interrupt service errors\"]\n\n #[inline(always)]\n\n pub fn vc_interr(&mut self) -> VC_INTERR_W {\n\n VC_INTERR_W { w: self }\n\n }\n\n #[doc = \"Bit 8 - Debug trap on bus faults\"]\n\n #[inline(always)]\n\n pub fn vc_buserr(&mut self) -> VC_BUSERR_W {\n\n VC_BUSERR_W { w: self }\n\n }\n\n #[doc = \"Bit 7 - Debug trap on usage fault state errors\"]\n\n #[inline(always)]\n\n pub fn vc_staterr(&mut self) -> VC_STATERR_W {\n\n VC_STATERR_W { w: self }\n\n }\n\n #[doc = \"Bit 6 - Debug trap on fault-enabled checking errors (e.g. 
unaligned access, divide by zero, etc.)\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 53, "score": 63393.19782857262 }, { "content": " #[inline(always)]\n\n pub fn vc_chkerr(&mut self) -> VC_CHKERR_W {\n\n VC_CHKERR_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Debug trap on usage fault no coprocessor errors\"]\n\n #[inline(always)]\n\n pub fn vc_nocperr(&mut self) -> VC_NOCPERR_W {\n\n VC_NOCPERR_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - Debug trap on memory management fault\"]\n\n #[inline(always)]\n\n pub fn vc_mmerr(&mut self) -> VC_MMERR_W {\n\n VC_MMERR_W { w: self }\n\n }\n\n #[doc = \"Bit 0 - Debug trap on core reset\"]\n\n #[inline(always)]\n\n pub fn vc_corereset(&mut self) -> VC_CORERESET_W {\n\n VC_CORERESET_W { w: self }\n\n }\n\n}\n", "file_path": "src/debug/debug_demcr.rs", "rank": 54, "score": 63390.01612870582 }, { "content": " #[inline(always)]\n\n pub fn mon_req(&mut self) -> MON_REQ_W {\n\n MON_REQ_W { w: self }\n\n }\n\n #[doc = \"Bit 18 - Single step the processor\"]\n\n #[inline(always)]\n\n pub fn mon_step(&mut self) -> MON_STEP_W {\n\n MON_STEP_W { w: self }\n\n }\n\n #[doc = \"Bit 17 - Pend the monitor exception request\"]\n\n #[inline(always)]\n\n pub fn mon_pend(&mut self) -> MON_PEND_W {\n\n MON_PEND_W { w: self }\n\n }\n\n #[doc = \"Bit 16 - Enable the debug monitor exception\"]\n\n #[inline(always)]\n\n pub fn mon_en(&mut self) -> MON_EN_W {\n\n MON_EN_W { w: self }\n\n }\n\n #[doc = \"Bit 10 - Debug trap on hard faults\"]\n", "file_path": "src/debug/debug_demcr.rs", "rank": 55, "score": 63389.627038904364 }, { "content": " #[doc = \"Bit 17 - Indicates is core is halted\"]\n\n #[inline(always)]\n\n pub fn s_halt(&self) -> S_HALT_R {\n\n S_HALT_R::new(((self.bits >> 17) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Indicates register read/write operation is completed\"]\n\n #[inline(always)]\n\n pub fn s_regrdy(&self) -> S_REGRDY_R {\n\n S_REGRDY_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Set to break a stalled memory access\"]\n\n #[inline(always)]\n\n pub fn c_snapstall(&self) -> C_SNAPSTALL_R {\n\n C_SNAPSTALL_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - Mask interrupts while stepping\"]\n\n #[inline(always)]\n\n pub fn c_maskints(&self) -> C_MASKINTS_R {\n\n C_MASKINTS_R::new(((self.bits >> 3) & 0x01) != 0)\n\n }\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 56, "score": 63389.304308380415 }, { "content": " VC_INTERR_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Debug trap on bus faults\"]\n\n #[inline(always)]\n\n pub fn vc_buserr(&self) -> VC_BUSERR_R {\n\n VC_BUSERR_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7 - Debug trap on usage fault state errors\"]\n\n #[inline(always)]\n\n pub fn vc_staterr(&self) -> VC_STATERR_R {\n\n VC_STATERR_R::new(((self.bits >> 7) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6 - Debug trap on fault-enabled checking errors (e.g. unaligned access, divide by zero, etc.)\"]\n\n #[inline(always)]\n\n pub fn vc_chkerr(&self) -> VC_CHKERR_R {\n\n VC_CHKERR_R::new(((self.bits >> 6) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Debug trap on usage fault no coprocessor errors\"]\n\n #[inline(always)]\n\n pub fn vc_nocperr(&self) -> VC_NOCPERR_R {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 57, "score": 63387.947269272896 }, { "content": " #[doc = \"Bit 25 - Core has been reset or is being rest. 
Bit is cleared on read.\"]\n\n #[inline(always)]\n\n pub fn s_reset_st(&self) -> S_RESET_ST_R {\n\n S_RESET_ST_R::new(((self.bits >> 25) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 24\"]\n\n #[inline(always)]\n\n pub fn s_retire_st(&self) -> S_RETIRE_ST_R {\n\n S_RETIRE_ST_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 19 - Indicates if core is in lockup state\"]\n\n #[inline(always)]\n\n pub fn s_lockup(&self) -> S_LOCKUP_R {\n\n S_LOCKUP_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18 - Indicates if core is in sleep mode\"]\n\n #[inline(always)]\n\n pub fn s_sleep(&self) -> S_SLEEP_R {\n\n S_SLEEP_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n", "file_path": "src/debug/debug_dhcsr.rs", "rank": 58, "score": 63385.63010702852 }, { "content": "impl<'a> REGSEL_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: REGSEL_AW) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n\n }\n\n #[doc = \"Select R0\"]\n\n #[inline(always)]\n\n pub fn regsel_r0(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R0)\n\n }\n\n #[doc = \"Select R1\"]\n\n #[inline(always)]\n\n pub fn regsel_r1(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R1)\n\n }\n\n #[doc = \"Select R2\"]\n\n #[inline(always)]\n\n pub fn regsel_r2(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R2)\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 59, "score": 63385.55668794304 }, { "content": " MON_STEP_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 17 - Pend the monitor exception request\"]\n\n #[inline(always)]\n\n pub fn mon_pend(&self) -> MON_PEND_R {\n\n MON_PEND_R::new(((self.bits >> 17) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Enable the debug monitor exception\"]\n\n #[inline(always)]\n\n pub fn mon_en(&self) -> MON_EN_R {\n\n MON_EN_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 10 - Debug trap on hard faults\"]\n\n #[inline(always)]\n\n pub fn vc_harderr(&self) -> VC_HARDERR_R {\n\n VC_HARDERR_R::new(((self.bits >> 10) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Debug trap on interrupt service errors\"]\n\n #[inline(always)]\n\n pub fn vc_interr(&self) -> VC_INTERR_R {\n", "file_path": "src/debug/debug_demcr.rs", "rank": 60, "score": 63384.956988308586 }, { "content": " }\n\n #[doc = \"Select R7\"]\n\n #[inline(always)]\n\n pub fn regsel_r7(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R7)\n\n }\n\n #[doc = \"Select R8\"]\n\n #[inline(always)]\n\n pub fn regsel_r8(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R8)\n\n }\n\n #[doc = \"Select R9\"]\n\n #[inline(always)]\n\n pub fn regsel_r9(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R9)\n\n }\n\n #[doc = \"Select R10\"]\n\n #[inline(always)]\n\n pub fn regsel_r10(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R10)\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 61, "score": 63375.04939446442 }, { "content": " }\n\n #[doc = \"Select R11\"]\n\n #[inline(always)]\n\n pub fn regsel_r11(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R11)\n\n }\n\n #[doc = \"Select R12\"]\n\n #[inline(always)]\n\n pub fn regsel_r12(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R12)\n\n }\n\n #[doc = \"Select R13\"]\n\n #[inline(always)]\n\n pub fn regsel_r13(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R13)\n\n }\n\n #[doc = \"Select R14\"]\n\n #[inline(always)]\n\n pub fn regsel_r14(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R14)\n", "file_path": 
"src/debug/debug_dcrsr.rs", "rank": 62, "score": 63375.04939446442 }, { "content": " }\n\n #[doc = \"Select R3\"]\n\n #[inline(always)]\n\n pub fn regsel_r3(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R3)\n\n }\n\n #[doc = \"Select R4\"]\n\n #[inline(always)]\n\n pub fn regsel_r4(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R4)\n\n }\n\n #[doc = \"Select R5\"]\n\n #[inline(always)]\n\n pub fn regsel_r5(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R5)\n\n }\n\n #[doc = \"Select R6\"]\n\n #[inline(always)]\n\n pub fn regsel_r6(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R6)\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 63, "score": 63375.04939446442 }, { "content": " }\n\n #[doc = \"Select R15\"]\n\n #[inline(always)]\n\n pub fn regsel_r15(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_R15)\n\n }\n\n #[doc = \"Select xPSR/flags\"]\n\n #[inline(always)]\n\n pub fn regsel_psrflgs(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_PSRFLGS)\n\n }\n\n #[doc = \"Select main stack pointer\"]\n\n #[inline(always)]\n\n pub fn regsel_msp(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_MSP)\n\n }\n\n #[doc = \"Select process stack pointer\"]\n\n #[inline(always)]\n\n pub fn regsel_psp(self) -> &'a mut W {\n\n self.variant(REGSEL_AW::REGSEL_PSP)\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 64, "score": 63374.81546333747 }, { "content": " REGSEL_R5 = 5,\n\n #[doc = \"6: Select R6\"]\n\n REGSEL_R6 = 6,\n\n #[doc = \"7: Select R7\"]\n\n REGSEL_R7 = 7,\n\n #[doc = \"8: Select R8\"]\n\n REGSEL_R8 = 8,\n\n #[doc = \"9: Select R9\"]\n\n REGSEL_R9 = 9,\n\n #[doc = \"10: Select R10\"]\n\n REGSEL_R10 = 10,\n\n #[doc = \"11: Select R11\"]\n\n REGSEL_R11 = 11,\n\n #[doc = \"12: Select R12\"]\n\n REGSEL_R12 = 12,\n\n #[doc = \"13: Select R13\"]\n\n REGSEL_R13 = 13,\n\n #[doc = \"14: Select R14\"]\n\n REGSEL_R14 = 14,\n\n #[doc = \"15: Select R15\"]\n", "file_path": "src/debug/debug_dcrsr.rs", "rank": 65, "score": 63359.219319485535 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 66, "score": 62924.25430686071 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 67, "score": 55286.1537546481 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 68, "score": 55274.77486772495 }, { "content": "impl crate::Writable for DEBUG_DHCSR {}\n\n#[doc = \"Debug Halting Control and Status Register\"]\n\npub mod debug_dhcsr;\n\n#[doc = \"Debug Core Register Selector Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [debug_dcrsr](debug_dcrsr) module\"]\n\npub type DEBUG_DCRSR = crate::Reg<u32, _DEBUG_DCRSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _DEBUG_DCRSR;\n\n#[doc = \"`read()` method returns [debug_dcrsr::R](debug_dcrsr::R) reader structure\"]\n\nimpl crate::Readable for DEBUG_DCRSR {}\n\n#[doc = \"`write(|w| ..)` method takes [debug_dcrsr::W](debug_dcrsr::W) writer structure\"]\n\nimpl crate::Writable for DEBUG_DCRSR {}\n\n#[doc = \"Debug Core Register Selector Register\"]\n\npub mod debug_dcrsr;\n\n#[doc = \"Debug Core Register Data Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [debug_dcrdr](debug_dcrdr) module\"]\n\npub type DEBUG_DCRDR = crate::Reg<u32, _DEBUG_DCRDR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _DEBUG_DCRDR;\n\n#[doc = \"`read()` method returns [debug_dcrdr::R](debug_dcrdr::R) reader structure\"]\n", "file_path": "src/debug.rs", "rank": 69, "score": 54515.88751712053 }, { "content": "impl crate::Readable for DEBUG_DCRDR {}\n\n#[doc = \"`write(|w| ..)` method takes [debug_dcrdr::W](debug_dcrdr::W) writer structure\"]\n\nimpl crate::Writable for DEBUG_DCRDR {}\n\n#[doc = \"Debug Core Register Data Register\"]\n\npub mod debug_dcrdr;\n\n#[doc = \"Debug Exception and Monitor Control Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [debug_demcr](debug_demcr) module\"]\n\npub type DEBUG_DEMCR = crate::Reg<u32, _DEBUG_DEMCR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _DEBUG_DEMCR;\n\n#[doc = \"`read()` method returns [debug_demcr::R](debug_demcr::R) reader structure\"]\n\nimpl crate::Readable for DEBUG_DEMCR {}\n\n#[doc = \"`write(|w| ..)` method takes [debug_demcr::W](debug_demcr::W) writer structure\"]\n\nimpl crate::Writable for DEBUG_DEMCR {}\n\n#[doc = \"Debug Exception and Monitor Control Register\"]\n\npub mod debug_demcr;\n", "file_path": "src/debug.rs", "rank": 70, "score": 54514.72084587185 }, { "content": "#[doc = r\"Register block\"]\n\n#[repr(C)]\n\npub struct RegisterBlock {\n\n #[doc = \"0x00 - Debug Halting Control and Status Register\"]\n\n pub debug_dhcsr: DEBUG_DHCSR,\n\n #[doc = \"0x04 - Debug Core Register Selector Register\"]\n\n pub debug_dcrsr: DEBUG_DCRSR,\n\n #[doc = \"0x08 - Debug Core Register Data Register\"]\n\n pub debug_dcrdr: DEBUG_DCRDR,\n\n #[doc = \"0x0c - Debug Exception and Monitor Control Register\"]\n\n pub debug_demcr: DEBUG_DEMCR,\n\n}\n\n#[doc = \"Debug Halting Control and Status Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [debug_dhcsr](debug_dhcsr) module\"]\n\npub type DEBUG_DHCSR = crate::Reg<u32, _DEBUG_DHCSR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _DEBUG_DHCSR;\n\n#[doc = \"`read()` method returns [debug_dhcsr::R](debug_dhcsr::R) reader structure\"]\n\nimpl crate::Readable for DEBUG_DHCSR {}\n\n#[doc = \"`write(|w| ..)` method takes [debug_dhcsr::W](debug_dhcsr::W) writer structure\"]\n", "file_path": "src/debug.rs", "rank": 71, "score": 54514.06340217679 }, { "content": "#[doc = \"Reader of register CRC_VALUE\"]\n\npub type R = crate::R<u32, super::CRC_VALUE>;\n\n#[doc = \"Writer for register CRC_VALUE\"]\n\npub type W = crate::W<u32, super::CRC_VALUE>;\n\n#[doc = \"Register CRC_VALUE `reset()`'s with value 0xffff\"]\n\nimpl crate::ResetValue for super::CRC_VALUE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xffff\n\n }\n\n}\n\n#[doc = \"CRC generator value: Write 0xFFFFFFF (32) or 0xFFFF (CCITT) to initialize the CRC, read provides the current CRC value.\\n\\nValue on reset: 65535\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u32)]\n\npub enum CURRENT_CRC_A {\n\n #[doc = \"65535: Initial value for the CRC CCITT calculation\"]\n\n CRC_CCITT_INIT_VALUE = 65535,\n\n #[doc = \"4294967295: Initial value for the CRC 32 calculation\"]\n\n CRC_32_INIT_VALUE = 4294967295,\n", "file_path": "src/crc/crc_value.rs", "rank": 72, "score": 49956.28143884527 }, { "content": " }\n\n #[doc = \"Initial value for the CRC CCITT calculation\"]\n\n #[inline(always)]\n\n pub fn crc_ccitt_init_value(self) -> &'a mut W {\n\n self.variant(CURRENT_CRC_A::CRC_CCITT_INIT_VALUE)\n\n }\n\n #[doc = \"Initial value for the CRC 32 calculation\"]\n\n #[inline(always)]\n\n pub fn crc_32_init_value(self) -> &'a mut W {\n\n self.variant(CURRENT_CRC_A::CRC_32_INIT_VALUE)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - CRC generator value: Write 0xFFFFFFF (32) or 0xFFFF (CCITT) to initialize the CRC, read provides the current CRC value.\"]\n", "file_path": "src/crc/crc_value.rs", "rank": 73, "score": 49939.16973745901 }, { "content": " #[doc = \"Checks if the value of the field is `CRC_CCITT_INIT_VALUE`\"]\n\n #[inline(always)]\n\n pub fn is_crc_ccitt_init_value(&self) -> bool {\n\n *self == CURRENT_CRC_A::CRC_CCITT_INIT_VALUE\n\n }\n\n #[doc = \"Checks if the value of the field is `CRC_32_INIT_VALUE`\"]\n\n #[inline(always)]\n\n pub fn is_crc_32_init_value(&self) -> bool {\n\n *self == CURRENT_CRC_A::CRC_32_INIT_VALUE\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CURRENT_CRC`\"]\n\npub struct CURRENT_CRC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CURRENT_CRC_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: CURRENT_CRC_A) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n", "file_path": "src/crc/crc_value.rs", "rank": 74, "score": 49939.057340938656 }, { "content": "}\n\nimpl From<CURRENT_CRC_A> for u32 {\n\n #[inline(always)]\n\n fn from(variant: CURRENT_CRC_A) -> Self {\n\n variant as _\n\n }\n\n}\n\n#[doc = \"Reader of field `CURRENT_CRC`\"]\n\npub type CURRENT_CRC_R = crate::R<u32, CURRENT_CRC_A>;\n\nimpl CURRENT_CRC_R {\n\n #[doc = r\"Get enumerated values 
variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> crate::Variant<u32, CURRENT_CRC_A> {\n\n use crate::Variant::*;\n\n match self.bits {\n\n 65535 => Val(CURRENT_CRC_A::CRC_CCITT_INIT_VALUE),\n\n 4294967295 => Val(CURRENT_CRC_A::CRC_32_INIT_VALUE),\n\n i => Res(i),\n\n }\n\n }\n", "file_path": "src/crc/crc_value.rs", "rank": 75, "score": 49938.84010361827 }, { "content": " #[inline(always)]\n\n pub fn current_crc(&self) -> CURRENT_CRC_R {\n\n CURRENT_CRC_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - CRC generator value: Write 0xFFFFFFF (32) or 0xFFFF (CCITT) to initialize the CRC, read provides the current CRC value.\"]\n\n #[inline(always)]\n\n pub fn current_crc(&mut self) -> CURRENT_CRC_W {\n\n CURRENT_CRC_W { w: self }\n\n }\n\n}\n", "file_path": "src/crc/crc_value.rs", "rank": 76, "score": 49934.53474744288 }, { "content": "#[doc = \"Reader of register DIO_MODE\"]\n\npub type R = crate::R<u32, super::DIO_MODE>;\n\n#[doc = \"DIO\\\\[15:0\\\\]\n\nmode\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u16)]\n\npub enum GPIO_A {\n\n #[doc = \"0: This DIO is not configured as a CM3 controlled GPIO\"]\n\n DIO0_IS_NOT_GPIO = 0,\n\n #[doc = \"1: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO0_IS_GPIO = 1,\n\n #[doc = \"2: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO1_IS_GPIO = 2,\n\n #[doc = \"4: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO2_IS_GPIO = 4,\n\n #[doc = \"8: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO3_IS_GPIO = 8,\n\n #[doc = \"16: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO4_IS_GPIO = 16,\n\n #[doc = \"32: This DIO is configured as a CM3 controlled GPIO\"]\n", "file_path": "src/dio/dio_mode.rs", "rank": 77, "score": 49931.850466931486 }, { "content": " DIO15_IS_GPIO = 32768,\n\n}\n\nimpl From<GPIO_A> for u16 {\n\n #[inline(always)]\n\n fn from(variant: GPIO_A) -> Self {\n\n variant as _\n\n }\n\n}\n\n#[doc = \"Reader of field `GPIO`\"]\n\npub type GPIO_R = crate::R<u16, GPIO_A>;\n\nimpl GPIO_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> crate::Variant<u16, GPIO_A> {\n\n use crate::Variant::*;\n\n match self.bits {\n\n 0 => Val(GPIO_A::DIO0_IS_NOT_GPIO),\n\n 1 => Val(GPIO_A::DIO0_IS_GPIO),\n\n 2 => Val(GPIO_A::DIO1_IS_GPIO),\n\n 4 => Val(GPIO_A::DIO2_IS_GPIO),\n", "file_path": "src/dio/dio_mode.rs", "rank": 78, "score": 49924.88545239991 }, { "content": " }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:15 - DIO\\\\[15:0\\\\]\n\nmode\"]\n\n #[inline(always)]\n\n pub fn gpio(&self) -> GPIO_R {\n\n GPIO_R::new((self.bits & 0xffff) as u16)\n\n }\n\n}\n", "file_path": "src/dio/dio_mode.rs", "rank": 79, "score": 49913.512118648105 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `DIO8_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio8_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO8_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO9_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio9_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO9_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO10_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio10_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO10_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO11_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio11_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO11_IS_GPIO\n", "file_path": "src/dio/dio_mode.rs", "rank": 80, 
"score": 49909.700886519684 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `DIO0_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio0_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO0_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO1_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio1_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO1_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO2_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio2_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO2_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO3_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio3_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO3_IS_GPIO\n", "file_path": "src/dio/dio_mode.rs", "rank": 81, "score": 49909.700886519684 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `DIO12_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio12_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO12_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO13_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio13_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO13_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO14_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio14_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO14_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO15_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio15_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO15_IS_GPIO\n", "file_path": "src/dio/dio_mode.rs", "rank": 82, "score": 49909.700886519684 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `DIO4_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio4_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO4_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO5_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio5_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO5_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO6_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio6_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO6_IS_GPIO\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO7_IS_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio7_is_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO7_IS_GPIO\n", "file_path": "src/dio/dio_mode.rs", "rank": 83, "score": 49909.700886519684 }, { "content": " 8 => Val(GPIO_A::DIO3_IS_GPIO),\n\n 16 => Val(GPIO_A::DIO4_IS_GPIO),\n\n 32 => Val(GPIO_A::DIO5_IS_GPIO),\n\n 64 => Val(GPIO_A::DIO6_IS_GPIO),\n\n 128 => Val(GPIO_A::DIO7_IS_GPIO),\n\n 256 => Val(GPIO_A::DIO8_IS_GPIO),\n\n 512 => Val(GPIO_A::DIO9_IS_GPIO),\n\n 1024 => Val(GPIO_A::DIO10_IS_GPIO),\n\n 2048 => Val(GPIO_A::DIO11_IS_GPIO),\n\n 4096 => Val(GPIO_A::DIO12_IS_GPIO),\n\n 8192 => Val(GPIO_A::DIO13_IS_GPIO),\n\n 16384 => Val(GPIO_A::DIO14_IS_GPIO),\n\n 32768 => Val(GPIO_A::DIO15_IS_GPIO),\n\n i => Res(i),\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `DIO0_IS_NOT_GPIO`\"]\n\n #[inline(always)]\n\n pub fn is_dio0_is_not_gpio(&self) -> bool {\n\n *self == GPIO_A::DIO0_IS_NOT_GPIO\n", "file_path": "src/dio/dio_mode.rs", "rank": 84, "score": 49899.68341469848 }, { "content": " DIO5_IS_GPIO = 32,\n\n #[doc = \"64: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO6_IS_GPIO = 64,\n\n #[doc = \"128: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO7_IS_GPIO = 128,\n\n #[doc = \"256: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO8_IS_GPIO = 256,\n\n #[doc = \"512: 
This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO9_IS_GPIO = 512,\n\n #[doc = \"1024: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO10_IS_GPIO = 1024,\n\n #[doc = \"2048: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO11_IS_GPIO = 2048,\n\n #[doc = \"4096: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO12_IS_GPIO = 4096,\n\n #[doc = \"8192: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO13_IS_GPIO = 8192,\n\n #[doc = \"16384: This DIO is configured as a CM3 controlled GPIO\"]\n\n DIO14_IS_GPIO = 16384,\n\n #[doc = \"32768: This DIO is configured as a CM3 controlled GPIO\"]\n", "file_path": "src/dio/dio_mode.rs", "rank": 85, "score": 49894.91491201828 }, { "content": "#[doc = \"Reader of register ASRC_INT_ENABLE\"]\n\npub type R = crate::R<u32, super::ASRC_INT_ENABLE>;\n\n#[doc = \"Writer for register ASRC_INT_ENABLE\"]\n\npub type W = crate::W<u32, super::ASRC_INT_ENABLE>;\n\n#[doc = \"Register ASRC_INT_ENABLE `reset()`'s with value 0x08\"]\n\nimpl crate::ResetValue for super::ASRC_INT_ENABLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x08\n\n }\n\n}\n\n#[doc = \"The ASRC state/configuration update error interrupt mask\\n\\nValue on reset: 1\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ASRC_UPDATE_ERR_A {\n\n #[doc = \"0: This source can not set an interrupt\"]\n\n INT_DIS_ASRC_UPDATE_ERR = 0,\n\n #[doc = \"1: This source can set the interrupt line\"]\n\n INT_EBL_ASRC_UPDATE_ERR = 1,\n\n}\n", "file_path": "src/asrc/asrc_int_enable.rs", "rank": 86, "score": 47900.35995257849 }, { "content": " self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Copier or Comparator Mode Configuration\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Flash copier mode\"]\n\n COPY_MODE = 0,\n\n #[doc = \"1: Flash comparator mode\"]\n\n COMPARATOR_MODE = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 87, "score": 47899.066092119494 }, { "content": "#[doc = \"Reader of register FLASH_COPY_CTRL\"]\n\npub type R = crate::R<u32, super::FLASH_COPY_CTRL>;\n\n#[doc = \"Writer for register FLASH_COPY_CTRL\"]\n\npub type W = crate::W<u32, super::FLASH_COPY_CTRL>;\n\n#[doc = \"Register FLASH_COPY_CTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FLASH_COPY_CTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Error status\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ERROR_A {\n\n #[doc = \"0: No write / comparison error\"]\n\n COPY_NO_ERROR = 0,\n\n #[doc = \"1: Write or comparison error\"]\n\n COPY_ERROR = 1,\n\n}\n", "file_path": "src/flash/flash_copy_ctrl.rs", "rank": 88, "score": 47897.330112288524 }, { "content": " #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | 
(((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"The ASRC_IN register interrupt status\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ASRC_IN_REQ_A {\n", "file_path": "src/asrc/asrc_int_enable.rs", "rank": 89, "score": 47895.07431943407 }, { "content": " COPY_TO_40BIT = 1,\n\n}\n\nimpl From<COPY_MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: COPY_MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COPY_MODE`\"]\n\npub type COPY_MODE_R = crate::R<bool, COPY_MODE_A>;\n\nimpl COPY_MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> COPY_MODE_A {\n\n match self.bits {\n\n false => COPY_MODE_A::COPY_TO_32BIT,\n\n true => COPY_MODE_A::COPY_TO_40BIT,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `COPY_TO_32BIT`\"]\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 90, "score": 47894.81060432739 }, { "content": "#[doc = \"Reader of register SYSCTRL_LPDSP32_CNT\"]\n\npub type R = crate::R<u32, super::SYSCTRL_LPDSP32_CNT>;\n\n#[doc = \"Writer for register SYSCTRL_LPDSP32_CNT\"]\n\npub type W = crate::W<u32, super::SYSCTRL_LPDSP32_CNT>;\n\n#[doc = \"Register SYSCTRL_LPDSP32_CNT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SYSCTRL_LPDSP32_CNT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LPDSP32_CNT`\"]\n\npub type LPDSP32_CNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `LPDSP32_CNT`\"]\n\npub struct LPDSP32_CNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LPDSP32_CNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/sysctrl/sysctrl_lpdsp32_cnt.rs", "rank": 91, "score": 47893.7789140873 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"The ASRC input interface error interrupt mask\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ASRC_IN_ERR_A {\n\n #[doc = \"0: This source can not set an interrupt\"]\n\n INT_DIS_ASRC_IN_ERR = 0,\n\n #[doc = \"1: This source can set the interrupt line\"]\n\n INT_EBL_ASRC_IN_ERR = 1,\n\n}\n\nimpl From<ASRC_IN_ERR_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: ASRC_IN_ERR_A) -> Self {\n\n variant as u8 != 0\n\n }\n", "file_path": "src/asrc/asrc_int_enable.rs", "rank": 92, "score": 47891.98338179379 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Select copier mode (32-bit or 40-bit)\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum COPY_MODE_A {\n\n #[doc = \"0: Copy Flash to 32-bit memory\"]\n\n COPY_TO_32BIT = 0,\n\n #[doc = \"1: Copy Flash to 40-bit memory\"]\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 93, "score": 47891.29931907313 }, { "content": " variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n 
#[inline(always)]\n\n pub fn variant(&self) -> MODE_A {\n\n match self.bits {\n\n false => MODE_A::COPY_MODE,\n\n true => MODE_A::COMPARATOR_MODE,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `COPY_MODE`\"]\n\n #[inline(always)]\n\n pub fn is_copy_mode(&self) -> bool {\n\n *self == MODE_A::COPY_MODE\n\n }\n\n #[doc = \"Checks if the value of the field is `COMPARATOR_MODE`\"]\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 94, "score": 47890.198714691665 }, { "content": "#[doc = \"Reader of register FLASH_COPY_CFG\"]\n\npub type R = crate::R<u32, super::FLASH_COPY_CFG>;\n\n#[doc = \"Writer for register FLASH_COPY_CFG\"]\n\npub type W = crate::W<u32, super::FLASH_COPY_CFG>;\n\n#[doc = \"Register FLASH_COPY_CFG `reset()`'s with value 0x0002_0000\"]\n\nimpl crate::ResetValue for super::FLASH_COPY_CFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0002_0000\n\n }\n\n}\n\n#[doc = \"Comparator address increment/decrement by 1 or 2\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum COMP_ADDR_STEP_A {\n\n #[doc = \"0: Address increment/decrement by 1 between two reads\"]\n\n COMP_ADDR_STEP_1 = 0,\n\n #[doc = \"1: Address increment/decrement by 2 between two reads\"]\n\n COMP_ADDR_STEP_2 = 1,\n\n}\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 95, "score": 47889.19522239034 }, { "content": " #[inline(always)]\n\n pub fn is_copy_to_32bit(&self) -> bool {\n\n *self == COPY_MODE_A::COPY_TO_32BIT\n\n }\n\n #[doc = \"Checks if the value of the field is `COPY_TO_40BIT`\"]\n\n #[inline(always)]\n\n pub fn is_copy_to_40bit(&self) -> bool {\n\n *self == COPY_MODE_A::COPY_TO_40BIT\n\n }\n\n}\n\n#[doc = \"Write proxy for field `COPY_MODE`\"]\n\npub struct COPY_MODE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COPY_MODE_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: COPY_MODE_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 96, "score": 47888.42570696061 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Start the transfer\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum START_AW {\n\n #[doc = \"1: Start the current transfer\"]\n\n COPY_START = 1,\n\n}\n", "file_path": "src/flash/flash_copy_ctrl.rs", "rank": 97, "score": 47886.597187655716 }, { "content": " #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Comparator address-up or address-down\\n\\nValue on reset: 1\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum COMP_ADDR_DIR_A {\n\n #[doc = \"0: FLASH_COPIER address count-down\"]\n\n COMP_ADDR_DOWN = 0,\n\n #[doc = \"1: FLASH_COPIER address count-up\"]\n\n COMP_ADDR_UP = 1,\n\n}\n\nimpl From<COMP_ADDR_DIR_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: COMP_ADDR_DIR_A) -> Self {\n\n variant as u8 != 0\n\n }\n", "file_path": "src/flash/flash_copy_cfg.rs", "rank": 98, "score": 47886.26996904012 }, { "content": 
"impl From<ERROR_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: ERROR_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ERROR`\"]\n\npub type ERROR_R = crate::R<bool, ERROR_A>;\n\nimpl ERROR_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> ERROR_A {\n\n match self.bits {\n\n false => ERROR_A::COPY_NO_ERROR,\n\n true => ERROR_A::COPY_ERROR,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `COPY_NO_ERROR`\"]\n\n #[inline(always)]\n\n pub fn is_copy_no_error(&self) -> bool {\n", "file_path": "src/flash/flash_copy_ctrl.rs", "rank": 99, "score": 47886.10752632601 } ]
Rust
src/component/splits/tests/mod.rs
ash2x3zb9cy/livesplit-core
9c5e9c5877f905a518461e3a0586d58d4f840fcc
use super::{ ColumnSettings, ColumnStartWith, ColumnUpdateTrigger, ColumnUpdateWith, Component, Settings, State, }; use crate::{Run, Segment, TimeSpan, Timer, TimingMethod}; pub mod column; #[test] fn zero_visual_split_count_always_shows_all_splits() { let mut run = Run::new(); for _ in 0..32 { run.push_segment(Segment::new("")); } let timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { visual_split_count: 0, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_up(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); } #[test] fn one_visual_split() { let mut run = Run::new(); run.push_segment(Segment::new("A")); run.push_segment(Segment::new("B")); run.push_segment(Segment::new("C")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { always_show_last_split: false, split_preview_count: 0, visual_split_count: 1, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.start(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "B"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); } #[test] fn negative_segment_times() { let mut run = Run::new(); run.push_segment(Segment::new("")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { columns: vec![ColumnSettings { start_with: ColumnStartWith::Empty, update_with: ColumnUpdateWith::SegmentTime, update_trigger: ColumnUpdateTrigger::OnStartingSegment, ..Default::default() }], ..Default::default() }); timer.start(); timer.set_current_timing_method(TimingMethod::GameTime); timer.initialize_game_time(); timer.pause_game_time(); timer.set_game_time(TimeSpan::from_seconds(-1.0)); let state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].columns[0].value, "−0:01"); } #[test] fn unique_split_indices() { let mut run = Run::new(); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); let timer = Timer::new(run).unwrap(); let mut component = Component::with_settings(Settings { visual_split_count: 20, fill_with_blank_space: true, ..Default::default() }); let state = component.state(&timer.snapshot(), &Default::default()); let mut indices = state .splits .into_iter() .map(|s| s.index) .collect::<Vec<_>>(); indices.sort_unstable(); assert!(indices.windows(2).all(|pair| pair[0] != pair[1])); }
use super::{ ColumnSettings, ColumnStartWith, ColumnUpdateTrigger, ColumnUpdateWith, Component, Settings, State, }; use crate::{Run, Segment, TimeSpan, Timer, TimingMethod}; pub mod column; #[test] fn zero_visual_split_count_always_shows_all_splits() { let mut run = Run::new(); for _ in 0..32 { run.push_segment(Segment::new("")); } let timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { visual_split_count: 0, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_down(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); component.scroll_up(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits.len(), 32); } #[test] fn one_visual_split() { let mut run = Run::new(); run.push_segment(Segment::new("A")); run.push_segment(Segment::new("B")); run.push_segment(Segment::new("C")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings { always_show_last_split: false, split_preview_count: 0, visual_split_count: 1, ..Default::default() }); let mut state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.start(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "A"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "B"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); timer.split(); state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].name, "C"); assert_eq!(state.splits.len(), 1); } #[test] fn negative_segment_times() { let mut run = Run::new(); run.push_segment(Segment::new("")); let mut timer = Timer::new(run).unwrap(); let layout_settings = Default::default(); let mut component = Component::with_settings(Settings {
#[test] fn unique_split_indices() { let mut run = Run::new(); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); run.push_segment(Segment::new("")); let timer = Timer::new(run).unwrap(); let mut component = Component::with_settings(Settings { visual_split_count: 20, fill_with_blank_space: true, ..Default::default() }); let state = component.state(&timer.snapshot(), &Default::default()); let mut indices = state .splits .into_iter() .map(|s| s.index) .collect::<Vec<_>>(); indices.sort_unstable(); assert!(indices.windows(2).all(|pair| pair[0] != pair[1])); }
columns: vec![ColumnSettings { start_with: ColumnStartWith::Empty, update_with: ColumnUpdateWith::SegmentTime, update_trigger: ColumnUpdateTrigger::OnStartingSegment, ..Default::default() }], ..Default::default() }); timer.start(); timer.set_current_timing_method(TimingMethod::GameTime); timer.initialize_game_time(); timer.pause_game_time(); timer.set_game_time(TimeSpan::from_seconds(-1.0)); let state = component.state(&timer.snapshot(), &layout_settings); assert_eq!(state.splits[0].columns[0].value, "−0:01"); }
function_block-function_prefix_line
[ { "content": "pub fn start_run(timer: &mut Timer) {\n\n timer.set_current_timing_method(TimingMethod::GameTime);\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n timer.set_game_time(TimeSpan::zero());\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 0, "score": 413717.6256337635 }, { "content": "pub fn run_with_splits(timer: &mut Timer, splits: &[f64]) {\n\n start_run(timer);\n\n\n\n for &split in splits {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 1, "score": 389921.8534167691 }, { "content": "pub fn run_with_splits_opt(timer: &mut Timer, splits: &[Option<f64>]) {\n\n start_run(timer);\n\n make_progress_run_with_splits_opt(timer, splits);\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 2, "score": 373954.3383437826 }, { "content": "/// Same as run_with_splits_opt, but progresses an already active attempt and\n\n/// doesn't reset it. Useful for checking intermediate states.\n\npub fn make_progress_run_with_splits_opt(timer: &mut Timer, splits: &[Option<f64>]) {\n\n for &split in splits {\n\n if let Some(split) = split {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n } else {\n\n timer.skip_split();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 3, "score": 364115.6346116101 }, { "content": "fn fix_history_from_best_segment_times(segment: &mut Segment, method: TimingMethod) {\n\n if let Some(best_segment) = segment.best_segment_time()[method] {\n\n for (_, time) in segment.segment_history_mut().iter_mut() {\n\n // Make sure no times in the history are lower than the Best Segment\n\n if let Some(time) = &mut time[method] {\n\n if *time < best_segment {\n\n *time = best_segment;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Iterator that iterates over all the comparisons. 
This includes both the\n\n/// custom comparisons defined by the user and the Comparison Generators.\n\npub struct ComparisonsIter<'a> {\n\n custom: &'a [String],\n\n generators: &'a [Box<dyn ComparisonGenerator>],\n\n}\n\n\n", "file_path": "src/run/mod.rs", "rank": 4, "score": 347245.56230370776 }, { "content": "fn fix_history_from_none_best_segments(segment: &mut Segment, method: TimingMethod) {\n\n // Only do anything if the Best Segment Time is gone for the Segment in question\n\n if segment.best_segment_time()[method].is_none() {\n\n // Keep only the skipped segments\n\n segment\n\n .segment_history_mut()\n\n .retain(|&(_, time)| time[method].is_none());\n\n }\n\n}\n\n\n", "file_path": "src/run/mod.rs", "rank": 5, "score": 347245.56230370776 }, { "content": "/// Calculates whether or not the Split Times for the indicated split qualify as\n\n/// a Best Segment.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The split to check.\n\n/// - `method`: The timing method to use.\n\n///\n\n/// Returns whether or not the indicated split is a Best Segment.\n\npub fn check_best_segment(timer: &Timer, segment_index: usize, method: TimingMethod) -> bool {\n\n if timer.run().segment(segment_index).split_time()[method].is_none() {\n\n return false;\n\n }\n\n\n\n let delta = previous_segment_delta(timer, segment_index, best_segments::NAME, method);\n\n let current_segment = previous_segment_time(timer, segment_index, method);\n\n let best_segment = timer.run().segment(segment_index).best_segment_time()[method];\n\n best_segment.map_or(true, |b| {\n\n current_segment.map_or(false, |c| c < b) || delta.map_or(false, |d| d < TimeSpan::zero())\n\n })\n\n}\n", "file_path": "src/analysis/state_helper.rs", "rank": 6, "score": 327572.39997979224 }, { "content": "fn timer() -> Timer {\n\n Timer::new(run()).unwrap()\n\n}\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 7, "score": 324692.7641657198 }, { "content": "fn run() -> Run {\n\n let mut run = Run::new();\n\n\n\n run.push_segment(Segment::new(\"A\"));\n\n run.push_segment(Segment::new(\"B\"));\n\n run.push_segment(Segment::new(\"C\"));\n\n run.push_segment(Segment::new(\"D\"));\n\n run.push_segment(Segment::new(\"E\"));\n\n run.push_segment(Segment::new(\"F\"));\n\n\n\n run\n\n}\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 8, "score": 323566.47134881304 }, { "content": "fn run() -> Run {\n\n let mut run = Run::new();\n\n\n\n run.push_segment(Segment::new(\"A\"));\n\n run.push_segment(Segment::new(\"B\"));\n\n run.push_segment(Segment::new(\"C\"));\n\n\n\n run\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 9, "score": 323109.6270079617 }, { "content": "fn create_timer() -> Timer {\n\n let mut timer = tests_helper::create_timer(&[\"A\", \"B\"]);\n\n run_with_splits(&mut timer, &[12.3, 45.6]);\n\n timer\n\n}\n\n\n", "file_path": "src/component/segment_time/tests.rs", "rank": 10, "score": 309492.77069581294 }, { "content": "/// Calculates the PB chance for a run. No information about an active attempt\n\n/// is used. 
Instead the general chance to beat the Personal Best is calculated.\n\n/// The value is being reported as a floating point number in the range from 0\n\n/// (0%) to 1 (100%).\n\npub fn for_run(run: &Run, method: TimingMethod) -> f64 {\n\n calculate(run.segments(), method, TimeSpan::zero())\n\n}\n\n\n", "file_path": "src/analysis/pb_chance/mod.rs", "rank": 11, "score": 303710.6610308413 }, { "content": "pub fn update_state(\n\n state: &mut ColumnState,\n\n column: &ColumnSettings,\n\n timer: &Snapshot<'_>,\n\n layout_settings: &GeneralLayoutSettings,\n\n segment: &Segment,\n\n segment_index: usize,\n\n current_split: Option<usize>,\n\n method: TimingMethod,\n\n) {\n\n let method = column.timing_method.unwrap_or(method);\n\n let resolved_comparison = comparison::resolve(&column.comparison_override, timer);\n\n let comparison = comparison::or_current(resolved_comparison, timer);\n\n\n\n let update_value = column_update_value(\n\n column,\n\n timer,\n\n segment,\n\n segment_index,\n\n current_split,\n", "file_path": "src/component/splits/column.rs", "rank": 12, "score": 303204.2198483726 }, { "content": "fn make_uniform(state: &mut State) {\n\n for grid_line in &mut state.horizontal_grid_lines {\n\n *grid_line /= HEIGHT;\n\n }\n\n\n\n for grid_line in &mut state.vertical_grid_lines {\n\n *grid_line /= WIDTH;\n\n }\n\n\n\n state.middle /= HEIGHT;\n\n\n\n for point in &mut state.points {\n\n point.x /= WIDTH;\n\n point.y /= HEIGHT;\n\n }\n\n}\n\n\n", "file_path": "src/component/graph.rs", "rank": 13, "score": 302049.8181515887 }, { "content": "fn generate(segments: &mut [Segment], method: TimingMethod) {\n\n let mut attempt_id = None;\n\n for segment in segments.iter_mut().rev() {\n\n if let Some(max_index) = segment.segment_history().try_get_max_index() {\n\n attempt_id = Some(max_index);\n\n break;\n\n }\n\n }\n\n\n\n if let Some(attempt_id) = attempt_id {\n\n let mut remaining_segments = segments.iter_mut();\n\n\n\n let mut total_time = TimeSpan::zero();\n\n for segment in remaining_segments.by_ref() {\n\n let segment_time = segment.segment_history().get(attempt_id).map(|t| t[method]);\n\n\n\n let split_time = match segment_time {\n\n Some(Some(segment_time)) => {\n\n total_time += segment_time;\n\n Some(total_time)\n", "file_path": "src/comparison/latest_run.rs", "rank": 14, "score": 300947.00678857154 }, { "content": "fn prepare() -> (Timer, Component, GeneralLayoutSettings) {\n\n let mut run = Run::new();\n\n let mut segment = Segment::new(\"foo\");\n\n segment.set_icon(&[0x00, 0x12, 0x34]);\n\n run.push_segment(segment);\n\n let timer = Timer::new(run).unwrap();\n\n\n\n let component = Component::with_settings(Settings {\n\n display_icon: true,\n\n show_segment_name: true,\n\n ..Default::default()\n\n });\n\n\n\n (timer, component, GeneralLayoutSettings::default())\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 15, "score": 298878.0264780213 }, { "content": "pub fn settings<R>(reader: &mut Reader<R>, buf: &mut Vec<u8>, _: &mut Component) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n parse_children(reader, buf, |reader, tag| -> Result<()> {\n\n // Unused:\n\n // AttemptCount\n\n // UsePercentOfAttempts\n\n // UseFixedAttempts\n\n // IgnoreRunCount\n\n // FIXME:\n\n // DisplayOdds\n\n end_tag(reader, tag.into_buf())\n\n })?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/layout/parser/pb_chance.rs", "rank": 16, "score": 297506.20698330866 }, { "content": "/// Calculates the PB chance for a timer. The chance is calculated in terms of\n\n/// the current attempt. 
If there is no attempt in progress it yields the same\n\n/// result as the PB chance for the run. The value is being reported as a\n\n/// floating point number in the range from 0 (0%) to 1 (100%). Additionally a\n\n/// boolean is returned that indicates if the value is currently actively\n\n/// changing as time is being lost.\n\npub fn for_timer(timer: &Snapshot<'_>) -> (f64, bool) {\n\n let method = timer.current_timing_method();\n\n let all_segments = timer.run().segments();\n\n\n\n let is_live =\n\n super::check_live_delta(timer, false, comparison::personal_best::NAME, method).is_some();\n\n\n\n let (segments, current_time) = if is_live {\n\n // If there is a live delta, act as if we did just split.\n\n (\n\n &all_segments[timer.current_split_index().unwrap() + 1..],\n\n timer.current_time()[method].unwrap_or_default(),\n\n )\n\n } else if let Some((index, time)) = all_segments\n\n .iter()\n\n .enumerate()\n\n .rev()\n\n .find_map(|(i, s)| Some((i, s.split_time()[method]?)))\n\n {\n\n // Otherwise fall back to the the last split that we did split.\n", "file_path": "src/analysis/pb_chance/mod.rs", "rank": 17, "score": 294441.0361879027 }, { "content": "pub fn width(component: &ComponentState) -> f32 {\n\n match component {\n\n ComponentState::BlankSpace(state) => state.size as f32 * PSEUDO_PIXELS,\n\n ComponentState::DetailedTimer(_) => 7.0,\n\n ComponentState::Graph(_) => 7.0,\n\n ComponentState::KeyValue(_) => 6.0,\n\n ComponentState::Separator(_) => SEPARATOR_THICKNESS,\n\n ComponentState::Splits(state) => {\n\n let column_count = 2.0; // FIXME: Not always 2.\n\n let split_width = 2.0 + column_count * splits::COLUMN_WIDTH;\n\n state.splits.len() as f32 * split_width\n\n }\n\n ComponentState::Text(_) => 6.0,\n\n ComponentState::Timer(_) => 8.25,\n\n ComponentState::Title(_) => 8.0,\n\n }\n\n}\n\n\n", "file_path": "src/rendering/component/mod.rs", "rank": 18, "score": 292359.9025193773 }, { "content": "pub fn height(component: &ComponentState) -> f32 {\n\n match component {\n\n ComponentState::BlankSpace(state) => state.size as f32 * PSEUDO_PIXELS,\n\n ComponentState::DetailedTimer(_) => 2.5,\n\n ComponentState::Graph(state) => state.height as f32 * PSEUDO_PIXELS,\n\n ComponentState::KeyValue(state) => {\n\n if state.display_two_rows {\n\n TWO_ROW_HEIGHT\n\n } else {\n\n DEFAULT_COMPONENT_HEIGHT\n\n }\n\n }\n\n ComponentState::Separator(_) => SEPARATOR_THICKNESS,\n\n ComponentState::Splits(state) => {\n\n state.splits.len() as f32\n\n * if state.display_two_rows {\n\n TWO_ROW_HEIGHT\n\n } else {\n\n DEFAULT_COMPONENT_HEIGHT\n\n }\n", "file_path": "src/rendering/component/mod.rs", "rank": 19, "score": 292359.9025193773 }, { "content": "fn check_column_state(state: &State, state_index: usize, expected_values: Values) {\n\n let actual_values = state\n\n .splits\n\n .iter()\n\n .map(|split| split.columns[0].value.as_str())\n\n .collect::<Vec<_>>();\n\n let actual_colors = state\n\n .splits\n\n .iter()\n\n .map(|split| split.columns[0].semantic_color)\n\n .collect::<Vec<_>>();\n\n let actual_state = (actual_values, actual_colors);\n\n let (expected_values, expected_colors) = &expected_values[state_index];\n\n let expected_state = (expected_values.to_vec(), expected_colors.to_vec());\n\n assert_eq!(actual_state, expected_state, \"State index: {}\", state_index);\n\n}\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 20, "score": 291908.29996679694 }, { "content": "fn run_with_splits(timer: &mut Timer, splits: &[f64]) {\n\n timer.start();\n\n 
timer.initialize_game_time();\n\n timer.pause_game_time();\n\n\n\n for &split in splits {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "benches/balanced_pb.rs", "rank": 21, "score": 283514.1214553248 }, { "content": "fn timer() -> Timer {\n\n Timer::new(run()).unwrap()\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 22, "score": 282875.10886884807 }, { "content": "pub fn create_timer(names: &[&str]) -> Timer {\n\n Timer::new(create_run(names)).unwrap()\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 23, "score": 281432.0123330222 }, { "content": "pub fn create_run(names: &[&str]) -> Run {\n\n let mut run = Run::new();\n\n for &name in names {\n\n run.push_segment(Segment::new(name));\n\n }\n\n run\n\n}\n\n\n", "file_path": "src/tests_helper.rs", "rank": 24, "score": 280333.08378060994 }, { "content": "/// Populates the segments with a goal comparison. The segment history is used\n\n/// to generate comparison times such that they end up with the goal time\n\n/// specified. The values are stored in the comparison with the name provided.\n\n/// Only the range between the sum of the best segments and the sum of the worst\n\n/// segments is supported. Every other goal time is capped within that range.\n\npub fn generate(segments: &mut [Segment], goal_time: Time, comparison: &str) {\n\n let mut skill_curve = SkillCurve::new();\n\n\n\n if let Some(real_time) = goal_time.real_time {\n\n generate_for_timing_method_with_buf(\n\n segments,\n\n TimingMethod::RealTime,\n\n Some(real_time),\n\n comparison,\n\n &mut skill_curve,\n\n );\n\n } else {\n\n for segment in &mut *segments {\n\n segment.comparison_mut(comparison).real_time = None;\n\n }\n\n }\n\n\n\n if let Some(game_time) = goal_time.game_time {\n\n generate_for_timing_method_with_buf(\n\n segments,\n", "file_path": "src/comparison/goal.rs", "rank": 25, "score": 277917.5692809775 }, { "content": "fn check_column_color(state: &State, split_index: usize, expected_color: SemanticColor) {\n\n assert_eq!(\n\n state.splits[split_index].columns[0].semantic_color,\n\n expected_color\n\n );\n\n}\n", "file_path": "src/component/splits/tests/column.rs", "rank": 26, "score": 277538.3773177764 }, { "content": "/// Calculates how much time could be saved on the remainder of the run with the\n\n/// given comparison. This information is based on the best segments.\n\n/// Considering the best segments don't represent theoretically perfect segment\n\n/// times, this information is only an approximation of how much time can\n\n/// actually be saved. This information is always live, so the total possible\n\n/// time save will shrink towards zero throughout the run and when time is lost\n\n/// on a segment. 
The time returned by this function can never be below zero.\n\npub fn calculate_total(timer: &Snapshot<'_>, segment_index: usize, comparison: &str) -> TimeSpan {\n\n let mut total = TimeSpan::zero();\n\n\n\n for index in segment_index..timer.run().len() {\n\n if let Some(time_save) = calculate(timer, index, comparison, true) {\n\n total += time_save;\n\n }\n\n }\n\n\n\n total\n\n}\n", "file_path": "src/analysis/possible_time_save.rs", "rank": 27, "score": 274038.57660093217 }, { "content": "#[test]\n\nfn column_segment_delta_update_on_ending_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentDelta,\n\n ColumnUpdateTrigger::OnEndingSegment,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 28, "score": 269793.9260557972 }, { "content": "#[test]\n\nfn column_comparison_segment_time_segment_delta() {\n\n check_columns(\n\n ColumnStartWith::ComparisonSegmentTime,\n\n ColumnUpdateWith::SegmentDelta,\n\n &[\n\n (\n\n [\"0:05\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n // In the original LiveSplit, we showed the segment time for\n\n // this column type if the comparison segment time was missing.\n", "file_path": "src/component/splits/tests/column.rs", "rank": 29, "score": 269793.9260557972 }, { "content": "#[test]\n\nfn column_comparison_segment_time_segment_time() {\n\n check_columns(\n\n ColumnStartWith::ComparisonSegmentTime,\n\n ColumnUpdateWith::SegmentTime,\n\n &[\n\n (\n\n [\"0:05\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"—\", \"0:01\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 30, "score": 269793.9260557972 }, { "content": "#[test]\n\nfn column_segment_delta_update_on_starting_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentDelta,\n\n ColumnUpdateTrigger::OnStartingSegment,\n\n &[\n\n (\n\n [\"−5.0\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"−3.0\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"−1.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+0.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 31, "score": 269793.9260557972 }, { "content": "#[test]\n\nfn column_segment_time_update_on_ending_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentTime,\n\n 
ColumnUpdateTrigger::OnEndingSegment,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 32, "score": 269793.9260557972 }, { "content": "#[test]\n\nfn column_segment_time_update_on_starting_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentTime,\n\n ColumnUpdateTrigger::OnStartingSegment,\n\n &[\n\n (\n\n [\"0:00\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:02\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:03\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:05\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 33, "score": 269793.9260557972 }, { "content": "pub fn layout_height(layout: &LayoutState) -> f32 {\n\n layout.components.iter().map(height).sum()\n\n}\n\n\n", "file_path": "src/rendering/component/mod.rs", "rank": 34, "score": 269465.15823053825 }, { "content": "pub fn layout_width(layout: &LayoutState) -> f32 {\n\n layout.components.iter().map(width).sum()\n\n}\n\n\n", "file_path": "src/rendering/component/mod.rs", "rank": 35, "score": 269465.15823053825 }, { "content": "/// Attempts to parse a Llanfair splits file.\n\npub fn parse<R: Read + Seek>(mut source: R) -> Result<Run> {\n\n let mut buf = Vec::new();\n\n let mut buf2 = Vec::new();\n\n\n\n // The protocol is documented here:\n\n // https://docs.oracle.com/javase/7/docs/platform/serialization/spec/protocol.html\n\n\n\n const HEADER: [u8; 30] = [\n\n 0xAC, 0xED, // Magic\n\n 0x00, 0x05, // Version\n\n 0x73, // New Object\n\n 0x72, // New Class Declaration\n\n 0x00, 0x16, // Length of Class Name\n\n // org.fenix.llanfair.Run\n\n 0x6F, 0x72, 0x67, 0x2E, 0x66, 0x65, 0x6E, 0x69, 0x78, 0x2E, 0x6C, 0x6C, 0x61, 0x6E, 0x66,\n\n 0x61, 0x69, 0x72, 0x2E, 0x52, 0x75, 0x6E,\n\n ];\n\n let mut header_buf = [0; 30];\n\n source.read_exact(&mut header_buf).context(ReadHeader)?;\n\n if HEADER != header_buf {\n", "file_path": "src/run/parser/llanfair.rs", "rank": 36, "score": 268856.79292901495 }, { "content": "#[test]\n\nfn column_empty_segment_time() {\n\n check_columns(\n\n ColumnStartWith::Empty,\n\n ColumnUpdateWith::SegmentTime,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"—\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:08\", \"—\", \"0:01\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 37, "score": 266826.68020475423 }, { "content": "#[test]\n\nfn column_empty_segment_delta() {\n\n check_columns(\n\n ColumnStartWith::Empty,\n\n ColumnUpdateWith::SegmentDelta,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n 
[\"+3.5\", \"—\", \"\", \"\", \"\", \"\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"\", \"\", \"\"],\n\n [BehindLosing, Text, Best, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 38, "score": 266826.68020475423 }, { "content": "fn assert(timer: &Timer, got: [Option<Prediction>; 4], [a, b, c]: [(f64, usize, bool); 3]) {\n\n assert_eq!(\n\n got,\n\n [\n\n Prediction {\n\n time: span(0.0),\n\n predecessor: 0,\n\n }\n\n .into(),\n\n Prediction {\n\n time: span(a.0),\n\n predecessor: a.1,\n\n }\n\n .into(),\n\n Prediction {\n\n time: span(b.0),\n\n predecessor: b.1,\n\n }\n\n .into(),\n\n Prediction {\n", "file_path": "src/analysis/sum_of_segments/tests.rs", "rank": 39, "score": 266793.0255479873 }, { "content": "fn generate(segments: &mut [Segment], method: TimingMethod) {\n\n let mut accumulated = Some(TimeSpan::zero());\n\n\n\n let mut previous_segment: Option<&Segment> = None;\n\n for segment in segments {\n\n if let Some(accumulated_val) = &mut accumulated {\n\n let (mut total_weights, mut total_time) = (0.0, 0.0);\n\n let mut current_weight = 1.0;\n\n\n\n for &(id, time) in segment.segment_history().iter_actual_runs().rev() {\n\n if let Some(time) = time[method] {\n\n // Skip all the combined segments\n\n let skip = catch! {\n\n previous_segment?.segment_history().get(id)?[method].is_none()\n\n }\n\n .unwrap_or(false);\n\n\n\n if !skip {\n\n total_weights += current_weight;\n\n total_time += current_weight * time.total_seconds();\n", "file_path": "src/comparison/average_segments.rs", "rank": 40, "score": 265319.15092991124 }, { "content": "pub fn from_reader<T, B>(reader: B) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n B: BufRead,\n\n{\n\n let mut deserializer = Deserializer::from_reader(reader);\n\n let t = T::deserialize(&mut deserializer)?;\n\n deserializer.skip_whitespace()?;\n\n if deserializer.index == deserializer.buf.len() {\n\n Ok(t)\n\n } else {\n\n Err(Error::TrailingCharacters)\n\n }\n\n}\n\n\n\nimpl<'de, B: BufRead> de::Deserializer<'de> for &mut Deserializer<B> {\n\n type Error = Error;\n\n\n\n fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value>\n\n where\n", "file_path": "src/run/parser/flitter/s_expressions.rs", "rank": 41, "score": 265070.38193028735 }, { "content": "/// Saves the Run in use by the Timer provided as a LiveSplit splits file\n\n/// (*.lss).\n\npub fn save_timer<W: Write>(timer: &Timer, writer: W) -> Result<()> {\n\n let run;\n\n let run = if timer.current_phase() == TimerPhase::NotRunning {\n\n timer.run()\n\n } else {\n\n run = timer.clone().into_run(true);\n\n &run\n\n };\n\n save_run(run, writer)\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 42, "score": 263854.2352192335 }, { "content": "#[test]\n\nfn column_delta_best_segment_colors() {\n\n let mut timer = timer();\n\n\n\n // Set best segment times, but no PB time\n\n run_with_splits_opt(\n\n &mut timer,\n\n &[Some(5.0), Some(8.0), Some(12.0), None, Some(20.0)],\n\n );\n\n\n\n start_run(&mut timer);\n\n\n\n let layout_settings = Default::default();\n\n let mut component = Component::with_settings(Settings {\n\n columns: vec![ColumnSettings {\n\n start_with: ColumnStartWith::Empty,\n\n update_with: ColumnUpdateWith::Delta,\n\n ..Default::default()\n\n }],\n\n fill_with_blank_space: false,\n\n ..Default::default()\n", "file_path": "src/component/splits/tests/column.rs", "rank": 43, "score": 261899.0191828413 }, { "content": "#[test]\n\nfn column_segment_time_update_contextual() {\n\n 
check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentTime,\n\n ColumnUpdateTrigger::Contextual,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:03\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:05\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 44, "score": 261899.0191828413 }, { "content": "#[test]\n\nfn column_segment_delta_update_contextual() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SegmentDelta,\n\n ColumnUpdateTrigger::Contextual,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"−1.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+0.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 45, "score": 261899.0191828413 }, { "content": "#[test]\n\nfn column_delta_update_on_starting_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::Delta,\n\n ColumnUpdateTrigger::OnStartingSegment,\n\n &[\n\n (\n\n [\"−5.0\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"−3.0\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"−1.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+0.5\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 46, "score": 261899.0191828413 }, { "content": "#[test]\n\nfn column_delta_update_on_ending_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::Delta,\n\n ColumnUpdateTrigger::OnEndingSegment,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 47, "score": 261899.0191828413 }, { "content": "fn calculate_deltas(timer: &Timer, comparison: &str, draw_info: &mut DrawInfo) {\n\n let timing_method = timer.current_timing_method();\n\n for segment in timer.run().segments() {\n\n let time = catch! 
{\n\n let time = segment.split_time()[timing_method]?\n\n - segment.comparison(comparison)[timing_method]?;\n\n\n\n if time > draw_info.max_delta {\n\n draw_info.max_delta = time;\n\n } else if time < draw_info.min_delta {\n\n draw_info.min_delta = time;\n\n }\n\n\n\n time\n\n };\n\n draw_info.deltas.push(time);\n\n }\n\n}\n\n\n", "file_path": "src/component/graph.rs", "rank": 48, "score": 261539.42814174833 }, { "content": "#[test]\n\nfn not_when_setting_run() {\n\n let mut timer = timer();\n\n let mut run = Run::new();\n\n run.push_segment(Segment::new(\"\"));\n\n timer.set_run(run).unwrap();\n\n assert!(!timer.run().has_been_modified());\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 49, "score": 259359.93399178257 }, { "content": "/// Helper function for accessing either the given comparison or a Timer's\n\n/// current comparison if the given comparison is `None`.\n\npub fn or_current<'a>(comparison: Option<&'a str>, timer: &'a Timer) -> &'a str {\n\n comparison.unwrap_or_else(|| timer.current_comparison())\n\n}\n\n\n", "file_path": "src/comparison/mod.rs", "rank": 50, "score": 258811.5473328951 }, { "content": "#[test]\n\nfn new_best_segment() {\n\n let mut run = Run::new();\n\n run.push_segment(Segment::new(\"\"));\n\n run.push_segment(Segment::new(\"\"));\n\n\n\n let mut editor = Editor::new(run).unwrap();\n\n\n\n editor\n\n .active_segment()\n\n .parse_and_set_split_time(\"1:00\")\n\n .unwrap();\n\n\n\n editor.select_only(1);\n\n\n\n editor\n\n .active_segment()\n\n .parse_and_set_split_time(\"3:00\")\n\n .unwrap();\n\n\n\n editor.insert_segment_above();\n", "file_path": "src/run/editor/tests/mod.rs", "rank": 52, "score": 258520.16429874132 }, { "content": "#[test]\n\nfn column_split_time_update_on_starting_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SplitTime,\n\n ColumnUpdateTrigger::OnStartingSegment,\n\n &[\n\n (\n\n [\"0:00\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:02\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:03\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"0:05\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 53, "score": 257180.3667622559 }, { "content": "#[test]\n\nfn column_comparison_segment_time_dont_update() {\n\n check_columns(\n\n ColumnStartWith::ComparisonSegmentTime,\n\n ColumnUpdateWith::DontUpdate,\n\n &[(\n\n [\"0:05\", \"—\", \"—\", \"0:10\", \"0:05\", \"1:05\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ); 7],\n\n )\n\n}\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 54, "score": 257180.3667622559 }, { "content": "#[test]\n\nfn column_split_time_update_on_ending_segment() {\n\n check_columns_update_trigger(\n\n ColumnUpdateWith::SplitTime,\n\n ColumnUpdateTrigger::OnEndingSegment,\n\n &[\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"\", \"\", \"\", \"\", \"\", \"\"],\n\n [Text, Text, Text, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 55, "score": 257180.3667622559 }, { "content": "#[test]\n\nfn column_possible_time_save_segment_delta() 
{\n\n check_columns(\n\n ColumnStartWith::PossibleTimeSave,\n\n ColumnUpdateWith::SegmentDelta,\n\n &[\n\n (\n\n [\"0.00\", \"—\", \"—\", \"0.00\", \"4.00\", \"1:04.00\"],\n\n [Text, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"0.00\", \"4.00\", \"1:04.00\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"0.00\", \"4.00\", \"1:04.00\"],\n\n [BehindLosing, Text, Text, Text, Text, Text],\n\n ),\n\n (\n\n [\"+3.5\", \"—\", \"—\", \"0.00\", \"4.00\", \"1:04.00\"],\n\n [BehindLosing, Text, Best, Text, Text, Text],\n", "file_path": "src/component/splits/tests/column.rs", "rank": 56, "score": 257180.3667622559 }, { "content": "/// Tries to resolve the given comparison based on a Timer object. If either\n\n/// `None` is given or the comparison doesn't exist, `None` is returned.\n\n/// Otherwise the comparison name stored in the Timer is returned by reference.\n\npub fn resolve<'a>(comparison: &Option<String>, timer: &'a Timer) -> Option<&'a str> {\n\n let comparison = comparison.as_ref()?;\n\n timer.run().comparisons().find(|&rc| comparison == rc)\n\n}\n", "file_path": "src/comparison/mod.rs", "rank": 58, "score": 255226.65425912308 }, { "content": "#[test]\n\nfn reset_and_set_attempt_as_pb() {\n\n let mut timer = timer();\n\n\n\n // Call it for the phase NotRunning\n\n assert_eq!(timer.current_phase(), TimerPhase::NotRunning);\n\n timer.reset_and_set_attempt_as_pb();\n\n for segment in timer.run().segments() {\n\n assert_eq!(segment.personal_best_split_time().game_time, None);\n\n }\n\n\n\n // Call it for the phase Running, but don't do any splits yet\n\n start_run(&mut timer);\n\n assert_eq!(timer.current_phase(), TimerPhase::Running);\n\n timer.reset_and_set_attempt_as_pb();\n\n assert_eq!(timer.current_phase(), TimerPhase::NotRunning);\n\n for segment in timer.run().segments() {\n\n assert_eq!(segment.personal_best_split_time().game_time, None);\n\n }\n\n\n\n // Call it for the phase Paused, but don't do any splits yet\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 59, "score": 254181.71865518036 }, { "content": "#[test]\n\nfn shows_segment_name_during_attempt() {\n\n let (mut timer, mut component, layout_settings) = prepare();\n\n\n\n timer.start();\n\n\n\n assert_eq!(\n\n component\n\n .state(&timer.snapshot(), &layout_settings)\n\n .segment_name\n\n .unwrap(),\n\n \"foo\",\n\n );\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 60, "score": 254114.16357685364 }, { "content": "#[test]\n\nfn import_pb_into_segment_history() {\n\n let mut editor = Editor::new(run()).unwrap();\n\n editor.select_timing_method(TimingMethod::GameTime);\n\n\n\n editor.select_only(0);\n\n let fake_first = Some(TimeSpan::from_seconds(5.0));\n\n editor.active_segment().set_split_time(fake_first);\n\n\n\n editor.select_only(1);\n\n let fake_second = Some(TimeSpan::from_seconds(10.0));\n\n editor.active_segment().set_split_time(fake_second);\n\n\n\n editor.select_only(2);\n\n let fake_third = Some(TimeSpan::from_seconds(15.0));\n\n editor.active_segment().set_split_time(fake_third);\n\n\n\n let run = editor.close();\n\n let mut timer = Timer::new(run).unwrap();\n\n\n\n let (real_first, real_second, real_third) = (\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 61, "score": 254062.772827653 }, { "content": "#[test]\n\nfn clears_run_id_when_pbing() {\n\n let mut timer = timer();\n\n\n\n // Get a PB\n\n let (first, second, third) = (\n\n TimeSpan::from_seconds(5.0),\n\n 
TimeSpan::from_seconds(10.0),\n\n TimeSpan::from_seconds(15.0),\n\n );\n\n run_with_splits(\n\n &mut timer,\n\n &[\n\n first.total_seconds(),\n\n second.total_seconds(),\n\n third.total_seconds(),\n\n ],\n\n );\n\n\n\n let mut run = timer.into_run(true);\n\n\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 62, "score": 253046.86156901674 }, { "content": "fn generate(segments: &mut [Segment], medians: &mut Vec<(f64, f64)>, method: TimingMethod) {\n\n let mut accumulated = Some(TimeSpan::zero());\n\n\n\n let mut previous_segment: Option<&Segment> = None;\n\n for segment in segments {\n\n if let Some(accumulated_val) = &mut accumulated {\n\n medians.clear();\n\n\n\n let mut current_weight = 1.0;\n\n\n\n for &(id, time) in segment.segment_history().iter_actual_runs().rev() {\n\n if let Some(time) = time[method] {\n\n // Skip all the combined segments\n\n let skip = catch! {\n\n previous_segment?.segment_history().get(id)?[method].is_none()\n\n }\n\n .unwrap_or(false);\n\n\n\n if !skip {\n\n medians.push((current_weight, time.total_seconds()));\n", "file_path": "src/comparison/median_segments.rs", "rank": 63, "score": 252980.60736586788 }, { "content": "/// Gets the length of the last segment that leads up to a certain split, using\n\n/// the live segment time if the split is not completed yet.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `segment_index`, returning\n\n/// the live segment time if the split is not completed yet.\n\npub fn live_segment_time(\n\n timer: &Snapshot<'_>,\n\n segment_index: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time(\n\n timer.run(),\n\n segment_index,\n\n timer.current_time()[method]?,\n\n method,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 64, "score": 252395.85206338277 }, { "content": "/// Gets the amount of time lost or gained on a certain split, using the live\n\n/// segment delta if the split is not completed yet.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split for which the delta is calculated.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the segment delta for a certain split, returning the live segment\n\n/// delta if the split is not completed yet.\n\npub fn live_segment_delta(\n\n timer: &Snapshot<'_>,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_delta(\n\n timer.run(),\n\n segment_index,\n\n timer.current_time()[method]?,\n\n comparison,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 65, "score": 252395.2499011837 }, { "content": "/// Gets the length of the last segment that leads up to a certain split.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `segment_index`, returning\n\n/// None if the split is not completed yet.\n\npub fn previous_segment_time(\n\n timer: &Timer,\n\n segment_index: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time(\n\n timer.run(),\n\n segment_index,\n\n 
timer.run().segment(segment_index).split_time()[method]?,\n\n method,\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 66, "score": 252395.02261457607 }, { "content": "/// Gets the amount of time lost or gained on a certain split.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `segment_index`: The index of the split for which the delta is calculated.\n\n/// - `comparison`: The comparison that you are comparing with.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the segment delta for a certain split, returning None if the split\n\n/// is not completed yet.\n\npub fn previous_segment_delta(\n\n timer: &Timer,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_delta(\n\n timer.run(),\n\n segment_index,\n\n timer.run().segment(segment_index).split_time()[method]?,\n\n comparison,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 67, "score": 252393.83417097444 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_color = false;\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TimerHeight\" {\n\n text_parsed(reader, tag.into_buf(), |v| {\n\n settings.height = translate_size(v)\n\n })\n\n } else if tag.name() == b\"TimerFormat\" {\n\n // Version >= 1.5\n", "file_path": "src/layout/parser/timer.rs", "rank": 68, "score": 252339.92911254335 }, { "content": "#[test]\n\nfn deleting_best_segment_time_clears_segment_history() {\n\n let mut timer = timer();\n\n\n\n let (first, second, third) = (\n\n TimeSpan::from_seconds(5.0),\n\n TimeSpan::from_seconds(10.0),\n\n TimeSpan::from_seconds(15.0),\n\n );\n\n run_with_splits(\n\n &mut timer,\n\n &[\n\n first.total_seconds(),\n\n second.total_seconds(),\n\n third.total_seconds(),\n\n ],\n\n );\n\n\n\n let run = timer.into_run(true);\n\n let run2 = run.clone();\n\n\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 69, "score": 252181.94347340273 }, { "content": "#[test]\n\nfn modifying_best_segment_time_fixes_segment_history() {\n\n let mut timer = timer();\n\n\n\n let (first, second, third) = (\n\n TimeSpan::from_seconds(5.0),\n\n TimeSpan::from_seconds(10.0),\n\n TimeSpan::from_seconds(15.0),\n\n );\n\n run_with_splits(\n\n &mut timer,\n\n &[\n\n first.total_seconds(),\n\n second.total_seconds(),\n\n third.total_seconds(),\n\n ],\n\n );\n\n\n\n let run = timer.into_run(true);\n\n let run2 = run.clone();\n\n\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 70, "score": 252181.94347340273 }, { "content": "#[test]\n\npub fn sum_of_best() {\n\n let mut timer = create_timer(&[\"A\", \"B\", \"C\"]);\n\n\n\n run_with_splits_opt(&mut timer, &[Some(5.0), Some(20.0), Some(60.0)]);\n\n let mut predictions = [None; 4];\n\n best::calculate(\n\n timer.run().segments(),\n\n &mut predictions,\n\n false,\n\n false,\n\n TimingMethod::GameTime,\n\n );\n\n assert(\n\n &timer,\n\n predictions,\n\n [(5.0, 0, true), (20.0, 1, true), (60.0, 2, true)],\n\n );\n\n\n\n run_with_splits_opt(&mut timer, &[None, Some(10.0), None]);\n\n predictions = [None; 4];\n", "file_path": "src/analysis/sum_of_segments/tests.rs", "rank": 71, "score": 251768.99831927998 }, { "content": 
"/// Calculates the Sum of Worst Segments for the timing method provided. This is\n\n/// the slowest time possible to complete a run of a category, based on\n\n/// information collected from all the previous attempts. This obviously isn't\n\n/// really the worst possible time, but may be useful information regardless.\n\n/// If there's an active attempt, you can choose to take it into account as\n\n/// well.\n\npub fn calculate_worst(\n\n segments: &[Segment],\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n let mut predictions = vec![None; segments.len() + 1];\n\n worst::calculate(segments, &mut predictions, use_current_run, method)\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 72, "score": 251354.8897807818 }, { "content": "/// Calculates the Sum of Best Segments for the timing method provided. This is\n\n/// the fastest time possible to complete a run of a category, based on\n\n/// information collected from all the previous attempts. This often matches up\n\n/// with the sum of the best segment times of all the segments, but that may not\n\n/// always be the case, as skipped segments may introduce combined segments that\n\n/// may be faster than the actual sum of their best segment times. The name is\n\n/// therefore a bit misleading, but sticks around for historical reasons. You\n\n/// can choose to do a simple calculation instead, which excludes the Segment\n\n/// History from the calculation process. If there's an active attempt, you can\n\n/// choose to take it into account as well.\n\npub fn calculate_best(\n\n segments: &[Segment],\n\n simple_calculation: bool,\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n let mut predictions = vec![None; segments.len() + 1];\n\n best::calculate(\n\n segments,\n\n &mut predictions,\n\n simple_calculation,\n\n use_current_run,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 73, "score": 251352.15289053397 }, { "content": "/// Follows a path starting from a certain segment in a certain attempt to the\n\n/// next split that didn't get skipped. Returns the index of the segment after\n\n/// the segment that has the next split time and a sum of the combined segment\n\n/// times and the current time provided. 
If the tracked attempt ends before a\n\n/// split time is found, the index returned is 0.\n\npub fn track_branch(\n\n segments: &[Segment],\n\n current_time: Option<TimeSpan>,\n\n segment_index: usize,\n\n run_index: i32,\n\n method: TimingMethod,\n\n) -> (usize, Time) {\n\n for (segment_index, segment) in segments.iter().enumerate().skip(segment_index) {\n\n if let Some(cur_time) = segment.segment_history().get(run_index) {\n\n if let Some(cur_time) = cur_time[method] {\n\n return (\n\n segment_index + 1,\n\n Time::new().with_timing_method(method, current_time.map(|t| cur_time + t)),\n\n );\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n (0, Time::default())\n\n}\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 74, "score": 251348.76871407626 }, { "content": "/// Attempts to parse a SourceLiveTimer splits file.\n\npub fn parse<R: Read>(source: R) -> Result<Run> {\n\n let splits: Splits = from_reader(source).context(Json)?;\n\n\n\n let mut run = Run::new();\n\n\n\n if splits.Category.starts_with(\"Portal 2\") {\n\n run.set_game_name(\"Portal 2\");\n\n } else if splits.Category.starts_with(\"Portal\") {\n\n run.set_game_name(\"Portal\");\n\n } else if splits.Category.starts_with(\"Half Life 2\") {\n\n run.set_game_name(\"Half Life 2\");\n\n }\n\n\n\n if let Some(run_name) = splits.RunName {\n\n if run_name != splits.Category {\n\n run.set_category_name(run_name);\n\n } else {\n\n run.set_category_name(splits.Category);\n\n }\n\n } else {\n", "file_path": "src/run/parser/source_live_timer.rs", "rank": 75, "score": 249553.933777364 }, { "content": "#[test]\n\nfn stops_showing_segment_name_when_resetting() {\n\n let (mut timer, mut component, layout_settings) = prepare();\n\n\n\n timer.start();\n\n timer.split();\n\n timer.reset(true);\n\n\n\n assert_eq!(\n\n component\n\n .state(&timer.snapshot(), &layout_settings)\n\n .segment_name,\n\n None\n\n );\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 76, "score": 249007.77789031892 }, { "content": "#[test]\n\nfn shows_segment_name_at_the_end_of_an_attempt() {\n\n let (mut timer, mut component, layout_settings) = prepare();\n\n\n\n timer.start();\n\n timer.split();\n\n\n\n assert_eq!(\n\n component\n\n .state(&timer.snapshot(), &layout_settings)\n\n .segment_name\n\n .unwrap(),\n\n \"foo\",\n\n );\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 77, "score": 249007.77789031892 }, { "content": "/// Attempts to parse a Flitter splits file.\n\npub fn parse<R: BufRead>(source: R) -> Result<Run> {\n\n let splits: Splits = self::s_expressions::from_reader(source)?;\n\n\n\n let mut run = Run::new();\n\n\n\n run.set_game_name(splits.title);\n\n run.set_category_name(splits.category);\n\n run.set_attempt_count(splits.attempts);\n\n\n\n if splits.world_record.is_some() {\n\n run.add_custom_comparison(world_record::NAME).unwrap();\n\n }\n\n\n\n let segments = run.segments_mut();\n\n\n\n segments.extend(splits.split_names.into_iter().map(Segment::new));\n\n\n\n if let Some(pb) = splits.personal_best {\n\n for (segment, pb) in segments.iter_mut().zip(pb.splits) {\n\n segment.set_personal_best_split_time(Time::new().with_real_time(pb.time));\n", "file_path": "src/run/parser/flitter/mod.rs", "rank": 78, "score": 248921.68873063585 }, { "content": "/// Calculates the comparison's segment time of the segment with the timing\n\n/// method specified, combining segments if the segment before it is empty.\n\n/// This is not calculating the current attempt's segment times.\n\n///\n\n/// # 
Panics\n\n///\n\n/// Panics if the provided `segment_index` is greater than or equal to\n\n/// `run.len()`.\n\npub fn comparison_combined_segment_time(\n\n run: &Run,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n if comparison == best_segments::NAME {\n\n return run.segment(segment_index).best_segment_time()[method];\n\n }\n\n\n\n let current_comparison_time = run.segment(segment_index).comparison(comparison)[method]?;\n\n\n\n let previous_comparison_time = find_previous_non_empty_comparison_time(\n\n &run.segments()[..segment_index],\n\n comparison,\n\n method,\n\n )\n\n .unwrap_or_default();\n\n\n\n Some(current_comparison_time - previous_comparison_time)\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 79, "score": 248077.31498151942 }, { "content": "/// Calculates the comparison's segment time of the segment with the timing\n\n/// method specified. This is not calculating the current attempt's segment\n\n/// times.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the provided `segment_index` is greater than or equal to\n\n/// `run.len()`.\n\npub fn comparison_single_segment_time(\n\n run: &Run,\n\n segment_index: usize,\n\n comparison: &str,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n if comparison == best_segments::NAME {\n\n return run.segment(segment_index).best_segment_time()[method];\n\n }\n\n\n\n if segment_index == 0 {\n\n run.segment(segment_index).comparison(comparison)[method]\n\n } else {\n\n let current_comparison_time = run.segment(segment_index).comparison(comparison)[method]?;\n\n\n\n let previous_comparison_time =\n\n run.segment(segment_index - 1).comparison(comparison)[method]?;\n\n\n\n Some(current_comparison_time - previous_comparison_time)\n\n }\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 80, "score": 248077.23144056194 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let settings = component.settings_mut();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut override_label = false;\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? {\n\n if tag.name() == b\"TextColor\" {\n\n color(reader, tag.into_buf(), |c| settings.label_color = Some(c))\n\n } else if tag.name() == b\"OverrideTextColor\" {\n\n parse_bool(reader, tag.into_buf(), |b| override_label = b)\n\n } else if tag.name() == b\"DeltaAccuracy\" {\n\n accuracy(reader, tag.into_buf(), |v| settings.accuracy = v)\n", "file_path": "src/layout/parser/previous_segment.rs", "rank": 81, "score": 247963.42867975228 }, { "content": "pub fn settings<R>(\n\n reader: &mut Reader<R>,\n\n buf: &mut Vec<u8>,\n\n component: &mut Component,\n\n) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut settings = component.settings().clone();\n\n let mut background_builder = GradientBuilder::new();\n\n let mut timer_override_color = false;\n\n let (mut total_height, mut segment_timer_ratio) = (65u32, 0.4);\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if let Some(tag) = background_builder.parse_background(reader, tag)? 
{\n\n if tag.name() == b\"Height\" {\n\n text_parsed(reader, tag.into_buf(), |v| total_height = translate_size(v))\n\n } else if tag.name() == b\"SegmentTimerSizeRatio\" {\n\n text_parsed(reader, tag.into_buf(), |v: u32| {\n\n segment_timer_ratio = v as f32 / 100.0\n", "file_path": "src/layout/parser/detailed_timer.rs", "rank": 82, "score": 247818.0042404273 }, { "content": "/// Attempts to parse a Portal 2 Live Timer splits file.\n\npub fn parse<R: BufRead>(source: R) -> Result<Run> {\n\n let mut run = Run::new();\n\n\n\n run.set_game_name(\"Portal 2\");\n\n run.set_category_name(\"Any%\");\n\n\n\n let mut lines = source.lines();\n\n lines.next(); // Skip the header\n\n\n\n let mut aggregate_ticks = 0.0;\n\n\n\n let mut line = lines.next().context(ExpectedMap)?.context(ReadMap)?;\n\n for &(chapter_name, maps) in &CHAPTERS {\n\n for &map in maps {\n\n {\n\n let mut splits = line.split(',');\n\n let map_name = splits.next().context(ExpectedMapName)?;\n\n if map_name != map {\n\n return Err(Error::ExpectedDifferentMapName {\n\n expected: map,\n", "file_path": "src/run/parser/portal2_live_timer.rs", "rank": 83, "score": 246374.15373466615 }, { "content": "/// Calculates the delta of the current attempt to the comparison provided.\n\n/// Additionally a value is returned that indicates whether the delta value is a\n\n/// live delta. A live delta indicates that the value is actively changing at\n\n/// the moment. This may be the case when the current attempt is slower than the\n\n/// comparison at the current split.\n\npub fn calculate(timer: &Snapshot<'_>, comparison: &str) -> (Option<TimeSpan>, bool) {\n\n let timing_method = timer.current_timing_method();\n\n let last_segment = timer.run().segments().last().unwrap();\n\n\n\n let mut use_live_delta = false;\n\n\n\n let time = match timer.current_phase() {\n\n TimerPhase::Running | TimerPhase::Paused => {\n\n let mut delta = analysis::last_delta(\n\n timer.run(),\n\n timer.current_split_index().unwrap(),\n\n comparison,\n\n timing_method,\n\n );\n\n\n\n catch! {\n\n let live_delta = timer.current_time()[timing_method]?\n\n - timer.current_split().unwrap().comparison(comparison)[timing_method]?;\n\n\n\n if live_delta > delta.unwrap_or_default() {\n", "file_path": "src/analysis/delta.rs", "rank": 84, "score": 246145.14740975184 }, { "content": "#[test]\n\nfn doesnt_show_segment_name_outside_attempt() {\n\n let (timer, mut component, layout_settings) = prepare();\n\n\n\n assert_eq!(\n\n component\n\n .state(&timer.snapshot(), &layout_settings)\n\n .segment_name,\n\n None\n\n );\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 85, "score": 244131.97992085599 }, { "content": "#[test]\n\nfn import_best_segment_with_game_time_usage() {\n\n let mut timer = timer();\n\n\n\n let first = TimeSpan::from_seconds(5.0);\n\n run_with_splits(&mut timer, &[first.total_seconds()]);\n\n\n\n let run = timer.into_run(true);\n\n let mut editor = Editor::new(run).unwrap();\n\n\n\n editor.select_timing_method(TimingMethod::GameTime);\n\n\n\n editor.select_only(0);\n\n let best = Some(TimeSpan::from_seconds(4.0));\n\n editor.active_segment().set_best_segment_time(best);\n\n\n\n editor.insert_segment_above();\n\n\n\n let history = editor.run().segment(0).segment_history();\n\n // The newly inserted segment's history should have a null time with a\n\n // non-positive index. 
This represents a skipped split for the imported best\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 86, "score": 244082.90981021742 }, { "content": "/// Calculates the current pace of the active attempt based on the comparison\n\n/// provided. If there's no active attempt, the final time of the comparison is\n\n/// returned instead.\n\npub fn calculate(timer: &Snapshot<'_>, comparison: &str) -> (Option<TimeSpan>, bool) {\n\n let timing_method = timer.current_timing_method();\n\n let last_segment = timer.run().segments().last().unwrap();\n\n\n\n match timer.current_phase() {\n\n TimerPhase::Running | TimerPhase::Paused => {\n\n let mut delta = analysis::last_delta(\n\n timer.run(),\n\n timer.current_split_index().unwrap(),\n\n comparison,\n\n timing_method,\n\n )\n\n .unwrap_or_default();\n\n\n\n let mut is_live = false;\n\n\n\n catch! {\n\n let live_delta = timer.current_time()[timing_method]?\n\n - timer.current_split().unwrap().comparison(comparison)[timing_method]?;\n\n\n", "file_path": "src/analysis/current_pace.rs", "rank": 87, "score": 243573.75140714765 }, { "content": "fn next_timing_method(run: &Run, predictions: &mut Vec<Option<Prediction>>, method: TimingMethod) {\n\n let segments = run.segments();\n\n\n\n predictions.clear();\n\n predictions.resize(segments.len() + 1, None);\n\n best::calculate(segments, predictions, true, false, method);\n\n}\n", "file_path": "src/run/editor/cleaning.rs", "rank": 88, "score": 240576.24247097765 }, { "content": "fn generate(segments: &mut [Segment], attempts: &[Attempt], method: TimingMethod) {\n\n for attempt in attempts {\n\n let id = attempt.index();\n\n let mut total_time = TimeSpan::zero();\n\n\n\n for segment in segments.iter_mut() {\n\n if let Some(time) = segment.segment_history().get(id) {\n\n if let Some(time) = time[method] {\n\n total_time += time;\n\n\n\n let comp = &mut segment.comparison_mut(NAME)[method];\n\n if comp.map_or(true, |c| total_time < c) {\n\n *comp = Some(total_time);\n\n }\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/comparison/best_split_times.rs", "rank": 89, "score": 240234.2258239931 }, { "content": "#[test]\n\nfn reattaches_unattached_segment_history_elements_by_using_negative_ids() {\n\n let mut timer = create_timer(&[\"A\", \"B\"]);\n\n run_with_splits(&mut timer, &[3.0, 6.0]);\n\n run_with_splits(&mut timer, &[2.0, 4.0]);\n\n let mut run = timer.into_run(true);\n\n\n\n // We pop the last attempt from the history, but keep it in the segment\n\n // history, which makes the segment history elements unattached.\n\n run.attempt_history.pop().unwrap();\n\n\n\n run.fix_splits();\n\n\n\n let segments = run.segments();\n\n\n\n assert_eq!(segments[0].segment_history().try_get_min_index(), Some(0));\n\n assert_eq!(segments[0].segment_history().try_get_max_index(), Some(1));\n\n\n\n assert_eq!(segments[1].segment_history().try_get_min_index(), Some(0));\n\n assert_eq!(segments[1].segment_history().try_get_max_index(), Some(1));\n\n}\n\n\n\n// The below tests should be in crate::timing::timer::tests, but we ended up\n\n// having to put them here due to run.attempt_history being private.\n\n\n", "file_path": "src/run/tests/fixing.rs", "rank": 90, "score": 239785.37488032985 }, { "content": "#[test]\n\nfn still_shows_icon_of_last_segment_at_the_end_of_an_attempt() {\n\n let (mut timer, mut component, layout_settings) = prepare();\n\n\n\n component.state(&timer.snapshot(), &layout_settings);\n\n\n\n timer.start();\n\n\n\n component.state(&timer.snapshot(), &layout_settings);\n\n\n\n 
timer.split();\n\n\n\n assert!(component\n\n .state(&timer.snapshot(), &layout_settings)\n\n .icon_change\n\n .is_none());\n\n}\n\n\n", "file_path": "src/component/detailed_timer/tests.rs", "rank": 91, "score": 239471.49564748976 }, { "content": "#[test]\n\nfn import_pb_into_segment_history_and_remove_null_values() {\n\n let mut editor = Editor::new(run()).unwrap();\n\n editor.select_timing_method(TimingMethod::GameTime);\n\n\n\n editor.select_only(0);\n\n let fake_first = Some(TimeSpan::from_seconds(5.0));\n\n editor.active_segment().set_split_time(fake_first);\n\n\n\n editor.select_only(2);\n\n let fake_third = Some(TimeSpan::from_seconds(15.0));\n\n editor.active_segment().set_split_time(fake_third);\n\n\n\n let run = editor.close();\n\n let mut timer = Timer::new(run).unwrap();\n\n\n\n let (real_first, real_third) = (TimeSpan::from_seconds(4.0), TimeSpan::from_seconds(14.0));\n\n run_with_splits_opt(\n\n &mut timer,\n\n &[\n\n Some(real_first.total_seconds()),\n", "file_path": "src/timing/timer/tests/mod.rs", "rank": 92, "score": 239423.50899713358 }, { "content": "fn no_reuse_artificial(c: &mut Criterion) {\n\n let (timer, mut layout) = artificial();\n\n\n\n c.bench_function(\"No Reuse (Artificial)\", move |b| {\n\n b.iter(|| layout.state(&timer.snapshot()))\n\n });\n\n}\n\n\n", "file_path": "benches/layout_state.rs", "rank": 93, "score": 237883.7009692347 }, { "content": "fn reuse_real(c: &mut Criterion) {\n\n let (timer, mut layout) = real();\n\n\n\n let mut state = layout.state(&timer.snapshot());\n\n\n\n c.bench_function(\"Reuse (Real)\", move |b| {\n\n b.iter(|| layout.update_state(&mut state, &timer.snapshot()))\n\n });\n\n}\n\n\n", "file_path": "benches/layout_state.rs", "rank": 94, "score": 237883.7009692347 }, { "content": "fn reuse_artificial(c: &mut Criterion) {\n\n let (timer, mut layout) = artificial();\n\n\n\n let mut state = layout.state(&timer.snapshot());\n\n\n\n c.bench_function(\"Reuse (Artificial)\", move |b| {\n\n b.iter(|| layout.update_state(&mut state, &timer.snapshot()))\n\n });\n\n}\n", "file_path": "benches/layout_state.rs", "rank": 95, "score": 237883.7009692347 }, { "content": "fn no_reuse_real(c: &mut Criterion) {\n\n let (timer, mut layout) = real();\n\n\n\n c.bench_function(\"No Reuse (Real)\", move |b| {\n\n b.iter(|| layout.state(&timer.snapshot()))\n\n });\n\n}\n\n\n", "file_path": "benches/layout_state.rs", "rank": 96, "score": 237883.7009692347 }, { "content": "fn calculate(segments: &[Segment], method: TimingMethod, offset: TimeSpan) -> f64 {\n\n if segments\n\n .last()\n\n .and_then(|s| s.personal_best_split_time()[method])\n\n .is_none()\n\n {\n\n // If there is no PB time, then it's always a 100% chance.\n\n return 1.0;\n\n }\n\n\n\n comparison::goal::determine_percentile(offset, segments, method, None, &mut SkillCurve::new())\n\n}\n\n\n", "file_path": "src/analysis/pb_chance/mod.rs", "rank": 97, "score": 237472.53085483302 }, { "content": "fn timer() -> Timer {\n\n use super::run;\n\n let mut run = run();\n\n run.metadata_mut()\n\n .custom_variable_mut(\"Permanent\")\n\n .permanent();\n\n let timer = Timer::new(run).unwrap();\n\n assert!(!timer.run().has_been_modified());\n\n timer\n\n}\n\n\n", "file_path": "src/timing/timer/tests/mark_as_modified.rs", "rank": 98, "score": 233932.75107031874 }, { "content": "fn check_columns(\n\n start_with: ColumnStartWith,\n\n update_with: ColumnUpdateWith,\n\n expected_values: Values,\n\n) {\n\n let mut timer = timer();\n\n\n\n // Set initial best segment times\n\n run_with_splits_opt(\n\n &mut timer,\n\n 
&[\n\n Some(6.0),\n\n None,\n\n Some(100.0),\n\n Some(101.0),\n\n Some(102.0),\n\n Some(103.0),\n\n ],\n\n );\n\n\n", "file_path": "src/component/splits/tests/column.rs", "rank": 99, "score": 230487.12321818064 } ]
Rust
src/many-macros/src/lib.rs
hansl/many-rs
6400eb6e2b30c09ee13a28d20ef16b81a2a7fe05
use inflections::Inflect; use proc_macro2::{Ident, Span, TokenStream}; use quote::{quote, quote_spanned}; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use syn::spanned::Spanned; use syn::PathArguments::AngleBracketed; use syn::{ AngleBracketedGenericArguments, FnArg, GenericArgument, PatType, ReturnType, Signature, TraitItem, Type, TypePath, }; #[derive(Deserialize)] struct ManyModuleAttributes { pub id: Option<u32>, pub name: Option<String>, pub namespace: Option<String>, pub many_crate: Option<String>, } #[derive(Debug)] struct Endpoint { pub name: String, pub func: Ident, pub span: Span, pub is_async: bool, pub is_mut: bool, pub has_sender: bool, pub arg_type: Option<Box<Type>>, #[allow(unused)] pub ret_type: Box<Type>, } impl Endpoint { pub fn new(signature: &Signature) -> Result<Self, (String, Span)> { let func = signature.ident.clone(); let name = func.to_string(); let is_async = signature.asyncness.is_some(); let mut has_sender = false; let arg_type: Option<Box<Type>>; let mut ret_type: Option<Box<Type>> = None; let mut inputs = signature.inputs.iter(); let receiver = inputs.next().ok_or_else(|| { ( "Must have at least 1 argument".to_string(), signature.span(), ) })?; let is_mut = if let FnArg::Receiver(r) = receiver { r.mutability.is_some() } else { return Err(( "Function in trait must have a receiver".to_string(), receiver.span(), )); }; let maybe_identity = inputs.next(); let maybe_argument = inputs.next(); match (maybe_identity, maybe_argument) { (_id, Some(FnArg::Typed(PatType { ty, .. }))) => { has_sender = true; arg_type = Some(ty.clone()); } (Some(FnArg::Typed(PatType { ty, .. })), None) => { arg_type = Some(ty.clone()); } (None, None) => { arg_type = None; } (_, _) => { return Err(("Must have 2 or 3 arguments".to_string(), signature.span())); } } if let ReturnType::Type(_, ty) = &signature.output { if let Type::Path(TypePath { path: syn::Path { segments, .. }, .. }) = ty.as_ref() { if segments[0].ident == "Result" || segments .iter() .map(|x| x.ident.to_string()) .collect::<Vec<String>>() .join("::") == "std::result::Result" { if let AngleBracketed(AngleBracketedGenericArguments { ref args, .. 
}) = segments[0].arguments { ret_type = Some( args.iter() .find_map(|x| match x { GenericArgument::Type(t) => Some(Box::new(t.clone())), _ => None, }) .unwrap(), ); } } } } if ret_type.is_none() { return Err(( "Must have a result return type.".to_string(), signature.output.span(), )); } Ok(Self { name, func, span: signature.span(), is_async, is_mut, has_sender, arg_type, ret_type: ret_type.unwrap(), }) } } #[allow(clippy::too_many_lines)] fn many_module_impl(attr: &TokenStream, item: TokenStream) -> Result<TokenStream, syn::Error> { let attrs: ManyModuleAttributes = from_tokenstream(attr)?; let many = Ident::new( attrs.many_crate.as_ref().map_or("many", String::as_str), attr.span(), ); let namespace = attrs.namespace; let span = item.span(); let tr: syn::ItemTrait = syn::parse2(item) .map_err(|_| syn::Error::new(span, "`many_module` only applies to traits.".to_string()))?; let struct_name = attrs.name.clone().unwrap_or_else(|| tr.ident.to_string()); let struct_ident = Ident::new( struct_name.as_str(), attrs .name .as_ref() .map_or_else(|| attr.span(), |_| tr.ident.span()), ); let mut trait_ = tr.clone(); if attrs.name.is_none() { trait_.ident = Ident::new(&format!("{}Backend", struct_name), tr.ident.span()); } let trait_ident = trait_.ident.clone(); let vis = trait_.vis.clone(); let attr_id = attrs.id.iter(); let attr_name = inflections::Inflect::to_constant_case(format!("{}Attribute", struct_name).as_str()); let attr_ident = Ident::new(&attr_name, attr.span()); let info_name = format!("{}Info", struct_name); let info_ident = Ident::new(&info_name, attr.span()); let endpoints: Result<Vec<_>, (String, Span)> = trait_ .items .iter() .filter_map(|item| match item { TraitItem::Method(m) => Some(m), _ => None, }) .map(|item| Endpoint::new(&item.sig)) .collect(); let endpoints = endpoints.map_err(|(msg, span)| syn::Error::new(span, msg))?; let ns = namespace.clone(); let endpoint_strings: Vec<String> = endpoints .iter() .map(move |e| { let name = e.name.as_str().to_camel_case(); match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, } }) .collect(); let ns = namespace.clone(); let validate_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; if let Some(ty) = &e.arg_type { quote_spanned! { span => #ep => { minicbor::decode::<'_, #ty>(data) .map_err(|e| ManyError::deserialization_error(e.to_string()))?; } } } else { quote! { #ep => {} } } }); let validate = quote! { fn validate(&self, message: & #many ::message::RequestMessage) -> Result<(), #many ::ManyError> { let method = message.method.as_str(); let data = message.data.as_slice(); match method { #(#validate_endpoint_pat)* _ => return Err( #many ::ManyError::invalid_method_name(method.to_string())), }; Ok(()) } }; let ns = namespace; let execute_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; let ep_ident = &e.func; let backend_decl = if e.is_mut { quote! { let mut backend = self.backend.lock().unwrap(); } } else { quote! { let backend = self.backend.lock().unwrap(); } }; let call = match (e.has_sender, e.arg_type.is_some(), e.is_async) { (false, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ) ) }, (false, true, true) => quote_spanned! { span => encode( backend . 
#ep_ident ( decode( data )? ).await ) }, (true, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ) ) }, (true, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ).await ) }, (false, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( ) ) }, (false, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( ).await ) }, (true, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ) ) }, (true, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ).await ) }, }; quote_spanned! { span => #ep => { #backend_decl #call } } }); let execute = quote! { async fn execute( &self, message: #many ::message::RequestMessage, ) -> Result< #many ::message::ResponseMessage, #many ::ManyError> { use #many ::ManyError; fn decode<'a, T: minicbor::Decode<'a>>(data: &'a [u8]) -> Result<T, ManyError> { minicbor::decode(data).map_err(|e| ManyError::deserialization_error(e.to_string())) } fn encode<T: minicbor::Encode>(result: Result<T, ManyError>) -> Result<Vec<u8>, ManyError> { minicbor::to_vec(result?).map_err(|e| ManyError::serialization_error(e.to_string())) } let data = message.data.as_slice(); let result = match message.method.as_str() { #( #execute_endpoint_pat )* _ => Err(ManyError::internal_server_error()), }?; Ok( #many ::message::ResponseMessage::from_request( &message, &message.to, Ok(result), )) } }; let attribute = if attrs.id.is_some() { quote! { Some(#attr_ident) } } else { quote! { None } }; Ok(quote! { #( #vis const #attr_ident: #many ::protocol::Attribute = #many ::protocol::Attribute::id(#attr_id); )* #vis struct #info_ident; impl std::ops::Deref for #info_ident { type Target = #many ::server::module::ManyModuleInfo; fn deref(&self) -> & #many ::server::module::ManyModuleInfo { use #many ::server::module::ManyModuleInfo; static ONCE: std::sync::Once = std::sync::Once::new(); static mut VALUE: *mut ManyModuleInfo = 0 as *mut ManyModuleInfo; unsafe { ONCE.call_once(|| VALUE = Box::into_raw(Box::new(ManyModuleInfo { name: #struct_name .to_string(), attribute: #attribute, endpoints: vec![ #( #endpoint_strings .to_string() ),* ], }))); &*VALUE } } } #[async_trait::async_trait] #trait_ #vis struct #struct_ident<T: #trait_ident> { backend: std::sync::Arc<std::sync::Mutex<T>> } impl<T: #trait_ident> std::fmt::Debug for #struct_ident<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(#struct_name).finish() } } impl<T: #trait_ident> #struct_ident<T> { pub fn new(backend: std::sync::Arc<std::sync::Mutex<T>>) -> Self { Self { backend } } } #[async_trait::async_trait] impl<T: #trait_ident> #many ::ManyModule for #struct_ident<T> { fn info(&self) -> & #many ::server::module::ManyModuleInfo { & #info_ident } #validate #execute } }) } #[proc_macro_attribute] pub fn many_module( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { many_module_impl(&attr.into(), item.into()) .unwrap_or_else(|e| e.to_compile_error()) .into() }
use inflections::Inflect; use proc_macro2::{Ident, Span, TokenStream}; use quote::{quote, quote_spanned}; use serde::Deserialize; use serde_tokenstream::from_tokenstream; use syn::spanned::Spanned; use syn::PathArguments::AngleBracketed; use syn::{ AngleBracketedGenericArguments, FnArg, GenericArgument, PatType, ReturnType, Signature, TraitItem, Type, TypePath, }; #[derive(Deserialize)] struct ManyModuleAttributes { pub id: Option<u32>, pub name: Option<String>, pub namespace: Option<String>, pub many_crate: Option<String>, } #[derive(Debug)] struct Endpoint { pub name: String, pub func: Ident, pub span: Span, pub is_async: bool, pub is_mut: bool, pub has_sender: bool, pub arg_type: Option<Box<Type>>, #[allow(unused)] pub ret_type: Box<Type>, } impl Endpoint { pub fn new(signature: &Signature) -> Result<Self, (String, Span)> { let func = signature.ident.clone(); let name = func.to_string(); let is_async = signature.asyncness.is_some(); let mut has_sender = false; let arg_type: Option<Box<Type>>; let mut ret_type: Option<Box<Type>> = None; let mut inputs = signature.inputs.iter(); let receiver = inputs.next().ok_or_else(|| { ( "Must have at least 1 argument".to_string(), signature.span(), ) })?; let is_mut = if let FnArg::Receiver(r) = receiver { r.mutability.is_some() } else { return Err(( "Function in trait must have a receiver".to_string(), receiver.span(), )); }; let maybe_identity = inputs.next(); let maybe_argument = inputs.next(); match (maybe_identity, maybe_argument) { (_id, Some(FnArg::Typed(PatType { ty, .. }))) => { has_sender = true; arg_type = Some(ty.clone()); } (Some(FnArg::Typed(PatType { ty, .. })), None) => { arg_type = Some(ty.clone()); } (None, None) => { arg_type = None; } (_, _) => { return Err(("Must have 2 or 3 arguments".to_string(), signature.span())); } } if let ReturnType::Type(_, ty) = &signature.output { if let Type::Path(TypePath { path: syn::Path { segments, .. }, .. }) = ty.as_ref() { if segments[0].ident == "Result" || segments
= from_tokenstream(attr)?; let many = Ident::new( attrs.many_crate.as_ref().map_or("many", String::as_str), attr.span(), ); let namespace = attrs.namespace; let span = item.span(); let tr: syn::ItemTrait = syn::parse2(item) .map_err(|_| syn::Error::new(span, "`many_module` only applies to traits.".to_string()))?; let struct_name = attrs.name.clone().unwrap_or_else(|| tr.ident.to_string()); let struct_ident = Ident::new( struct_name.as_str(), attrs .name .as_ref() .map_or_else(|| attr.span(), |_| tr.ident.span()), ); let mut trait_ = tr.clone(); if attrs.name.is_none() { trait_.ident = Ident::new(&format!("{}Backend", struct_name), tr.ident.span()); } let trait_ident = trait_.ident.clone(); let vis = trait_.vis.clone(); let attr_id = attrs.id.iter(); let attr_name = inflections::Inflect::to_constant_case(format!("{}Attribute", struct_name).as_str()); let attr_ident = Ident::new(&attr_name, attr.span()); let info_name = format!("{}Info", struct_name); let info_ident = Ident::new(&info_name, attr.span()); let endpoints: Result<Vec<_>, (String, Span)> = trait_ .items .iter() .filter_map(|item| match item { TraitItem::Method(m) => Some(m), _ => None, }) .map(|item| Endpoint::new(&item.sig)) .collect(); let endpoints = endpoints.map_err(|(msg, span)| syn::Error::new(span, msg))?; let ns = namespace.clone(); let endpoint_strings: Vec<String> = endpoints .iter() .map(move |e| { let name = e.name.as_str().to_camel_case(); match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, } }) .collect(); let ns = namespace.clone(); let validate_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; if let Some(ty) = &e.arg_type { quote_spanned! { span => #ep => { minicbor::decode::<'_, #ty>(data) .map_err(|e| ManyError::deserialization_error(e.to_string()))?; } } } else { quote! { #ep => {} } } }); let validate = quote! { fn validate(&self, message: & #many ::message::RequestMessage) -> Result<(), #many ::ManyError> { let method = message.method.as_str(); let data = message.data.as_slice(); match method { #(#validate_endpoint_pat)* _ => return Err( #many ::ManyError::invalid_method_name(method.to_string())), }; Ok(()) } }; let ns = namespace; let execute_endpoint_pat = endpoints.iter().map(|e| { let span = e.span; let name = e.name.as_str().to_camel_case(); let ep = match ns { Some(ref namespace) => format!("{}.{}", namespace, name), None => name, }; let ep_ident = &e.func; let backend_decl = if e.is_mut { quote! { let mut backend = self.backend.lock().unwrap(); } } else { quote! { let backend = self.backend.lock().unwrap(); } }; let call = match (e.has_sender, e.arg_type.is_some(), e.is_async) { (false, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ) ) }, (false, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( decode( data )? ).await ) }, (true, true, false) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ) ) }, (true, true, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default(), decode( data )? ).await ) }, (false, false, false) => quote_spanned! { span => encode( backend . #ep_ident ( ) ) }, (false, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( ).await ) }, (true, false, false) => quote_spanned! { span => encode( backend . 
#ep_ident ( &message.from.unwrap_or_default() ) ) }, (true, false, true) => quote_spanned! { span => encode( backend . #ep_ident ( &message.from.unwrap_or_default() ).await ) }, }; quote_spanned! { span => #ep => { #backend_decl #call } } }); let execute = quote! { async fn execute( &self, message: #many ::message::RequestMessage, ) -> Result< #many ::message::ResponseMessage, #many ::ManyError> { use #many ::ManyError; fn decode<'a, T: minicbor::Decode<'a>>(data: &'a [u8]) -> Result<T, ManyError> { minicbor::decode(data).map_err(|e| ManyError::deserialization_error(e.to_string())) } fn encode<T: minicbor::Encode>(result: Result<T, ManyError>) -> Result<Vec<u8>, ManyError> { minicbor::to_vec(result?).map_err(|e| ManyError::serialization_error(e.to_string())) } let data = message.data.as_slice(); let result = match message.method.as_str() { #( #execute_endpoint_pat )* _ => Err(ManyError::internal_server_error()), }?; Ok( #many ::message::ResponseMessage::from_request( &message, &message.to, Ok(result), )) } }; let attribute = if attrs.id.is_some() { quote! { Some(#attr_ident) } } else { quote! { None } }; Ok(quote! { #( #vis const #attr_ident: #many ::protocol::Attribute = #many ::protocol::Attribute::id(#attr_id); )* #vis struct #info_ident; impl std::ops::Deref for #info_ident { type Target = #many ::server::module::ManyModuleInfo; fn deref(&self) -> & #many ::server::module::ManyModuleInfo { use #many ::server::module::ManyModuleInfo; static ONCE: std::sync::Once = std::sync::Once::new(); static mut VALUE: *mut ManyModuleInfo = 0 as *mut ManyModuleInfo; unsafe { ONCE.call_once(|| VALUE = Box::into_raw(Box::new(ManyModuleInfo { name: #struct_name .to_string(), attribute: #attribute, endpoints: vec![ #( #endpoint_strings .to_string() ),* ], }))); &*VALUE } } } #[async_trait::async_trait] #trait_ #vis struct #struct_ident<T: #trait_ident> { backend: std::sync::Arc<std::sync::Mutex<T>> } impl<T: #trait_ident> std::fmt::Debug for #struct_ident<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(#struct_name).finish() } } impl<T: #trait_ident> #struct_ident<T> { pub fn new(backend: std::sync::Arc<std::sync::Mutex<T>>) -> Self { Self { backend } } } #[async_trait::async_trait] impl<T: #trait_ident> #many ::ManyModule for #struct_ident<T> { fn info(&self) -> & #many ::server::module::ManyModuleInfo { & #info_ident } #validate #execute } }) } #[proc_macro_attribute] pub fn many_module( attr: proc_macro::TokenStream, item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { many_module_impl(&attr.into(), item.into()) .unwrap_or_else(|e| e.to_compile_error()) .into() }
.iter() .map(|x| x.ident.to_string()) .collect::<Vec<String>>() .join("::") == "std::result::Result" { if let AngleBracketed(AngleBracketedGenericArguments { ref args, .. }) = segments[0].arguments { ret_type = Some( args.iter() .find_map(|x| match x { GenericArgument::Type(t) => Some(Box::new(t.clone())), _ => None, }) .unwrap(), ); } } } } if ret_type.is_none() { return Err(( "Must have a result return type.".to_string(), signature.output.span(), )); } Ok(Self { name, func, span: signature.span(), is_async, is_mut, has_sender, arg_type, ret_type: ret_type.unwrap(), }) } } #[allow(clippy::too_many_lines)] fn many_module_impl(attr: &TokenStream, item: TokenStream) -> Result<TokenStream, syn::Error> { let attrs: ManyModuleAttributes
random
[ { "content": "// TODO: Change the error type\n\npub fn public_key(key: &CoseKey) -> Result<CoseKey, String> {\n\n let params = BTreeMap::from_iter(key.params.clone().into_iter());\n\n match key.alg {\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::EdDSA)) => {\n\n let x = params.get(&Label::Int(OkpKeyParameter::X.to_i64()));\n\n if let Some(x) = x.cloned() {\n\n let x = x\n\n .as_bytes()\n\n .cloned()\n\n .ok_or_else(|| \"Could not get EdDSA X parameter\".to_string())?;\n\n Ok(eddsa_cose_key(x, None))\n\n } else {\n\n Err(\"Key doesn't have a public key\".to_string())\n\n }\n\n }\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::ES256)) => {\n\n let x = params.get(&Label::Int(Ec2KeyParameter::X.to_i64()));\n\n let y = params.get(&Label::Int(Ec2KeyParameter::Y.to_i64()));\n\n\n\n if let (Some(x), Some(y)) = (x.cloned(), y.cloned()) {\n", "file_path": "src/many/src/cose_helpers.rs", "rank": 0, "score": 161103.1168625402 }, { "content": "#[derive(Copy, Clone, Eq, Debug, Ord, PartialOrd)]\n\n#[non_exhaustive]\n\n#[must_use]\n\nstruct InnerIdentity {\n\n bytes: [u8; MAX_IDENTITY_BYTE_LEN],\n\n}\n\n\n\n// Identity needs to be bound to 32 bytes maximum.\n\nstatic_assertions::assert_eq_size!([u8; MAX_IDENTITY_BYTE_LEN], InnerIdentity);\n\nstatic_assertions::const_assert_eq!(InnerIdentity::anonymous().to_byte_array()[0], 0);\n\n\n\nimpl PartialEq for InnerIdentity {\n\n fn eq(&self, other: &Self) -> bool {\n\n match (&self.bytes[0], &other.bytes[0]) {\n\n // Anonymous\n\n (0, 0) => true,\n\n\n\n // Public Key\n\n (1, 1) => self.bytes[1..=SHA_OUTPUT_SIZE] == other.bytes[1..=SHA_OUTPUT_SIZE],\n\n\n\n // Subresource\n\n (x @ 0x80..=0xFF, y @ 0x80..=0xFF) if x == y => self.bytes[1..] == other.bytes[1..],\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 2, "score": 130707.31421677046 }, { "content": "pub fn decode_request_from_cose_sign1(sign1: CoseSign1) -> Result<RequestMessage, ManyError> {\n\n let request = CoseSign1RequestMessage { sign1 };\n\n let from_id = request\n\n .verify()\n\n .map_err(ManyError::could_not_verify_signature)?;\n\n\n\n let payload = request\n\n .sign1\n\n .payload\n\n .ok_or_else(ManyError::empty_envelope)?;\n\n let message = RequestMessage::from_bytes(&payload).map_err(ManyError::deserialization_error)?;\n\n\n\n // Check the `from` field.\n\n if from_id != message.from.unwrap_or_default() {\n\n return Err(ManyError::invalid_from_identity());\n\n }\n\n\n\n // We don't check the `to` field, leave that to the server itself.\n\n // Some servers might want to proxy messages that aren't for them, for example, or\n\n // accept anonymous messages.\n\n\n\n Ok(message)\n\n}\n\n\n", "file_path": "src/many/src/message.rs", "rank": 4, "score": 116109.3242770808 }, { "content": "pub fn encode_cose_sign1_from_response(\n\n response: ResponseMessage,\n\n cose_key: &CoseKeyIdentity,\n\n) -> Result<CoseSign1, String> {\n\n encode_cose_sign1_from_payload(\n\n response\n\n .to_bytes()\n\n .map_err(|e| format!(\"Could not serialize response: {}\", e))?,\n\n cose_key,\n\n )\n\n}\n\n\n", "file_path": "src/many/src/message.rs", "rank": 6, "score": 102237.47507091405 }, { "content": "pub fn encode_cose_sign1_from_request(\n\n request: RequestMessage,\n\n cose_key: &CoseKeyIdentity,\n\n) -> Result<CoseSign1, String> {\n\n encode_cose_sign1_from_payload(request.to_bytes().unwrap(), cose_key)\n\n}\n\n\n\n/// Provide utility functions surrounding request and response messages.\n\n#[derive(Clone, Debug, Default)]\n\npub(crate) struct CoseSign1RequestMessage {\n\n pub sign1: 
CoseSign1,\n\n}\n\n\n\n/// WebAuthn ClientData\n", "file_path": "src/many/src/message.rs", "rank": 7, "score": 102237.47507091405 }, { "content": "pub fn decode_response_from_cose_sign1(\n\n sign1: CoseSign1,\n\n to: Option<Identity>,\n\n) -> Result<ResponseMessage, String> {\n\n let request = CoseSign1RequestMessage { sign1 };\n\n let from_id = request.verify()?;\n\n\n\n let payload = request\n\n .sign1\n\n .payload\n\n .ok_or_else(|| \"Envelope does not have payload.\".to_string())?;\n\n let message = ResponseMessage::from_bytes(&payload)?;\n\n\n\n // Check the `from` field.\n\n if from_id != message.from {\n\n return Err(\"The message's from field does not match the envelope.\".to_string());\n\n }\n\n\n\n // Check the `to` field to make sure we have the right one.\n\n if let Some(to_id) = to {\n\n if to_id != message.to.unwrap_or_default() {\n\n return Err(\"The message's to field is not for this server.\".to_string());\n\n }\n\n }\n\n\n\n Ok(message)\n\n}\n\n\n", "file_path": "src/many/src/message.rs", "rank": 8, "score": 102237.47507091405 }, { "content": "pub trait FeatureInfo {\n\n fn as_feature(&self) -> Feature;\n\n fn roles() -> BTreeSet<String>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proptest::prelude::*;\n\n\n\n proptest! {\n\n #[test]\n\n fn features(good_id: FeatureId, bad_id: FeatureId) {\n\n prop_assume!(good_id != bad_id && good_id != 1 && bad_id != 1);\n\n\n\n let mut set = FeatureSet::default();\n\n set.insert(Feature::with_id(good_id));\n\n set.insert(Feature::with_id(1).with_argument(CborAny::Int(1)));\n\n\n\n assert!(set.has_id(good_id));\n", "file_path": "src/many/src/server/module/_9_account/features.rs", "rank": 9, "score": 99486.27867419312 }, { "content": "pub trait TryFromAttributeSet: Sized {\n\n fn try_from_set(set: &AttributeSet) -> Result<Self, ManyError>;\n\n}\n\n\n\nimpl IntoIterator for AttributeSet {\n\n type Item = Attribute;\n\n type IntoIter = std::collections::btree_set::IntoIter<Attribute>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n\n }\n\n}\n\n\n\nimpl FromIterator<Attribute> for AttributeSet {\n\n fn from_iter<T: IntoIterator<Item = Attribute>>(iter: T) -> Self {\n\n Self(BTreeSet::from_iter(iter))\n\n }\n\n}\n", "file_path": "src/many/src/protocol/attributes.rs", "rank": 10, "score": 95888.57895123307 }, { "content": "#[many_module(name = AsyncModule, id = 8, namespace = async, many_crate = crate)]\n\npub trait AsyncModuleBackend: Send {\n\n fn status(&self, sender: &Identity, args: StatusArgs) -> Result<StatusReturn, ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_8_async.rs", "rank": 11, "score": 94149.03257780176 }, { "content": "#[many_module(name = AccountModule, id = 9, namespace = account, many_crate = crate)]\n\npub trait AccountModuleBackend: Send {\n\n /// Create an account.\n\n fn create(&mut self, sender: &Identity, args: CreateArgs) -> Result<CreateReturn, ManyError>;\n\n\n\n /// Set the description of an account.\n\n fn set_description(\n\n &mut self,\n\n sender: &Identity,\n\n args: SetDescriptionArgs,\n\n ) -> Result<SetDescriptionReturn, ManyError>;\n\n\n\n /// List all the roles supported by an account.\n\n fn list_roles(\n\n &self,\n\n sender: &Identity,\n\n args: ListRolesArgs,\n\n ) -> Result<ListRolesReturn, ManyError>;\n\n\n\n /// Get roles associated with an identity for an account.\n\n fn get_roles(&self, sender: &Identity, args: GetRolesArgs)\n", "file_path": "src/many/src/server/module/_9_account.rs", "rank": 12, "score": 94149.03257780176 }, { 
"content": "#[many_module(name = LedgerModule, id = 2, namespace = ledger, many_crate = crate)]\n\n#[cfg_attr(test, automock)]\n\npub trait LedgerModuleBackend: Send {\n\n fn info(&self, sender: &Identity, args: InfoArgs) -> Result<InfoReturns, ManyError>;\n\n fn balance(&self, sender: &Identity, args: BalanceArgs) -> Result<BalanceReturns, ManyError>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n server::module::testutils::{call_module, call_module_cbor},\n\n types::identity::tests::identity,\n\n types::{ledger::TokenAmount, VecOrSingle},\n\n };\n\n use minicbor::bytes::ByteVec;\n\n use mockall::predicate;\n\n use once_cell::sync::Lazy;\n\n use std::{\n\n collections::BTreeMap,\n\n str::FromStr,\n\n sync::{Arc, Mutex},\n", "file_path": "src/many/src/server/module/_2_ledger.rs", "rank": 13, "score": 94148.47151496983 }, { "content": "#[many_module(name = BlockchainModule, id = 1, namespace = blockchain, many_crate = crate)]\n\n#[cfg_attr(test, automock)]\n\npub trait BlockchainModuleBackend: Send {\n\n fn info(&self) -> Result<InfoReturns, ManyError>;\n\n fn block(&self, args: BlockArgs) -> Result<BlockReturns, ManyError>;\n\n fn transaction(&self, args: TransactionArgs) -> Result<TransactionReturns, ManyError>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n server::module::testutils::{call_module, call_module_cbor},\n\n types::{blockchain::TransactionIdentifier, Timestamp},\n\n };\n\n use mockall::predicate;\n\n use std::sync::{Arc, Mutex};\n\n\n\n #[test]\n\n fn info() {\n\n let mut mock = MockBlockchainModuleBackend::new();\n\n mock.expect_info().times(1).return_const(Ok(InfoReturns {\n", "file_path": "src/many/src/server/module/_1_blockchain.rs", "rank": 14, "score": 94148.47151496983 }, { "content": "#[many_module(name = BaseModule, id = 0, many_crate = crate)]\n\n#[cfg_attr(test, automock)]\n\npub trait BaseModuleBackend: Send {\n\n fn endpoints(&self) -> Result<Endpoints, ManyError>;\n\n fn heartbeat(&self) -> Result<HeartbeatReturn, ManyError> {\n\n Ok(HeartbeatReturn {})\n\n }\n\n fn status(&self) -> Result<Status, ManyError>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::{Arc, Mutex};\n\n\n\n use crate::{\n\n protocol::Attribute, server::module::testutils::call_module,\n\n types::identity::cose::tests::generate_random_eddsa_identity,\n\n };\n\n\n\n use super::*;\n\n #[test]\n\n fn status() {\n", "file_path": "src/many/src/server/module/_0_base.rs", "rank": 15, "score": 94144.82229390723 }, { "content": "#[many_module(name = KvStoreModule, id = 3, namespace = kvstore, many_crate = crate)]\n\n#[cfg_attr(test, automock)]\n\npub trait KvStoreModuleBackend: Send {\n\n fn info(&self, sender: &Identity, args: InfoArg) -> Result<InfoReturns, ManyError>;\n\n fn get(&self, sender: &Identity, args: GetArgs) -> Result<GetReturns, ManyError>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::server::module::testutils::{call_module, call_module_cbor};\n\n use crate::types::identity::tests::identity;\n\n use minicbor::bytes::ByteVec;\n\n use mockall::predicate;\n\n use std::sync::{Arc, Mutex};\n\n\n\n #[test]\n\n fn info() {\n\n let mut mock = MockKvStoreModuleBackend::new();\n\n mock.expect_info()\n\n .with(predicate::eq(tests::identity(1)), predicate::eq(InfoArg {}))\n\n .times(1)\n", "file_path": "src/many/src/server/module/_3_kvstore.rs", "rank": 16, "score": 92496.43772561397 }, { "content": "pub trait TryCreateFeature: Sized {\n\n const ID: FeatureId;\n\n\n\n fn try_create(feature: 
&Feature) -> Result<Self, ManyError>;\n\n}\n\n\n", "file_path": "src/many/src/server/module/_9_account/features.rs", "rank": 17, "score": 92485.35750586743 }, { "content": "#[many_module(name = LedgerCommandsModule, id = 6, namespace = ledger, many_crate = crate)]\n\npub trait LedgerCommandsModuleBackend: Send {\n\n fn send(&mut self, sender: &Identity, args: SendArgs) -> Result<(), ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_6_ledger_commands.rs", "rank": 18, "score": 90936.26954276602 }, { "content": "#[many_module(name = AbciFrontendModule, id = 1001, namespace = abci, many_crate = crate)]\n\npub trait AbciClientModuleBackend: Send {\n\n fn status(&self) -> Result<StatusReturns, ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_1001_abci_frontend.rs", "rank": 19, "score": 90936.26954276602 }, { "content": "#[many_module(name = LedgerTransactionsModule, id = 4, namespace = ledger, many_crate = crate)]\n\npub trait LedgerTransactionsModuleBackend: Send {\n\n fn transactions(&self, args: TransactionsArgs) -> Result<TransactionsReturns, ManyError>;\n\n fn list(&self, args: ListArgs) -> Result<ListReturns, ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_4_ledger_transactions.rs", "rank": 20, "score": 90936.26954276602 }, { "content": "#[derive(Parser)]\n\nstruct IdOpt {\n\n /// An hexadecimal value to encode, an identity textual format to decode or\n\n /// a PEM file to read\n\n arg: String,\n\n\n\n /// Allow to generate the identity with a specific subresource ID.\n\n subid: Option<u32>,\n\n}\n\n\n", "file_path": "src/many-cli/src/main.rs", "rank": 21, "score": 90056.87427770579 }, { "content": "#[many_module(name = KvStoreCommandsModule, id = 7, namespace = kvstore, many_crate = crate)]\n\npub trait KvStoreCommandsModuleBackend: Send {\n\n fn put(&mut self, sender: &Identity, args: PutArgs) -> Result<PutReturns, ManyError>;\n\n fn delete(&mut self, sender: &Identity, args: DeleteArgs) -> Result<DeleteReturn, ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_7_kvstore_commands.rs", "rank": 22, "score": 89459.25033976267 }, { "content": "#[many_module(name = AccountMultisigModule, namespace = account, many_crate = crate)]\n\npub trait AccountMultisigModuleBackend: Send {\n\n fn multisig_submit_transaction(\n\n &mut self,\n\n sender: &Identity,\n\n args: SubmitTransactionArgs,\n\n ) -> Result<SubmitTransactionReturn, ManyError>;\n\n fn multisig_info(&self, sender: &Identity, args: InfoArgs) -> Result<InfoReturn, ManyError>;\n\n fn multisig_set_defaults(\n\n &mut self,\n\n sender: &Identity,\n\n args: SetDefaultsArgs,\n\n ) -> Result<SetDefaultsReturn, ManyError>;\n\n fn multisig_approve(\n\n &mut self,\n\n sender: &Identity,\n\n args: ApproveArgs,\n\n ) -> Result<ApproveReturn, ManyError>;\n\n fn multisig_revoke(\n\n &mut self,\n\n sender: &Identity,\n", "file_path": "src/many/src/server/module/_9_account/features/multisig.rs", "rank": 23, "score": 89455.75174955757 }, { "content": "#[async_trait]\n\npub trait ManyRequestHandler: Send + Sync + Debug {\n\n /// Validate that a message is okay with us.\n\n fn validate(&self, _message: &RequestMessage) -> Result<(), ManyError> {\n\n Ok(())\n\n }\n\n\n\n /// Handle an incoming request message, and returns the response message.\n\n /// This cannot fail. 
It should instead responds with a proper error response message.\n\n /// See the spec.\n\n async fn execute(&self, message: RequestMessage) -> Result<ResponseMessage, ManyError>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SimpleRequestHandlerAdapter<I: SimpleRequestHandler>(pub I);\n\n\n\n#[async_trait]\n\nimpl<I: SimpleRequestHandler> ManyRequestHandler for SimpleRequestHandlerAdapter<I> {\n\n fn validate(&self, message: &RequestMessage) -> Result<(), ManyError> {\n\n self.0\n\n .validate(message.method.as_str(), message.data.as_slice())\n", "file_path": "src/many/src/transport.rs", "rank": 24, "score": 88608.251102638 }, { "content": "#[async_trait]\n\npub trait SimpleRequestHandler: Send + Sync + Debug {\n\n fn validate(&self, _method: &str, _payload: &[u8]) -> Result<(), ManyError> {\n\n Ok(())\n\n }\n\n\n\n async fn handle(&self, method: &str, payload: &[u8]) -> Result<Vec<u8>, ManyError>;\n\n}\n\n\n", "file_path": "src/many/src/transport.rs", "rank": 25, "score": 88608.251102638 }, { "content": "#[async_trait]\n\npub trait ManyModule: Sync + Send + Debug {\n\n /// Returns information about this module.\n\n fn info(&self) -> &ManyModuleInfo;\n\n\n\n /// Verify that a message is well formed (ACLs, arguments, etc).\n\n fn validate(&self, _message: &RequestMessage) -> Result<(), ManyError> {\n\n Ok(())\n\n }\n\n\n\n /// Execute a message and returns its response.\n\n async fn execute(&self, message: RequestMessage) -> Result<ResponseMessage, ManyError>;\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod testutils {\n\n use crate::message::RequestMessage;\n\n use crate::types::identity::tests;\n\n use crate::{ManyError, ManyModule};\n\n\n\n pub fn call_module(\n", "file_path": "src/many/src/server/module.rs", "rank": 26, "score": 88608.251102638 }, { "content": "#[derive(Parser)]\n\nstruct HsmIdOpt {\n\n /// HSM PKCS#11 module path\n\n module: PathBuf,\n\n\n\n /// HSM PKCS#11 slot ID\n\n slot: u64,\n\n\n\n /// HSM PKCS#11 key ID\n\n keyid: String,\n\n\n\n /// Allow to generate the identity with a specific subresource ID.\n\n subid: Option<u32>,\n\n}\n\n\n\n#[derive(Parser)]\n\n#[clap(\n\n group(\n\n ArgGroup::new(\"hsm\")\n\n .multiple(true)\n\n .args(&[\"module\", \"slot\", \"keyid\"])\n\n .requires_all(&[\"module\", \"slot\", \"keyid\"])\n\n )\n\n)]\n", "file_path": "src/many-cli/src/main.rs", "rank": 27, "score": 87933.8325623625 }, { "content": "#[derive(Parser)]\n\nstruct GetTokenIdOpt {\n\n /// The server to call. It MUST implement the ledger attribute (2).\n\n server: url::Url,\n\n\n\n /// The token to get. 
If not listed in the list of tokens, this will\n\n /// error.\n\n symbol: String,\n\n}\n\n\n", "file_path": "src/many-cli/src/main.rs", "rank": 28, "score": 85942.52799453837 }, { "content": "#[async_trait]\n\npub trait LowLevelManyRequestHandler: Send + Sync + Debug {\n\n async fn execute(&self, envelope: CoseSign1) -> Result<CoseSign1, String>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct HandlerExecutorAdapter<H: ManyRequestHandler + Debug> {\n\n handler: H,\n\n identity: CoseKeyIdentity,\n\n}\n\n\n\nimpl<H: ManyRequestHandler + Debug> HandlerExecutorAdapter<H> {\n\n pub fn new(handler: H, identity: CoseKeyIdentity) -> Self {\n\n Self { handler, identity }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<H: ManyRequestHandler + Debug> LowLevelManyRequestHandler for HandlerExecutorAdapter<H> {\n\n async fn execute(&self, envelope: CoseSign1) -> Result<CoseSign1, String> {\n\n let request = crate::message::decode_request_from_cose_sign1(envelope)\n", "file_path": "src/many/src/transport.rs", "rank": 29, "score": 85395.63353246794 }, { "content": "#[many_module(name = AbciModule, id = 1000, namespace = abci, many_crate = crate)]\n\npub trait ManyAbciModuleBackend: std::fmt::Debug + Send + Sync {\n\n /// Called when the ABCI frontend is initialized. No action should be taken here, only\n\n /// information should be returned. If the ABCI frontend is restarted, this method\n\n /// will be called again.\n\n fn init(&mut self) -> Result<AbciInit, ManyError>;\n\n\n\n /// Called at Genesis of the Tendermint blockchain.\n\n fn init_chain(&mut self) -> Result<(), ManyError>;\n\n\n\n /// Called at the start of a block.\n\n fn begin_block(&mut self, _info: AbciBlock) -> Result<(), ManyError> {\n\n Ok(())\n\n }\n\n\n\n /// Called when info is needed from the backend.\n\n fn info(&self) -> Result<AbciInfo, ManyError>;\n\n\n\n /// Called at the end of a block.\n\n fn end_block(&mut self) -> Result<(), ManyError> {\n\n Ok(())\n\n }\n\n\n\n /// Called after a block. The app should take this call and serialize its state.\n\n fn commit(&mut self) -> Result<AbciCommitInfo, ManyError>;\n\n}\n", "file_path": "src/many/src/server/module/_1000_abci_backend.rs", "rank": 30, "score": 75878.99496903182 }, { "content": "/// Build an EdDSA CoseKey\n\n///\n\n/// # Arguments\n\n///\n\n/// * `x` - Public key\n\n/// * `d` - Private key\n\npub fn eddsa_cose_key(x: Vec<u8>, d: Option<Vec<u8>>) -> CoseKey {\n\n let mut params: Vec<(Label, Value)> = Vec::from([\n\n (\n\n Label::Int(coset::iana::OkpKeyParameter::Crv as i64),\n\n Value::from(coset::iana::EllipticCurve::Ed25519 as u64),\n\n ),\n\n (\n\n Label::Int(coset::iana::OkpKeyParameter::X as i64),\n\n Value::Bytes(x),\n\n ),\n\n ]);\n\n let mut key_ops: BTreeSet<KeyOperation> =\n\n BTreeSet::from([KeyOperation::Assigned(coset::iana::KeyOperation::Verify)]);\n\n\n\n if let Some(d) = d {\n\n params.push((\n\n Label::Int(coset::iana::OkpKeyParameter::D as i64),\n\n Value::Bytes(d),\n\n ));\n\n key_ops.insert(KeyOperation::Assigned(coset::iana::KeyOperation::Sign));\n", "file_path": "src/many/src/cose_helpers.rs", "rank": 31, "score": 73251.30361194152 }, { "content": "impl<'b> Decode<'b> for Identity {\n\n fn decode(d: &mut Decoder<'b>) -> Result<Self, minicbor::decode::Error> {\n\n let mut is_tagged = false;\n\n // Check all the tags.\n\n while d.datatype()? == Type::Tag {\n\n if d.tag()? == minicbor::data::Tag::Unassigned(10000) {\n\n is_tagged = true;\n\n }\n\n }\n\n\n\n match d.datatype()? 
{\n\n Type::String => Self::from_str(d.str()?),\n\n _ => {\n\n if !is_tagged {\n\n return Err(minicbor::decode::Error::Message(\n\n \"identities need to be tagged\",\n\n ));\n\n }\n\n\n\n Self::try_from(d.bytes()?)\n", "file_path": "src/many/src/types/identity.rs", "rank": 32, "score": 68697.68569837 }, { "content": "impl Debug for Identity {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"Identity\")\n\n .field(&if self.is_anonymous() {\n\n \"anonymous\".to_string()\n\n } else if self.is_public_key() {\n\n \"public-key\".to_string()\n\n } else if self.is_subresource() {\n\n format!(\"subresource({})\", self.subresource_id().unwrap_or_default())\n\n } else {\n\n \"??\".to_string()\n\n })\n\n .field(&self.to_string())\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Default for Identity {\n\n fn default() -> Self {\n\n Identity::anonymous()\n", "file_path": "src/many/src/types/identity.rs", "rank": 33, "score": 68691.57418838555 }, { "content": " pub fn from_bytes(bytes: &[u8]) -> Result<Self, ManyError> {\n\n let bytes = bytes;\n\n if bytes.is_empty() {\n\n return Err(ManyError::invalid_identity());\n\n }\n\n\n\n match bytes[0] {\n\n 0 => {\n\n if bytes.len() > 1 {\n\n Err(ManyError::invalid_identity())\n\n } else {\n\n Ok(Self::anonymous())\n\n }\n\n }\n\n 1 => {\n\n if bytes.len() != 29 {\n\n Err(ManyError::invalid_identity())\n\n } else {\n\n let mut slice = [0; 28];\n\n slice.copy_from_slice(&bytes[1..29]);\n", "file_path": "src/many/src/types/identity.rs", "rank": 34, "score": 68689.54348190772 }, { "content": "\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n InnerIdentity::from_str(s).map(Self)\n\n }\n\n}\n\n\n\nimpl AsRef<[u8; MAX_IDENTITY_BYTE_LEN]> for Identity {\n\n fn as_ref(&self) -> &[u8; MAX_IDENTITY_BYTE_LEN] {\n\n let result: &[u8; MAX_IDENTITY_BYTE_LEN] = unsafe { std::mem::transmute(self) };\n\n result\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Eq, Debug, Ord, PartialOrd)]\n\n#[non_exhaustive]\n\n#[must_use]\n", "file_path": "src/many/src/types/identity.rs", "rank": 35, "score": 68689.4712430321 }, { "content": "}\n\n\n\nimpl TryFrom<&[u8]> for Identity {\n\n type Error = ManyError;\n\n\n\n fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {\n\n Self::from_bytes(bytes)\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for Identity {\n\n type Error = ManyError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n InnerIdentity::try_from(value).map(Self)\n\n }\n\n}\n\n\n\nimpl FromStr for Identity {\n\n type Err = ManyError;\n", "file_path": "src/many/src/types/identity.rs", "rank": 36, "score": 68689.22831850429 }, { "content": " let pk = Sha3_224::digest(&public_key(key).unwrap().to_vec().unwrap());\n\n Self(InnerIdentity::public_key(pk.into()))\n\n }\n\n\n\n pub fn subresource(key: &CoseKey, subid: u32) -> Result<Self, ManyError> {\n\n if subid > MAX_SUBRESOURCE_ID {\n\n Err(ManyError::invalid_identity_subid())\n\n } else {\n\n let pk = Sha3_224::digest(&public_key(key).unwrap().to_vec().unwrap());\n\n Ok(Self(InnerIdentity::subresource_unchecked(pk.into(), subid)))\n\n }\n\n }\n\n\n\n pub const fn is_anonymous(&self) -> bool {\n\n self.0.is_anonymous()\n\n }\n\n pub const fn is_public_key(&self) -> bool {\n\n self.0.is_public_key()\n\n }\n\n pub const fn is_subresource(&self) -> bool {\n", "file_path": "src/many/src/types/identity.rs", "rank": 37, "score": 68688.97734129312 }, { "content": " }\n\n }\n\n .map_err(|_e| minicbor::decode::Error::Message(\"Could not decode identity from bytes\"))\n\n }\n\n}\n\n\n\nimpl<'de> 
Deserialize<'de> for Identity {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::de::Deserializer<'de>,\n\n {\n\n struct Visitor;\n\n impl<'de> serde::de::Visitor<'de> for Visitor {\n\n type Value = Identity;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"identity string or bytes\")\n\n }\n\n\n\n fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E>\n", "file_path": "src/many/src/types/identity.rs", "rank": 38, "score": 68688.66254616289 }, { "content": " }\n\n}\n\n\n\nimpl std::fmt::Display for Identity {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(&self.0.to_string())\n\n }\n\n}\n\n\n\nimpl Encode for Identity {\n\n fn encode<W: Write>(\n\n &self,\n\n e: &mut Encoder<W>,\n\n ) -> Result<(), minicbor::encode::Error<W::Error>> {\n\n e.tag(minicbor::data::Tag::Unassigned(10000))?\n\n .bytes(&self.to_vec())?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 39, "score": 68688.29538274408 }, { "content": " Ok(Self::public_key(slice))\n\n }\n\n }\n\n hi @ 0x80..=0xff => {\n\n if bytes.len() != 32 {\n\n Err(ManyError::invalid_identity())\n\n } else {\n\n let mut hash = [0; 28];\n\n let mut subid = [0; 4];\n\n hash.copy_from_slice(&bytes[1..29]);\n\n subid[0] = hi;\n\n subid[1..].copy_from_slice(&bytes[29..32]);\n\n Ok(Self::subresource_unchecked(hash, u32::from_be_bytes(subid)))\n\n }\n\n }\n\n x => Err(ManyError::invalid_identity_kind(x.to_string())),\n\n }\n\n }\n\n\n\n pub fn from_str(value: &str) -> Result<Self, ManyError> {\n", "file_path": "src/many/src/types/identity.rs", "rank": 40, "score": 68688.24522120327 }, { "content": " } else {\n\n false\n\n }\n\n }\n\n\n\n /// Check that another identity matches this one, ignoring any subresouce IDs.\n\n pub fn matches(&self, other: &Identity) -> bool {\n\n if self.is_anonymous() {\n\n other.is_anonymous()\n\n } else {\n\n // Extract public key hash of both.\n\n self.0.hash() == other.0.hash()\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"raw\")]\n\nimpl Identity {\n\n /// Create an identity from the raw value of a public key hash, without checking\n\n /// its validity.\n", "file_path": "src/many/src/types/identity.rs", "rank": 41, "score": 68687.7227528149 }, { "content": " self.0.is_subresource()\n\n }\n\n\n\n pub const fn subresource_id(&self) -> Option<u32> {\n\n self.0.subresource_id()\n\n }\n\n\n\n pub fn with_subresource_id(&self, subid: u32) -> Result<Self, ManyError> {\n\n if subid > MAX_SUBRESOURCE_ID {\n\n Err(ManyError::invalid_identity_subid())\n\n } else {\n\n Ok(self.with_subresource_id_unchecked(subid))\n\n }\n\n }\n\n\n\n pub const fn with_subresource_id_unchecked(&self, subid: u32) -> Self {\n\n if let Some(h) = self.0.hash() {\n\n Self(InnerIdentity::subresource_unchecked(h, subid))\n\n } else {\n\n Self::anonymous()\n", "file_path": "src/many/src/types/identity.rs", "rank": 42, "score": 68687.66907436724 }, { "content": "const MAX_IDENTITY_BYTE_LEN: usize = 32;\n\nconst SHA_OUTPUT_SIZE: usize = <Sha3_224 as Digest>::OutputSize::USIZE;\n\npub type PublicKeyHash = [u8; SHA_OUTPUT_SIZE];\n\n\n\n/// An identity in the TBD-Verse. 
This could be a server, network, user, DAO, automated\n\n/// process, etc.\n\n#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\n#[must_use]\n\npub struct Identity(InnerIdentity);\n\n\n\nimpl Identity {\n\n pub fn from_bytes(bytes: &[u8]) -> Result<Self, ManyError> {\n\n InnerIdentity::try_from(bytes).map(Self)\n\n }\n\n\n\n pub const fn anonymous() -> Self {\n\n Self(InnerIdentity::anonymous())\n\n }\n\n\n\n pub fn public_key(key: &CoseKey) -> Self {\n", "file_path": "src/many/src/types/identity.rs", "rank": 43, "score": 68687.53084134626 }, { "content": " if !value.starts_with('m') {\n\n return Err(ManyError::invalid_identity_prefix(value[0..0].to_string()));\n\n }\n\n\n\n if &value[1..] == \"aa\" || &value[1..] == \"aaaa\" {\n\n Ok(Self::anonymous())\n\n } else {\n\n let data = &value[..value.len() - 2][1..];\n\n let data = base32::decode(base32::Alphabet::RFC4648 { padding: false }, data).unwrap();\n\n let result = Self::try_from(data.as_slice())?;\n\n\n\n if result.to_string() != value {\n\n Err(ManyError::invalid_identity())\n\n } else {\n\n Ok(result)\n\n }\n\n }\n\n }\n\n\n\n pub const fn to_byte_array(self) -> [u8; MAX_IDENTITY_BYTE_LEN] {\n", "file_path": "src/many/src/types/identity.rs", "rank": 44, "score": 68687.52369509538 }, { "content": " #[inline(always)]\n\n pub fn public_key_raw(hash: PublicKeyHash) -> Self {\n\n Self(InnerIdentity::public_key(hash))\n\n }\n\n\n\n /// Create an identity from the raw value of a public key hash and a subresource\n\n /// id. The hash isn't validated, but the subid is.\n\n #[inline(always)]\n\n pub fn subresource_raw(hash: PublicKeyHash, subid: u32) -> Self {\n\n Self(InnerIdentity::subresource_unchecked(hash, subid))\n\n }\n\n}\n\n\n\nimpl PartialEq<&str> for Identity {\n\n #[allow(clippy::cmp_owned)]\n\n fn eq(&self, other: &&str) -> bool {\n\n self.to_string() == *other\n\n }\n\n}\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 45, "score": 68687.25571725871 }, { "content": " InnerIdentity::from_bytes(v).map_err(E::custom)\n\n }\n\n }\n\n\n\n impl<'de> serde::de::Deserialize<'de> for InnerIdentity {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::de::Deserializer<'de>,\n\n {\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_string(HumanReadableInnerIdentityVisitor)\n\n } else {\n\n deserializer.deserialize_bytes(InnerIdentityVisitor)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n", "file_path": "src/many/src/types/identity.rs", "rank": 46, "score": 68687.24408848585 }, { "content": " impl<'de> serde::ser::Deserialize<'de> for Identity {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::ser::Deserializer<'de>,\n\n {\n\n let inner = InnerIdentity::deserialize(deserializer)?;\n\n Ok(Self(inner))\n\n }\n\n }\n\n\n\n struct HumanReadableInnerIdentityVisitor;\n\n\n\n impl serde::de::Visitor<'_> for HumanReadableInnerIdentityVisitor {\n\n type Value = InnerIdentity;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a textual MANY identity\")\n\n }\n\n\n\n fn visit_string<E>(self, v: String) -> Result<Self::Value, E>\n", "file_path": "src/many/src/types/identity.rs", "rank": 47, "score": 68687.17378531529 }, { "content": "\n\n#[cfg(feature = \"serde\")]\n\nmod serde {\n\n use crate::identity::{Identity, InnerIdentity};\n\n use serde::Deserialize;\n\n use std::fmt::Formatter;\n\n\n\n impl serde::ser::Serialize for Identity {\n\n fn serialize<S>(&self, 
serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::ser::Serializer,\n\n {\n\n if serializer.is_human_readable() {\n\n serializer.serialize_str(&self.0.to_string())\n\n } else {\n\n serializer.serialize_bytes(&self.0.to_vec())\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 48, "score": 68686.60795334367 }, { "content": " self.bytes.to_vec()\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n pub const fn is_anonymous(&self) -> bool {\n\n self.bytes[0] == 0\n\n }\n\n pub const fn is_public_key(&self) -> bool {\n\n self.bytes[0] == 1\n\n }\n\n pub const fn is_subresource(&self) -> bool {\n\n matches!(self.bytes[0], 0x80..=0xFF)\n\n }\n\n\n\n pub const fn subresource_id(&self) -> Option<u32> {\n\n match self.bytes[0] {\n\n x @ 0x80..=0xFF => {\n\n let high = ((x & 0x7F) as u32) << 24;\n", "file_path": "src/many/src/types/identity.rs", "rank": 49, "score": 68686.42971588059 }, { "content": " where\n\n E: serde::de::Error,\n\n {\n\n InnerIdentity::from_str(v.as_str()).map_err(E::custom)\n\n }\n\n }\n\n\n\n struct InnerIdentityVisitor;\n\n\n\n impl serde::de::Visitor<'_> for InnerIdentityVisitor {\n\n type Value = InnerIdentity;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a byte buffer\")\n\n }\n\n\n\n fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n", "file_path": "src/many/src/types/identity.rs", "rank": 50, "score": 68685.81899681085 }, { "content": " .to_ascii_lowercase(),\n\n )\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for InnerIdentity {\n\n type Error = ManyError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n InnerIdentity::from_str(value.as_str())\n\n }\n\n}\n\n\n\nimpl TryFrom<&[u8]> for InnerIdentity {\n\n type Error = ManyError;\n\n\n\n fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {\n\n Self::from_bytes(bytes)\n\n }\n\n}\n", "file_path": "src/many/src/types/identity.rs", "rank": 51, "score": 68685.79753965008 }, { "content": "// TODO: Implement a SubresourceId type. 
Best way here is to use num-* crates.\n\nuse crate::cose_helpers::public_key;\n\nuse crate::message::ManyError;\n\nuse coset::{CborSerializable, CoseKey};\n\nuse minicbor::data::Type;\n\nuse minicbor::encode::Write;\n\nuse minicbor::{Decode, Decoder, Encode, Encoder};\n\nuse serde::Deserialize;\n\nuse sha3::digest::generic_array::typenum::Unsigned;\n\nuse sha3::{Digest, Sha3_224};\n\nuse std::convert::TryFrom;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::str::FromStr;\n\n\n\npub mod cose;\n\npub use cose::CoseKeyIdentity;\n\n\n\n/// Subresource IDs are 31 bit integers.\n\npub const MAX_SUBRESOURCE_ID: u32 = 0x7FFF_FFFF;\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 52, "score": 68685.57468724319 }, { "content": " _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for InnerIdentity {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let data = self.to_vec();\n\n let mut crc = crc_any::CRCu16::crc16();\n\n crc.digest(&data);\n\n\n\n let crc = crc.get_crc().to_be_bytes();\n\n write!(\n\n f,\n\n \"m{}{}\",\n\n base32::encode(base32::Alphabet::RFC4648 { padding: false }, &data)\n\n .to_ascii_lowercase(),\n\n base32::encode(base32::Alphabet::RFC4648 { padding: false }, &crc)\n\n .get(0..2)\n\n .unwrap()\n", "file_path": "src/many/src/types/identity.rs", "rank": 53, "score": 68683.99410240649 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn matches_key() {\n\n let id = eddsa_identity();\n\n assert!(id.identity.matches_key(id.key.as_ref()));\n\n }\n\n}\n", "file_path": "src/many/src/types/identity.rs", "rank": 54, "score": 68683.96697455617 }, { "content": " // Anything else if by default inequal.\n\n (_, _) => false,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for InnerIdentity {\n\n fn default() -> Self {\n\n InnerIdentity::anonymous()\n\n }\n\n}\n\n\n\nimpl InnerIdentity {\n\n pub const fn anonymous() -> Self {\n\n Self {\n\n bytes: [0; MAX_IDENTITY_BYTE_LEN],\n\n }\n\n }\n\n\n\n pub const fn public_key(hash: [u8; SHA_OUTPUT_SIZE]) -> Self {\n", "file_path": "src/many/src/types/identity.rs", "rank": 55, "score": 68682.55332942304 }, { "content": " let mut bytes = [0; MAX_IDENTITY_BYTE_LEN];\n\n bytes[0] = 1;\n\n let mut len = SHA_OUTPUT_SIZE;\n\n while len > 0 {\n\n len -= 1;\n\n bytes[1 + len] = hash[len];\n\n }\n\n Self { bytes }\n\n }\n\n\n\n pub(crate) const fn subresource_unchecked(hash: [u8; SHA_OUTPUT_SIZE], id: u32) -> Self {\n\n // Get a public key and add the resource id.\n\n let mut bytes = Self::public_key(hash).bytes;\n\n bytes[0] = 0x80 + ((id & 0x7F00_0000) >> 24) as u8;\n\n bytes[(SHA_OUTPUT_SIZE + 1)] = ((id & 0x00FF_0000) >> 16) as u8;\n\n bytes[(SHA_OUTPUT_SIZE + 2)] = ((id & 0x0000_FF00) >> 8) as u8;\n\n bytes[(SHA_OUTPUT_SIZE + 3)] = (id & 0x0000_00FF) as u8;\n\n Self { bytes }\n\n }\n\n\n", "file_path": "src/many/src/types/identity.rs", "rank": 56, "score": 68682.39234055887 }, { "content": " }\n\n }\n\n\n\n pub const fn can_sign(&self) -> bool {\n\n self.is_public_key() || self.is_subresource()\n\n }\n\n\n\n pub const fn can_be_source(&self) -> bool {\n\n self.is_anonymous() || self.is_public_key() || self.is_subresource()\n\n }\n\n\n\n pub const fn can_be_dest(&self) -> bool {\n\n self.is_public_key() || self.is_subresource()\n\n }\n\n\n\n pub fn to_vec(self) -> Vec<u8> {\n\n self.0.to_vec()\n\n }\n\n\n\n pub fn to_byte_array(self) -> [u8; MAX_IDENTITY_BYTE_LEN] {\n", "file_path": "src/many/src/types/identity.rs", "rank": 57, "score": 68682.309927445 }, { "content": " use crate::types::identity::cose::tests::{ecdsa_256_identity, 
eddsa_identity};\n\n use crate::Identity;\n\n use std::str::FromStr;\n\n\n\n pub fn identity(seed: u32) -> Identity {\n\n #[rustfmt::skip]\n\n let bytes = [\n\n 1u8,\n\n 0, 0, 0, 0,\n\n 0, 0, 0, 0,\n\n 0, 0, 0, 0,\n\n 0, 0, 0, 0,\n\n 0, 0, 0, 0,\n\n 0, 0, 0, 0,\n\n (seed >> 24) as u8, (seed >> 16) as u8, (seed >> 8) as u8, (seed & 0xFF) as u8\n\n ];\n\n Identity::from_bytes(&bytes).unwrap()\n\n }\n\n\n\n #[test]\n", "file_path": "src/many/src/types/identity.rs", "rank": 58, "score": 68682.21008757297 }, { "content": " let mut low = (self.bytes[SHA_OUTPUT_SIZE + 1] as u32) << 16;\n\n low += (self.bytes[SHA_OUTPUT_SIZE + 2] as u32) << 8;\n\n low += self.bytes[SHA_OUTPUT_SIZE + 3] as u32;\n\n Some(high + low)\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n pub const fn hash(&self) -> Option<[u8; SHA_OUTPUT_SIZE]> {\n\n match self.bytes[0] {\n\n 1 | 0x80..=0xFF => {\n\n let mut hash = [0; SHA_OUTPUT_SIZE];\n\n let mut len = SHA_OUTPUT_SIZE;\n\n while len > 0 {\n\n len -= 1;\n\n hash[len] = self.bytes[1 + len];\n\n }\n\n Some(hash)\n\n }\n", "file_path": "src/many/src/types/identity.rs", "rank": 59, "score": 68682.00384445349 }, { "content": " &hex::decode(\"80c8aead03f915f128f0fa7ff696c656eaa93db87bd9aa73df693acb22000002\")\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(a, b);\n\n assert_eq!(b.with_subresource_id(2).unwrap(), c);\n\n }\n\n\n\n proptest::proptest! {\n\n #[test]\n\n fn subresource_id_fuzzy(subid: u32) {\n\n let a = Identity::from_str(\"mahek5lid7ek7ckhq7j77nfwgk3vkspnyppm2u467ne5mwiqys\")\n\n .unwrap()\n\n .with_subresource_id(subid);\n\n\n\n if let Ok(id) = a {\n\n let b = Identity::from_str(&id.to_string());\n\n assert_eq!(a, b);\n\n } else {\n", "file_path": "src/many/src/types/identity.rs", "rank": 60, "score": 68681.673467639 }, { "content": " self.0.to_byte_array()\n\n }\n\n\n\n pub fn matches_key(&self, key: Option<&CoseKey>) -> bool {\n\n if self.is_anonymous() {\n\n key.is_none()\n\n } else if self.is_public_key() || self.is_subresource() {\n\n if let Some(cose_key) = key {\n\n let key_hash: PublicKeyHash =\n\n Sha3_224::digest(&public_key(cose_key).unwrap().to_vec().unwrap()).into();\n\n\n\n self.0\n\n .hash()\n\n .unwrap() // TODO: CAN THIS FAIL?\n\n .iter()\n\n .zip(key_hash.iter())\n\n .all(|(a, b)| a == b)\n\n } else {\n\n false\n\n }\n", "file_path": "src/many/src/types/identity.rs", "rank": 61, "score": 68681.28232703345 }, { "content": " where\n\n E: serde::de::Error,\n\n {\n\n Identity::from_str(v).map_err(E::custom)\n\n }\n\n\n\n fn visit_borrowed_bytes<E>(self, v: &'de [u8]) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Identity::from_bytes(v).map_err(E::custom)\n\n }\n\n }\n\n\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_str(Visitor)\n\n } else {\n\n deserializer.deserialize_byte_buf(Visitor)\n\n }\n\n }\n", "file_path": "src/many/src/types/identity.rs", "rank": 62, "score": 68680.29174569948 }, { "content": " fn can_read_anonymous() {\n\n let a = Identity::anonymous();\n\n let a_str = a.to_string();\n\n let a2 = Identity::from_str(&a_str).unwrap();\n\n\n\n assert_eq!(a, a2);\n\n }\n\n\n\n #[test]\n\n fn byte_array_conversion() {\n\n let a = Identity::anonymous();\n\n let b = identity(1);\n\n let c = identity(2);\n\n\n\n assert_ne!(a.to_string(), b.to_string());\n\n assert_ne!(b.to_string(), c.to_string());\n\n assert_ne!(a.to_vec(), b.to_vec());\n\n assert_ne!(b.to_vec(), c.to_vec());\n\n\n\n assert_eq!(Identity::from_str(&a.to_string()), Ok(a));\n", "file_path": "src/many/src/types/identity.rs", "rank": 
63, "score": 68679.69722217963 }, { "content": " assert_eq!(subid.leading_zeros(), 0);\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn from_pem_eddsa() {\n\n let id = eddsa_identity();\n\n assert_eq!(\n\n id.identity,\n\n \"maffbahksdwaqeenayy2gxke32hgb7aq4ao4wt745lsfs6wijp\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn from_pem_ecdsa() {\n\n let id = ecdsa_256_identity();\n\n assert_eq!(\n\n id.identity,\n\n \"magcncsncbfmfdvezjmfick47pwgefjnm6zcaghu7ffe3o3qtf\"\n", "file_path": "src/many/src/types/identity.rs", "rank": 64, "score": 68679.59180685422 }, { "content": " assert_eq!(Identity::from_str(&b.to_string()), Ok(b));\n\n assert_eq!(Identity::from_str(&c.to_string()), Ok(c));\n\n }\n\n\n\n #[test]\n\n fn textual_format_1() {\n\n let a = Identity::from_str(\"mahek5lid7ek7ckhq7j77nfwgk3vkspnyppm2u467ne5mwiqys\").unwrap();\n\n let b = Identity::from_bytes(\n\n &hex::decode(\"01c8aead03f915f128f0fa7ff696c656eaa93db87bd9aa73df693acb22\").unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(a, b);\n\n }\n\n\n\n #[test]\n\n fn textual_format_2() {\n\n let a =\n\n Identity::from_str(\"mqbfbahksdwaqeenayy2gxke32hgb7aq4ao4wt745lsfs6wiaaaaqnz\").unwrap();\n\n let b = Identity::from_bytes(\n", "file_path": "src/many/src/types/identity.rs", "rank": 65, "score": 68678.81309162361 }, { "content": " &hex::decode(\"804a101d521d810211a0c6346ba89bd1cc1f821c03b969ff9d5c8b2f59000001\")\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(a, b);\n\n }\n\n\n\n #[test]\n\n fn subresource_1() {\n\n let a = Identity::from_str(\"mahek5lid7ek7ckhq7j77nfwgk3vkspnyppm2u467ne5mwiqys\")\n\n .unwrap()\n\n .with_subresource_id(1)\n\n .unwrap();\n\n let b = Identity::from_bytes(\n\n &hex::decode(\"80c8aead03f915f128f0fa7ff696c656eaa93db87bd9aa73df693acb22000001\")\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n let c = Identity::from_bytes(\n", "file_path": "src/many/src/types/identity.rs", "rank": 66, "score": 68677.8613715825 }, { "content": " self.bytes\n\n }\n\n\n\n #[rustfmt::skip]\n\n pub fn to_vec(self) -> Vec<u8> {\n\n // This makes sure we actually have a Vec<u8> that's smaller than 32 bytes if\n\n // it can be.\n\n match self.bytes[0] {\n\n 0 => vec![0],\n\n 1 => {\n\n let pk = &self.bytes[1..=SHA_OUTPUT_SIZE];\n\n vec![\n\n 1,\n\n pk[ 0], pk[ 1], pk[ 2], pk[ 3], pk[ 4], pk[ 5], pk[ 6], pk[ 7],\n\n pk[ 8], pk[ 9], pk[10], pk[11], pk[12], pk[13], pk[14], pk[15],\n\n pk[16], pk[17], pk[18], pk[19], pk[20], pk[21], pk[22], pk[23],\n\n pk[24], pk[25], pk[26], pk[27],\n\n ]\n\n }\n\n 0x80..=0xFF => {\n", "file_path": "src/many/src/types/identity.rs", "rank": 67, "score": 68675.15999840794 }, { "content": "/// Build an ECDSA CoseKey\n\n///\n\n/// # Arguments\n\n///\n\n/// * `(x, y)` - Public key\n\n/// * `d` - Private key\n\npub fn ecdsa_cose_key((x, y): (Vec<u8>, Vec<u8>), d: Option<Vec<u8>>) -> CoseKey {\n\n let mut params: Vec<(Label, Value)> = Vec::from([\n\n (\n\n Label::Int(coset::iana::Ec2KeyParameter::Crv as i64),\n\n Value::from(coset::iana::EllipticCurve::P_256 as u64),\n\n ),\n\n (\n\n Label::Int(coset::iana::Ec2KeyParameter::X as i64),\n\n Value::Bytes(x),\n\n ),\n\n (\n\n Label::Int(coset::iana::Ec2KeyParameter::Y as i64),\n\n Value::Bytes(y),\n\n ),\n\n ]);\n\n let mut key_ops: BTreeSet<KeyOperation> =\n\n BTreeSet::from([KeyOperation::Assigned(coset::iana::KeyOperation::Verify)]);\n\n\n\n if let Some(d) = d {\n\n params.push((\n", "file_path": "src/many/src/cose_helpers.rs", "rank": 68, "score": 67582.54607014598 }, { "content": "\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n let identity: Identity = 
Identity::try_from(value).map_err(|e| e.to_string())?;\n\n if identity.is_anonymous() {\n\n Ok(Self {\n\n identity,\n\n key: None,\n\n hsm: false,\n\n })\n\n } else {\n\n Err(\"Identity must be anonymous\".to_string())\n\n }\n\n }\n\n}\n\n\n\nimpl AsRef<Identity> for CoseKeyIdentity {\n\n fn as_ref(&self) -> &Identity {\n\n &self.identity\n\n }\n\n}\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 69, "score": 66346.2340182087 }, { "content": " }\n\n }\n\n\n\n pub fn from_hsm(mechanism: HsmMechanismType) -> Result<Self, String> {\n\n let hsm = Hsm::get_instance().map_err(|e| e.to_string())?;\n\n let (raw_points, _) = hsm.ec_info(mechanism).map_err(|e| e.to_string())?;\n\n trace!(\"Creating NIST P-256 SEC1 encoded point\");\n\n let points = p256::EncodedPoint::from_bytes(raw_points).map_err(|e| e.to_string())?;\n\n\n\n let cose_key = ecdsa_cose_key(\n\n (points.x().unwrap().to_vec(), points.y().unwrap().to_vec()),\n\n None,\n\n );\n\n Self::from_key(cose_key, true)\n\n }\n\n\n\n pub fn from_pem(pem: &str) -> Result<Self, String> {\n\n let doc = pkcs8::PrivateKeyDocument::from_pem(pem).unwrap();\n\n let decoded = doc.decode();\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 70, "score": 66341.66341437487 }, { "content": "impl Signer<CoseKeyIdentitySignature> for CoseKeyIdentity {\n\n fn try_sign(&self, msg: &[u8]) -> Result<CoseKeyIdentitySignature, Error> {\n\n if let Some(cose_key) = self.key.as_ref() {\n\n match cose_key.alg {\n\n None => Err(Error::new()),\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::ES256)) => {\n\n if self.hsm {\n\n let hsm = Hsm::get_instance().map_err(|e| {\n\n error!(\"HSM mutex poisoned {}\", e);\n\n Error::new()\n\n })?;\n\n\n\n // TODO: This operation should be done on the HSM, but cryptoki doesn't support it yet\n\n // See https://github.com/parallaxsecond/rust-cryptoki/issues/88\n\n trace!(\"Digesting message using SHA256 (CPU)\");\n\n let digest = sha2::Sha256::digest(msg);\n\n\n\n trace!(\"Singning message using HSM\");\n\n let msg_signature = hsm\n\n .sign(digest.as_slice(), &HsmMechanism::Ecdsa)\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 71, "score": 66341.63617786777 }, { "content": " identity: Identity::anonymous(),\n\n key: None,\n\n hsm: false,\n\n }\n\n }\n\n\n\n pub(crate) fn from_key(key: CoseKey, hsm: bool) -> Result<Self, String> {\n\n let identity = Identity::public_key(&key);\n\n if identity.is_anonymous() {\n\n Ok(Self {\n\n identity,\n\n key: None,\n\n hsm,\n\n })\n\n } else {\n\n Ok(Self {\n\n identity,\n\n key: Some(key),\n\n hsm,\n\n })\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 72, "score": 66341.59369659232 }, { "content": "impl Debug for CoseKeyIdentitySignature {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"CoseKeyIdentitySignature(0x{})\",\n\n hex::encode(&self.bytes)\n\n )\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for CoseKeyIdentitySignature {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n}\n\n\n\nimpl Signature for CoseKeyIdentitySignature {\n\n fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {\n\n Ok(Self {\n\n bytes: bytes.to_vec(),\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 73, "score": 66340.57660731488 }, { "content": "\n\nimpl Verifier<CoseKeyIdentitySignature> for CoseKeyIdentity {\n\n fn verify(&self, msg: &[u8], signature: &CoseKeyIdentitySignature) -> Result<(), Error> {\n\n if let Some(cose_key) = self.key.as_ref() {\n\n match cose_key.alg {\n\n None => 
Err(Error::new()),\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::ES256)) => {\n\n let params = BTreeMap::from_iter(cose_key.params.clone().into_iter());\n\n let x = params\n\n .get(&Label::Int(Ec2KeyParameter::X.to_i64()))\n\n .ok_or_else(Error::new)?\n\n .as_bytes()\n\n .ok_or_else(Error::new)?\n\n .as_slice();\n\n let y = params\n\n .get(&Label::Int(Ec2KeyParameter::Y.to_i64()))\n\n .ok_or_else(Error::new)?\n\n .as_bytes()\n\n .ok_or_else(Error::new)?\n\n .as_slice();\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 74, "score": 66338.17432414292 }, { "content": " pub fn ecdsa_256_identity() -> CoseKeyIdentity {\n\n let pem = \"-----BEGIN PRIVATE KEY-----\\n\\\n\n MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgNsLo2hVPeUZEOPCw\\n\\\n\n lLQbhLpwjUbt9BHXKCFMY0i+Wm6hRANCAATyM3MzaNX4ELK6bzqgNC/ODvGOUd60\\n\\\n\n 7A4yltVQLNKUxtTywYy2MIPV8ls1BlUp40zYmQfxCL3VANvZ62ofaMPv\\n\\\n\n -----END PRIVATE KEY-----\";\n\n\n\n CoseKeyIdentity::from_pem(pem).unwrap()\n\n }\n\n\n\n #[test]\n\n fn ecdsa_256_sign_verify() -> Result<(), Error> {\n\n let id = ecdsa_256_identity();\n\n\n\n let signature = id.sign(MSG);\n\n id.verify(MSG, &signature)\n\n }\n\n\n\n #[test]\n\n fn eddsa_256_sign_verify() -> Result<(), Error> {\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 75, "score": 66337.58415050808 }, { "content": " })\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct CoseKeyIdentity {\n\n pub identity: Identity,\n\n pub key: Option<CoseKey>,\n\n pub hsm: bool,\n\n}\n\n\n\nimpl Default for CoseKeyIdentity {\n\n fn default() -> Self {\n\n Self::anonymous()\n\n }\n\n}\n\n\n\nimpl CoseKeyIdentity {\n\n pub fn anonymous() -> Self {\n\n Self {\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 76, "score": 66337.24130048444 }, { "content": "use crate::cose_helpers::{ecdsa_cose_key, eddsa_cose_key, public_key};\n\nuse crate::hsm::{Hsm, HsmMechanism, HsmMechanismType};\n\nuse crate::Identity;\n\nuse coset::iana::{self, Ec2KeyParameter, EnumI64, OkpKeyParameter};\n\nuse coset::{Algorithm, CoseKey, KeyOperation, Label};\n\nuse ed25519_dalek::PublicKey;\n\nuse p256::pkcs8::FromPrivateKey;\n\nuse pkcs8::der::Document;\n\nuse sha2::Digest;\n\nuse signature::{Error, Signature, Signer, Verifier};\n\nuse std::collections::BTreeMap;\n\nuse std::convert::TryFrom;\n\nuse std::fmt::{Debug, Formatter};\n\nuse tracing::{error, trace};\n\n\n\n#[derive(Clone, Eq, PartialEq)]\n\npub struct CoseKeyIdentitySignature {\n\n bytes: Vec<u8>,\n\n}\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 77, "score": 66336.04599264597 }, { "content": " let pk = sk.public_key();\n\n let points: p256::EncodedPoint = pk.into();\n\n\n\n let cose_key = ecdsa_cose_key(\n\n (points.x().unwrap().to_vec(), points.y().unwrap().to_vec()),\n\n Some(sk.to_bytes().to_vec()),\n\n );\n\n Self::from_key(cose_key, false)\n\n } else {\n\n return Err(format!(\"Unknown algorithm OID: {}\", decoded.algorithm.oid));\n\n }\n\n }\n\n\n\n pub fn public_key(&self) -> Option<CoseKey> {\n\n public_key(self.key.as_ref()?).ok()\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for CoseKeyIdentity {\n\n type Error = String;\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 78, "score": 66334.48417265741 }, { "content": " .map_err(|e| {\n\n error!(\"Unable to sign message using HSM: {}\", e);\n\n Error::new()\n\n })?;\n\n trace!(\"Message signature is {}\", hex::encode(&msg_signature));\n\n\n\n trace!(\"Converting message signature to P256 structure\");\n\n let signature = 
p256::ecdsa::Signature::try_from(msg_signature.as_slice())\n\n .expect(\"Can't create P256 signature from message signature\");\n\n\n\n CoseKeyIdentitySignature::from_bytes(signature.as_ref())\n\n } else {\n\n if !cose_key\n\n .key_ops\n\n .contains(&KeyOperation::Assigned(iana::KeyOperation::Sign))\n\n {\n\n return Err(Error::new());\n\n }\n\n\n\n let params = BTreeMap::from_iter(cose_key.params.clone().into_iter());\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 79, "score": 66333.8338287133 }, { "content": " }\n\n // TODO: Raise a \"Algorithm not supported\" error\n\n _ => Err(Error::new()),\n\n }\n\n } else {\n\n Err(Error::new())\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use ed25519_dalek::Keypair;\n\n use rand_07::rngs::OsRng;\n\n\n\n use super::*;\n\n\n\n // MSG == FOOBAR\n\n const MSG: &[u8] = &[70, 79, 79, 66, 65, 82];\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 80, "score": 66332.5169320297 }, { "content": " pub fn generate_random_eddsa_identity() -> CoseKeyIdentity {\n\n let mut csprng = OsRng {};\n\n let keypair: Keypair = Keypair::generate(&mut csprng);\n\n\n\n let cose_key = eddsa_cose_key(\n\n keypair.public.to_bytes().to_vec(),\n\n Some(keypair.secret.to_bytes().to_vec()),\n\n );\n\n\n\n CoseKeyIdentity::from_key(cose_key, false).unwrap()\n\n }\n\n\n\n pub fn eddsa_identity() -> CoseKeyIdentity {\n\n let pem = \"-----BEGIN PRIVATE KEY-----\\n\\\n\n MC4CAQAwBQYDK2VwBCIEIHcoTY2RYa48O8ONAgfxEw+15MIyqSat0/QpwA1YxiPD\\n\\\n\n -----END PRIVATE KEY-----\";\n\n\n\n CoseKeyIdentity::from_pem(pem).unwrap()\n\n }\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 81, "score": 66331.47971432393 }, { "content": " .map_err(|e| {\n\n error!(\"Public key does not deserialize: {}\", e);\n\n Error::new()\n\n })?;\n\n public_key\n\n .verify_strict(msg, &ed25519::Signature::from_bytes(&signature.bytes)?)\n\n .map_err(|e| {\n\n error!(\"Verification failed (ed25519): {}\", e);\n\n Error::new()\n\n })\n\n }\n\n // TODO: Raise a \"Algorithm not supported\" error\n\n _ => Err(Error::new()),\n\n }\n\n } else {\n\n Err(Error::new())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 82, "score": 66329.64100206865 }, { "content": " let d = params\n\n .get(&Label::Int(Ec2KeyParameter::D.to_i64()))\n\n .ok_or_else(Error::new)?\n\n .as_bytes()\n\n .ok_or_else(Error::new)?\n\n .as_slice();\n\n\n\n let secret_key =\n\n p256::SecretKey::from_bytes(d).map_err(|_| Error::new())?;\n\n let signing_key: p256::ecdsa::SigningKey = secret_key.into();\n\n\n\n let signature: p256::ecdsa::Signature = signing_key.sign(msg);\n\n CoseKeyIdentitySignature::from_bytes(signature.as_ref())\n\n }\n\n }\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::EdDSA)) => {\n\n if !cose_key\n\n .key_ops\n\n .contains(&KeyOperation::Assigned(iana::KeyOperation::Sign))\n\n {\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 83, "score": 66329.59185977333 }, { "content": " let id = eddsa_identity();\n\n\n\n let signature = id.sign(MSG);\n\n id.verify(MSG, &signature)\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn fail_ecdsa_512() {\n\n let pem = \"-----BEGIN PRIVATE KEY-----\\n\\\n\n MIHuAgEAMBAGByqGSM49AgEGBSuBBAAjBIHWMIHTAgEBBEIB2zGGfgHhqK9J8Eug\\n\\\n\n Sb5pnwnRA3OZ5Ks4eXEJJOeqeZu+8vYZbNuK9IY78JcmAI+syc3at1eVPtcAtTUr\\n\\\n\n qSTAkIehgYkDgYYABABVfJDnPyVOY0N1shZaB5kBPM6JcEb3BZRT8MR4qBp0zXwM\\n\\\n\n pyh7pdD9wxqsCYQVxl9FbiJSQZXzZTwmXsmTzO8X5AAS52WLB+7Ch+ddQW5UEqj6\\n\\\n\n 
Tptw8tbMJhJlD4IH7SDevF+gNetMicMQ1fIFyfCbaK0xxVoLwKJvtp7MIV46IZMC\\n\\\n\n aA==\\n\\\n\n -----END PRIVATE KEY-----\";\n\n CoseKeyIdentity::from_pem(pem).unwrap();\n\n }\n\n\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 84, "score": 66329.04005879565 }, { "content": " return Err(Error::new());\n\n }\n\n let params = BTreeMap::from_iter(cose_key.params.clone().into_iter());\n\n let x = params\n\n .get(&Label::Int(OkpKeyParameter::X.to_i64()))\n\n .ok_or_else(Error::new)?\n\n .as_bytes()\n\n .ok_or_else(Error::new)?\n\n .as_slice();\n\n let d = params\n\n .get(&Label::Int(OkpKeyParameter::D.to_i64()))\n\n .ok_or_else(Error::new)?\n\n .as_bytes()\n\n .ok_or_else(Error::new)?\n\n .as_slice();\n\n\n\n let kp = ed25519_dalek::Keypair::from_bytes(&vec![d, x].concat())\n\n .map_err(Error::from_source)?;\n\n let s = kp.sign(msg);\n\n CoseKeyIdentitySignature::from_bytes(&s.to_bytes())\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 85, "score": 66328.5842185084 }, { "content": " let points =\n\n p256::EncodedPoint::from_affine_coordinates(x.into(), y.into(), false);\n\n\n\n let verify_key = p256::ecdsa::VerifyingKey::from_encoded_point(&points)?;\n\n let signature = p256::ecdsa::Signature::from_der(&signature.bytes)\n\n .or_else(|_| p256::ecdsa::Signature::from_bytes(&signature.bytes))?;\n\n verify_key.verify(msg, &signature).map_err(|e| {\n\n error!(\"Key verify failed: {}\", e);\n\n Error::new()\n\n })\n\n }\n\n Some(Algorithm::Assigned(coset::iana::Algorithm::EdDSA)) => {\n\n let params = BTreeMap::from_iter(cose_key.params.clone().into_iter());\n\n let x = params\n\n .get(&Label::Int(OkpKeyParameter::X.to_i64()))\n\n .ok_or_else(Error::new)?;\n\n\n\n let public_key = ed25519_dalek::PublicKey::from_bytes(\n\n x.as_bytes().ok_or_else(Error::new)?.as_slice(),\n\n )\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 86, "score": 66327.55001516017 }, { "content": " if decoded.algorithm.oid == pkcs8::ObjectIdentifier::new(\"1.3.101.112\") {\n\n // Ed25519\n\n // Remove the 0420 header that's in all private keys in pkcs8 for some reason.\n\n let sk = ed25519_dalek::SecretKey::from_bytes(&decoded.private_key[2..])\n\n .map_err(|e| e.to_string())?;\n\n let pk: PublicKey = (&sk).into();\n\n let keypair: ed25519_dalek::Keypair = ed25519_dalek::Keypair {\n\n secret: sk,\n\n public: pk,\n\n };\n\n let keypair = ed25519_dalek::Keypair::from_bytes(&keypair.to_bytes()).unwrap();\n\n\n\n let cose_key = eddsa_cose_key(\n\n keypair.public.to_bytes().to_vec(),\n\n Some(keypair.secret.to_bytes().to_vec()),\n\n );\n\n Self::from_key(cose_key, false)\n\n } else if decoded.algorithm.oid == pkcs8::ObjectIdentifier::new(\"1.2.840.10045.2.1\") {\n\n // ECDSA\n\n let sk = p256::SecretKey::from_pkcs8_pem(pem).unwrap();\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 87, "score": 66325.09206610975 }, { "content": " #[test]\n\n #[should_panic]\n\n fn fail_ecdsa_384() {\n\n let pem = \"-----BEGIN PRIVATE KEY-----\\n\\\n\n MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDAo/RAjCOzB1SklJw3K\\n\\\n\n ASQqyjtuVQv7hruJgoy7EotHqD7kFS8c9dyOuoaNyx0V9HChZANiAAQil9Mt9nV4\\n\\\n\n LDxECgIOQvJJd3UcP1d2rTcBY8XMQDl51gLCvCp9c3v1tz9I/hRCEQcH/d96mNHn\\n\\\n\n SigsOU15Tt1NMHHgrucDBMeDrMZ+uUIDdZbfpvvh0gCtvmvvH5FLs/Y=\\n\\\n\n -----END PRIVATE KEY-----\";\n\n CoseKeyIdentity::from_pem(pem).unwrap();\n\n }\n\n}\n", "file_path": "src/many/src/types/identity/cose.rs", "rank": 88, "score": 66322.73155333204 }, { "content": "#[derive(Deserialize)]\n\n#[cfg_attr(test, 
derive(Serialize))]\n\nstruct ClientData {\n\n challenge: String,\n\n origin: String,\n\n r#type: String,\n\n}\n\n\n\nimpl CoseSign1RequestMessage {\n\n pub fn get_keyset(&self) -> Option<CoseKeySet> {\n\n let keyset = self\n\n .sign1\n\n .protected\n\n .header\n\n .rest\n\n .iter()\n\n .find(|(k, _)| k == &Label::Text(\"keyset\".to_string()))?\n\n .1\n\n .clone();\n\n\n\n if let Value::Bytes(ref bytes) = keyset {\n\n CoseKeySet::from_slice(bytes).ok()\n", "file_path": "src/many/src/message.rs", "rank": 89, "score": 57995.30674147673 }, { "content": "#[derive(Parser)]\n\nstruct Opts {\n\n /// Increase output logging verbosity to DEBUG level.\n\n #[clap(short, long, parse(from_occurrences))]\n\n verbose: i8,\n\n\n\n /// Suppress all output logging. Can be used multiple times to suppress more.\n\n #[clap(short, long, parse(from_occurrences))]\n\n quiet: i8,\n\n\n\n #[clap(subcommand)]\n\n subcommand: SubCommand,\n\n}\n\n\n", "file_path": "src/many-cli/src/main.rs", "rank": 90, "score": 57995.30674147673 }, { "content": "fn message(\n\n s: Url,\n\n to: Identity,\n\n key: CoseKeyIdentity,\n\n method: String,\n\n data: Vec<u8>,\n\n r#async: bool,\n\n) -> Result<(), anyhow::Error> {\n\n let client = ManyClient::new(s, to, key).unwrap();\n\n let response = client.call_raw(method, &data)?;\n\n\n\n show_response(response, client, r#async)\n\n}\n\n\n", "file_path": "src/many-cli/src/main.rs", "rank": 91, "score": 57609.10323207027 }, { "content": "fn main() {\n\n let Opts {\n\n verbose,\n\n quiet,\n\n subcommand,\n\n } = Opts::parse();\n\n let verbose_level = 2 + verbose - quiet;\n\n let log_level = match verbose_level {\n\n x if x > 3 => LevelFilter::TRACE,\n\n 3 => LevelFilter::DEBUG,\n\n 2 => LevelFilter::INFO,\n\n 1 => LevelFilter::WARN,\n\n 0 => LevelFilter::ERROR,\n\n x if x < 0 => LevelFilter::OFF,\n\n _ => unreachable!(),\n\n };\n\n tracing_subscriber::fmt().with_max_level(log_level).init();\n\n\n\n match subcommand {\n\n SubCommand::Id(o) => {\n", "file_path": "src/many-cli/src/main.rs", "rank": 92, "score": 57609.10323207027 }, { "content": "type TokenAmountStorage = num_bigint::BigUint;\n\n\n\n#[repr(transparent)]\n\n#[derive(Debug, Default, Hash, Clone, Ord, PartialOrd, Eq, PartialEq)]\n\npub struct TokenAmount(TokenAmountStorage);\n\n\n\nimpl TokenAmount {\n\n pub fn zero() -> Self {\n\n Self(0u8.into())\n\n }\n\n\n\n pub fn is_zero(&self) -> bool {\n\n self.0 == 0u8.into()\n\n }\n\n\n\n pub fn to_vec(&self) -> Vec<u8> {\n\n self.0.to_bytes_be()\n\n }\n\n}\n\n\n", "file_path": "src/many/src/types/ledger.rs", "rank": 93, "score": 57271.43560008494 }, { "content": "#[derive(Parser)]\n\nstruct ServerOpt {\n\n /// The location of a PEM file for the identity of this server.\n\n #[clap(long)]\n\n pem: PathBuf,\n\n\n\n /// The address and port to bind to for the MANY Http server.\n\n #[clap(long, short, default_value = \"127.0.0.1:8000\")]\n\n addr: SocketAddr,\n\n}\n\n\n", "file_path": "src/many-cli/src/main.rs", "rank": 94, "score": 56862.574609182644 }, { "content": "struct MessageOpt {\n\n /// A pem file to sign the message. 
If this is omitted, the message will be anonymous.\n\n #[clap(long)]\n\n pem: Option<PathBuf>,\n\n\n\n /// Timestamp.\n\n #[clap(long)]\n\n timestamp: Option<String>,\n\n\n\n /// If true, prints out the hex value of the message bytes.\n\n #[clap(long, conflicts_with(\"base64\"))]\n\n hex: bool,\n\n\n\n /// If true, prints out the base64 value of the message bytes.\n\n #[clap(long, conflicts_with(\"hex\"))]\n\n base64: bool,\n\n\n\n /// Show the async token and exit right away. By default, will poll for the\n\n /// result of the async operation.\n\n #[clap(long)]\n", "file_path": "src/many-cli/src/main.rs", "rank": 95, "score": 56862.574609182644 }, { "content": "fn show_response(\n\n response: ResponseMessage,\n\n client: ManyClient,\n\n r#async: bool,\n\n) -> Result<(), anyhow::Error> {\n\n let ResponseMessage {\n\n data, attributes, ..\n\n } = response;\n\n\n\n let payload = data?;\n\n if payload.is_empty() {\n\n let attr = attributes.get::<AsyncAttribute>().unwrap();\n\n info!(\"Async token: {}\", hex::encode(&attr.token));\n\n\n\n // Allow eprint/ln for showing the progress bar, when we're interactive.\n\n #[allow(clippy::print_stderr)]\n\n fn progress(str: &str, done: bool) {\n\n if atty::is(atty::Stream::Stderr) {\n\n if done {\n\n eprintln!(\"{}\", str);\n", "file_path": "src/many-cli/src/main.rs", "rank": 96, "score": 56477.58713860362 }, { "content": "fn encode_cose_sign1_from_payload(\n\n payload: Vec<u8>,\n\n cose_key: &CoseKeyIdentity,\n\n) -> Result<CoseSign1, String> {\n\n let mut protected = HeaderBuilder::new()\n\n .algorithm(Algorithm::EdDSA)\n\n .key_id(cose_key.identity.to_vec());\n\n\n\n // Add the keyset to the headers.\n\n if let Some(key) = cose_key.key.as_ref() {\n\n let mut keyset = CoseKeySet::default();\n\n let mut key_public = public_key(key)?;\n\n key_public.key_id = cose_key.identity.to_vec();\n\n keyset.0.push(key_public);\n\n\n\n protected = protected.text_value(\n\n \"keyset\".to_string(),\n\n Value::Bytes(keyset.to_vec().map_err(|e| e.to_string())?),\n\n );\n\n }\n", "file_path": "src/many/src/message.rs", "rank": 98, "score": 55418.53101435985 }, { "content": "trait ManyServerFallback: LowLevelManyRequestHandler + base::BaseModuleBackend {}\n\n\n\nimpl<M: LowLevelManyRequestHandler + base::BaseModuleBackend + 'static> ManyServerFallback for M {}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ManyModuleList {}\n\n\n\npub const MANYSERVER_DEFAULT_TIMEOUT: u64 = 300;\n\n\n\nthread_local! {\n\n /// List of allowed URLs to communicate with the server\n\n /// WebAuthn-only\n\n pub static ALLOWED_URLS: OnceCell<Option<Vec<ManyUrl>>> = OnceCell::new();\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct ManyServer {\n\n modules: Vec<Arc<dyn ManyModule + Send>>,\n\n method_cache: BTreeSet<String>,\n\n identity: CoseKeyIdentity,\n", "file_path": "src/many/src/server.rs", "rank": 99, "score": 42292.42923405533 } ]
Rust
src/types/record.rs
jmackie/fit
8103bfd7435e992d88165a1dd8c55947ab73d7c7
use bits::Bits;
use byteorder::{
    BigEndian,
    ByteOrder,
    LittleEndian,
    ReadBytesExt,
};
use error::{
    Error,
    Result,
};
use profile;
use std::{
    collections::HashMap,
    convert::TryFrom,
};

pub struct Record {
    pub header: Header,
    pub content: Message,
}

impl Record {
    pub(crate) fn decode<R: ReadBytesExt>(
        r: &mut R,
        local_mesgs: &HashMap<u8, Definition>,
    ) -> Result<Self> {
        let header = Header::decode(r).map_err(Error::decoding("header"))?;
        let content = match header {
            Header::Definition { .. } => {
                Message::Definition(
                    Definition::decode(r)
                        .map_err(Error::decoding("definition message"))?,
                )
            },
            Header::Data { local_mesg_num, } => {
                let definition = local_mesgs
                    .get(&local_mesg_num)
                    .ok_or(Error::missing_definition(local_mesg_num))?;
                match definition.arch {
                    Architecture::LittleEndian => {
                        Message::Data(
                            Data::decode::<R, LittleEndian>(r, definition)
                                .map_err(Error::decoding("data message"))?,
                        )
                    },
                    Architecture::BigEndian => {
                        Message::Data(
                            Data::decode::<R, BigEndian>(r, definition)
                                .map_err(Error::decoding("data message"))?,
                        )
                    },
                }
            },
            Header::CompressedTimestamp { .. } => Message::CompressedTimestamp,
        };
        Ok(Record {
            header,
            content,
        })
    }
}

pub enum Header {
    Definition {
        local_mesg_num: u8,
    },
    Data {
        local_mesg_num: u8,
    },
    CompressedTimestamp {
        local_mesg_num: u8,
        time_offset: u8,
    },
}

impl Header {
    fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> {
        let byte = r.read_u8().map_err(Error::reading("byte"))?;
        if byte.bit_not_set(7) {
            if byte.bit_is_set(6) {
                Ok(Header::Definition { local_mesg_num: byte.bit_range(0, 3) })
            } else {
                Ok(Header::Data { local_mesg_num: byte.bit_range(0, 3) })
            }
        } else {
            Ok(Header::CompressedTimestamp {
                local_mesg_num: byte.bit_range(5, 6),
                time_offset: byte.bit_range(0, 4),
            })
        }
    }

    pub fn local_mesg_num(&self) -> u8 {
        match self {
            Header::Definition { local_mesg_num, } => *local_mesg_num,
            Header::Data { local_mesg_num, } => *local_mesg_num,
            Header::CompressedTimestamp { local_mesg_num, .. } => *local_mesg_num,
        }
    }
}

pub enum Message {
    Definition(Definition),
    Data(Data),
    CompressedTimestamp,
}

#[derive(Debug, Clone)]
pub struct Definition {
    arch: Architecture,
    global_mesg_num: u16,
    nfields: u8,
    field_defs: Vec<FieldDefinition>,
}

impl Definition {
    pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> {
        r.read_u8().map_err(Error::reading("reserved byte"))?;
        let arch = r
            .read_u8()
            .map_err(Error::reading("architecture byte"))
            .and_then(Architecture::try_from)?;
        let global_mesg_num = match arch {
            Architecture::LittleEndian => {
                r.read_u16::<LittleEndian>()
                    .map_err(Error::reading("global message number"))?
            },
            Architecture::BigEndian => {
                r.read_u16::<BigEndian>()
                    .map_err(Error::reading("global message number"))?
            },
        };
        let nfields = r.read_u8().map_err(Error::reading("number of fields"))?;
        let mut field_defs = Vec::with_capacity(nfields as usize);
        for i in 0..nfields {
            let field_def = FieldDefinition::decode(r)
                .map_err(Error::reading(format!("field definition #{}", i)))?;
            field_defs.push(field_def);
        }
        Ok(Definition {
            arch,
            global_mesg_num,
            nfields,
            field_defs,
        })
    }
}

#[derive(Debug, Clone)]
pub struct FieldDefinition {
    num: u8,
    size: u8,
    _base_type_num: u8,
}

impl FieldDefinition {
    pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> {
        let num = r.read_u8().map_err(Error::reading("number"))?;
        let size = r.read_u8().map_err(Error::reading("size"))?;
        let _base_type_num = r.read_u8().map_err(Error::reading("base type"))?;
        Ok(FieldDefinition {
            num,
            size,
            _base_type_num,
        })
    }
}

pub struct Data(pub Vec<profile::messages::Message>);

impl Data {
    pub(super) fn decode<R: ReadBytesExt, T: ByteOrder>(
        r: &mut R,
        definition: &Definition,
    ) -> Result<Self> {
        let mut mesgs = Vec::with_capacity(definition.field_defs.len());
        for field_def in definition.field_defs.iter() {
            let mut buffer = vec![0; field_def.size as usize];
            r.read(&mut buffer).map_err(Error::reading("buffer"))?;
            let mesg = profile::messages::Message::decode::<T>(
                &buffer,
                definition.global_mesg_num,
                field_def.num,
            )?;
            mesgs.push(mesg);
        }
        Ok(Data(mesgs))
    }
}

#[derive(Debug, Clone)]
enum Architecture {
    LittleEndian = 0,
    BigEndian = 1,
}

impl TryFrom<u8> for Architecture {
    type Error = Error;

    fn try_from(n: u8) -> Result<Architecture> {
        match n {
            0 => Ok(Architecture::LittleEndian),
            1 => Ok(Architecture::BigEndian),
            _ => Err(Error::unknown_architecture(n)),
        }
    }
}
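The Header::decode routine in record.rs above turns a single header byte into one of three record kinds: bit 7 set means a compressed-timestamp header, otherwise bit 6 separates definition records from data records, with the local message number and time offset packed into the low bits. The sketch below restates that layout using plain standard-library bit operations rather than the bits::Bits helpers, assuming bit_range(lo, hi) denotes the inclusive bit span; the HeaderKind enum and the byte values in the assertions are illustrative only and are not taken from a real FIT file.

#[derive(Debug, PartialEq)]
enum HeaderKind {
    Definition { local_mesg_num: u8 },
    Data { local_mesg_num: u8 },
    CompressedTimestamp { local_mesg_num: u8, time_offset: u8 },
}

// Mirror of the branch structure in Header::decode, written with raw masks.
fn classify(byte: u8) -> HeaderKind {
    if byte & 0x80 == 0 {
        // Normal header: bit 6 picks definition vs data,
        // bits 0-3 carry the local message number.
        let local_mesg_num = byte & 0x0F;
        if byte & 0x40 != 0 {
            HeaderKind::Definition { local_mesg_num }
        } else {
            HeaderKind::Data { local_mesg_num }
        }
    } else {
        // Compressed-timestamp header: bits 5-6 carry the local message
        // number, bits 0-4 the time offset.
        HeaderKind::CompressedTimestamp {
            local_mesg_num: (byte >> 5) & 0x03,
            time_offset: byte & 0x1F,
        }
    }
}

fn main() {
    assert_eq!(classify(0x40), HeaderKind::Definition { local_mesg_num: 0 });
    assert_eq!(classify(0x03), HeaderKind::Data { local_mesg_num: 3 });
    assert_eq!(
        classify(0xA5),
        HeaderKind::CompressedTimestamp { local_mesg_num: 1, time_offset: 5 }
    );
}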
use bits::Bits; use byteorder::{ BigEndian, ByteOrder, LittleEndian, ReadBytesExt, }; use error::{ Error, Result, }; use profile; use std::{ collections::HashMap, convert::TryFrom, }; pub struct Record { pub header: Header, pub content: Message, } impl Record { pub(crate) fn decode<R: ReadBytesExt>( r: &mut R, local_mesgs: &HashMap<u8, Definition>, ) -> Result<Self> { let header = Header::decode(r).map_err(Error::decoding("header"))?; let content = match header { Header::Definition { .. } => { Message::Definition( Definition::decode(r) .map_err(Error::decoding("definition message"))?, ) }, Header::Data { local_mesg_num, } => { let definition = local_mesgs .get(&local_mesg_num) .ok_or(Error::missing_definition(local_mesg_num))?; match definition.arch { Architecture::LittleEndian => { Message::Data( Data::decode::<R, LittleEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, Architecture::BigEndian => { Message::Data( Data::decode::<R, BigEndian>(r, definition) .map_err(Error::decoding("data message"))?, ) }, } }, Header::CompressedTimestamp { .. } => Message::CompressedTimestamp, }; Ok(Record { header, content, }) } } pub enum Header { Definition { local_mesg_num: u8, }, Data { local_mesg_num: u8, }, CompressedTimestamp { local_mesg_num: u8, time_offset: u8, }, } impl Header { fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let byte = r.read_u8().map_err(Error::reading("byte"))?; if byte.bit_not_set(7) { if byte.bit_is_set(6) { Ok(Header::Definition { local_mesg_num: byte.bit_range(0, 3) }) } else { Ok(Header::Data { local_mesg_num: byte.bit_range(0, 3) }) } } else { Ok(Header::CompressedTimestamp { local_mesg_num: byte.bit_range(5, 6), time_offset: byte.bit_range(0, 4), }) } } pub fn local_mesg_num(&self) -> u8 { match self { Header::Definition { local_mesg_num, } => *local_mesg_num, Header::Data { local_mesg_num, } => *local_mesg_num, Header::CompressedTimestamp { local_mesg_num, .. } => *local_mesg_num, } } } pub enum Message { Definition(Definition), Data(Data), CompressedTimestamp, } #[derive(Debug, Clone)] pub struct Definition { arch: Architecture, global_mesg_num: u16, nfields: u8, field_defs: Vec<FieldDefinition>, } impl Definition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { r.read_u8().map_err(Error::reading("reserved byte"))?; let arch = r .read_u8() .map_err(Error::reading("architecture byte")) .and_then(Architecture::try_from)?; let global_mesg_num = match arch { Architecture::LittleEndian => { r.read_u16::<LittleEndian>() .map_err(Error::reading("global message number"))? },
} #[derive(Debug, Clone)] pub struct FieldDefinition { num: u8, size: u8, _base_type_num: u8, } impl FieldDefinition { pub(super) fn decode<R: ReadBytesExt>(r: &mut R) -> Result<Self> { let num = r.read_u8().map_err(Error::reading("number"))?; let size = r.read_u8().map_err(Error::reading("size"))?; let _base_type_num = r.read_u8().map_err(Error::reading("base type"))?; Ok(FieldDefinition { num, size, _base_type_num, }) } } pub struct Data(pub Vec<profile::messages::Message>); impl Data { pub(super) fn decode<R: ReadBytesExt, T: ByteOrder>( r: &mut R, definition: &Definition, ) -> Result<Self> { let mut mesgs = Vec::with_capacity(definition.field_defs.len()); for field_def in definition.field_defs.iter() { let mut buffer = vec![0; field_def.size as usize]; r.read(&mut buffer).map_err(Error::reading("buffer"))?; let mesg = profile::messages::Message::decode::<T>( &buffer, definition.global_mesg_num, field_def.num, )?; mesgs.push(mesg); } Ok(Data(mesgs)) } } #[derive(Debug, Clone)] enum Architecture { LittleEndian = 0, BigEndian = 1, } impl TryFrom<u8> for Architecture { type Error = Error; fn try_from(n: u8) -> Result<Architecture> { match n { 0 => Ok(Architecture::LittleEndian), 1 => Ok(Architecture::BigEndian), _ => Err(Error::unknown_architecture(n)), } } }
Architecture::BigEndian => { r.read_u16::<BigEndian>() .map_err(Error::reading("global message number"))? }, }; let nfields = r.read_u8().map_err(Error::reading("number of fields"))?; let mut field_defs = Vec::with_capacity(nfields as usize); for i in 0..nfields { let field_def = FieldDefinition::decode(r) .map_err(Error::reading(format!("field definition #{}", i)))?; field_defs.push(field_def); } Ok(Definition { arch, global_mesg_num, nfields, field_defs, }) }
function_block-function_prefix_line
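For orientation, the prefix, suffix and middle fields above are contiguous slices of file_code: the prefix stops just before the BigEndian match arm in Definition::decode, the middle supplies that arm and the remainder of the function body, and the suffix picks up again at the end of impl Definition and runs through FieldDefinition to the end of the file. Under the fill-in-the-middle reading suggested by the function_block-function_prefix_line strategy, concatenating the three should reproduce file_code, up to whitespace that the flattened rendering here does not show. The helper below is a hypothetical sanity check of that assumption; the struct and function names are illustrative and no particular dataset-loading API is implied.

/// Illustrative row shape; field names mirror the dataset columns.
struct FimRow {
    file_code: String,
    prefix: String,
    middle: String,
    suffix: String,
}

/// Check the assumed invariant: prefix + middle + suffix == file_code.
fn reconstructs_file(row: &FimRow) -> bool {
    let mut joined = String::with_capacity(row.file_code.len());
    joined.push_str(&row.prefix);
    joined.push_str(&row.middle);
    joined.push_str(&row.suffix);
    joined == row.file_code
}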
[ { "content": "/// Attempt to open the types worksheet.\n\npub fn open_sheet<R>(workbook: &mut R) -> Result<Sheet>\n\nwhere\n\n R: calamine::Reader,\n\n{\n\n workbook\n\n .worksheet_range(WORKSHEET_NAME)\n\n .ok_or(Error::missing_sheet(WORKSHEET_NAME))?\n\n .map_err(Error::bad_sheet(WORKSHEET_NAME))\n\n .map(Sheet)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Message {\n\n name: String,\n\n fields: Vec<Field>,\n\n comment: Option<String>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Field {\n\n name: String,\n\n def_num: u8,\n\n type_: String, // Either<Type, BaseType>\n\n scale: Option<f64>,\n\n offset: Option<f64>,\n\n units: Option<String>,\n\n refs: Option<Vec<(String, String)>>,\n\n comment: Option<String>,\n\n}\n\n\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 0, "score": 132946.27655824134 }, { "content": "/// Attempt to open the types worksheet.\n\npub fn open_sheet<R>(workbook: &mut R) -> Result<Sheet>\n\nwhere\n\n R: calamine::Reader,\n\n{\n\n workbook\n\n .worksheet_range(WORKSHEET_NAME)\n\n .ok_or(Error::missing_sheet(WORKSHEET_NAME))?\n\n .map_err(Error::bad_sheet(WORKSHEET_NAME))\n\n .map(Sheet)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Type {\n\n pub name: String,\n\n pub base_type: String,\n\n pub values: Vec<Value>,\n\n pub comment: Option<String>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Value {\n\n pub name: String,\n\n pub value: u64,\n\n pub comment: Option<String>,\n\n}\n\n\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 1, "score": 117071.758954996 }, { "content": "fn generate_message_decode_impl(\n\n numbered_messages: &[(u64, Message)],\n\n) -> TokenStream {\n\n let match_arms = numbered_messages.iter().map(|(n, mesg)| {\n\n let num = Literal::u64_unsuffixed(*n);\n\n let name = Ident::new(&mesg.name, Span::call_site());\n\n quote! { #num => #name::decode::<T>(buffer, field_def_num).map(Message::#name) }\n\n });\n\n\n\n quote! {\n\n impl Message {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n mesg_num: u16,\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match mesg_num {\n\n #(#match_arms,)*\n\n _ => Ok(Message::Unknown {\n\n data: buffer.to_vec(),\n\n mesg_num,\n\n field_def_num\n\n }),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 2, "score": 116094.1526646478 }, { "content": "fn generate_message_inner_decode_impl(message: &Message) -> TokenStream {\n\n let message_name = Ident::new(&message.name, Span::call_site());\n\n\n\n let match_arms = message.fields.iter().filter_map(|field| {\n\n let field_name = Ident::new(&field.name, Span::call_site());\n\n let field_def_num = Literal::u8_unsuffixed(field.def_num);\n\n\n\n let value = {\n\n let namespace = if KNOWN_BASE_TYPES.contains(&field.type_) {\n\n Ident::new(\"base\", Span::call_site())\n\n }\n\n else {\n\n Ident::new(\"types\", Span::call_site())\n\n };\n\n\n\n let member = match KNOWN_BASE_TYPES\n\n .get(&field.type_)\n\n .map(String::as_str)\n\n {\n\n // Handle this type belonging to the base types.\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 3, "score": 102224.0942011683 }, { "content": "fn current_position<R: Seek>(r: &mut R) -> Result<u64> {\n\n r.seek(SeekFrom::Current(0)).map_err(Error::seek)\n\n}\n", "file_path": "src/types/file.rs", "rank": 4, "score": 99304.96426200637 }, { "content": "fn generate_message_enum(messages: &[Message]) -> TokenStream {\n\n let variants = messages.iter().map(|mesg| {\n\n let name = Ident::new(&mesg.name, Span::call_site());\n\n quote! 
{ #name(#name) }\n\n });\n\n\n\n quote! {\n\n /// All the FIT message types.\n\n #[derive(Debug)]\n\n pub enum Message {\n\n #(#variants,)*\n\n Unknown {\n\n data: Vec<u8>,\n\n mesg_num: u16,\n\n field_def_num: u8,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 6, "score": 90295.4170379075 }, { "content": "pub fn generate_module(\n\n sdk_version: &str,\n\n messages: &[Message],\n\n mesg_nums: HashMap<String, u64>,\n\n) -> Result<TokenStream> {\n\n // Module header:\n\n let mut tokens = quote! {\n\n #![doc=\"Generated for FIT SDK profile version: \"]\n\n #![doc=#sdk_version]\n\n\n\n use byteorder::ByteOrder;\n\n use error;\n\n use profile;\n\n use types;\n\n\n\n /// The actual data of a `Message`.\n\n #[derive(Debug)]\n\n pub struct Field<T> {\n\n value: T,\n\n scale: Option<f64>,\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 7, "score": 89098.31840367198 }, { "content": "fn generate_type_enum_decode_impl(ty: &Type) -> TokenStream {\n\n let type_name = Ident::new(&ty.name, Span::call_site());\n\n\n\n let base_type = match KNOWN_BASE_TYPES\n\n .get(&ty.base_type)\n\n .map(String::as_str)\n\n {\n\n // Do we have a valid base type?\n\n Some(\"string\") => Ident::new(\"Utf8String\", Span::call_site()),\n\n Some(\"bytes\") => Ident::new(\"Byte\", Span::call_site()),\n\n Some(ty) => Ident::new(&util::uppercase_first(ty), Span::call_site()),\n\n None => panic!(\"unknown base type: {}\", ty.base_type),\n\n };\n\n\n\n let match_arms = ty.values.iter().map(|val| {\n\n let name = Ident::new(&val.name, Span::call_site());\n\n let value = Literal::u64_unsuffixed(val.value);\n\n quote! { #value => Ok(#type_name::#name) }\n\n });\n\n\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 8, "score": 84200.38529468566 }, { "content": "/// Extract `Message`s from the \"Messages\" worksheet.\n\npub fn extract(sheet: &Sheet) -> Vec<Message> {\n\n sheet\n\n .rows()\n\n // Ignore the header.\n\n .skip(1)\n\n // Extract the interesting rows.\n\n .filter_map(|raw| {\n\n let row = Row::from(raw);\n\n match row {\n\n Row::Header {\n\n ..\n\n } => Some(row),\n\n Row::Field {\n\n ..\n\n } => Some(row),\n\n _ => None,\n\n }\n\n })\n\n .peekable()\n\n .batching(|iter| {\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 9, "score": 83864.59721556447 }, { "content": "fn try_main(opt: Options) -> Result<(), failure::Error> {\n\n // Open the Profile.xlsx file passed in by the caller\n\n let mut workbook = open_workbook(opt.profile_xlsx)?;\n\n\n\n // Try and generate the module token streams\n\n let (types_tokens, mesgs_tokens) =\n\n generate_modules(&mut workbook, &opt.fit_sdk_version)?;\n\n\n\n // Create and write the types module\n\n let types_module_path = opt.output_dir.join(opt.types_module_path);\n\n File::create(&types_module_path)?\n\n .write_all(&types_tokens.to_string().into_bytes())?;\n\n println!(\"✓ {}\", types_module_path.to_string_lossy());\n\n\n\n // Create and write the messages module\n\n let mesgs_module_path = opt.output_dir.join(opt.mesgs_module_path);\n\n File::create(&mesgs_module_path)?\n\n .write_all(&mesgs_tokens.to_string().into_bytes())?;\n\n println!(\"✓ {}\", mesgs_module_path.to_string_lossy());\n\n\n\n // All done, nothing to return\n\n Ok(())\n\n}\n\n\n", "file_path": "profile-gen/src/main.rs", "rank": 10, "score": 77541.86226094163 }, { "content": "fn generate_message_inner(message: &Message) -> TokenStream {\n\n let name = Ident::new(&message.name, Span::call_site());\n\n let comment = match 
message.comment {\n\n Some(ref comment) => quote! { #[doc=#comment] },\n\n None => TokenStream::new(),\n\n };\n\n\n\n let variants = message.fields.iter().filter_map(|field| {\n\n let comment = match field.comment {\n\n Some(ref comment) => quote! { #[doc=#comment] },\n\n None => TokenStream::new(),\n\n };\n\n let variant = Ident::new(&field.name, Span::call_site());\n\n let namespace = if KNOWN_BASE_TYPES.contains(&field.type_) {\n\n Ident::new(\"base\", Span::call_site())\n\n }\n\n else {\n\n Ident::new(\"types\", Span::call_site())\n\n };\n\n let member =\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 11, "score": 70713.76841194725 }, { "content": "/// Return a prettily formatted error, including its entire\n\n/// causal chain.\n\nfn pretty_error(err: &failure::Error) -> String {\n\n let mut pretty = err.to_string();\n\n let mut prev = err.as_fail();\n\n while let Some(next) = prev.cause() {\n\n pretty.push_str(\": \");\n\n pretty.push_str(&next.to_string());\n\n prev = next;\n\n }\n\n pretty\n\n}\n", "file_path": "profile-gen/src/main.rs", "rank": 12, "score": 70231.07442242162 }, { "content": "#[derive(Debug)]\n\nenum Row<'a> {\n\n GroupBanner {\n\n name: String,\n\n },\n\n Header {\n\n mesg_name: String,\n\n comment: Option<String>,\n\n },\n\n Field {\n\n def_num: u8,\n\n name: String,\n\n type_: String,\n\n scale: Option<f64>,\n\n offset: Option<f64>,\n\n units: Option<String>,\n\n ref_field_names: Option<Vec<String>>,\n\n ref_field_values: Option<Vec<String>>,\n\n comment: Option<String>,\n\n },\n\n Empty,\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 13, "score": 69723.36202419121 }, { "content": "pub fn generate_module(\n\n sdk_version: &str,\n\n types: &[Type],\n\n) -> Result<TokenStream> {\n\n let mut tokens = quote! {\n\n #![doc=\"Generated for FIT SDK profile version: \"]\n\n #![doc=#sdk_version]\n\n\n\n use byteorder::ByteOrder;\n\n use error;\n\n use profile;\n\n };\n\n\n\n for ty in types {\n\n tokens.extend(match ty.values.len() {\n\n 0 => generate_type_prim(&ty),\n\n // TODO: For now we just capture the comment associated with this\n\n // \"Min\" value, but it should really be represented in\n\n // the type.\n\n 1 if ty.values[0].name == \"Min\" => {\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 14, "score": 67914.41297889328 }, { "content": "fn generate_type_prim_decode_impl(ty: &Type) -> TokenStream {\n\n let type_name = Ident::new(&ty.name, Span::call_site());\n\n let decode_body = match ty.base_type.as_str() {\n\n \"uint8\" => {\n\n quote! {\n\n Ok(#type_name(buffer[0]))\n\n }\n\n },\n\n \"uint32\" => {\n\n quote! {\n\n Ok(#type_name(T::read_u32(buffer)))\n\n }\n\n },\n\n _ => quote! { unimplemented!() },\n\n };\n\n\n\n quote! 
{\n\n impl #type_name {\n\n pub(crate) fn decode<T: ByteOrder>(buffer: &[u8]) -> error::Result<Self> {\n\n #decode_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 15, "score": 66254.67308138532 }, { "content": "/// Return a prettily formatted error, including its entire\n\n/// causal chain.\n\nfn pretty_error(err: &failure::Error) -> String {\n\n let mut pretty = err.to_string();\n\n let mut prev = err.as_fail();\n\n while let Some(next) = prev.cause() {\n\n pretty.push_str(\": \");\n\n pretty.push_str(&next.to_string());\n\n prev = next;\n\n }\n\n pretty\n\n}\n", "file_path": "examples/avg_power.rs", "rank": 16, "score": 59895.23136136905 }, { "content": "/// Extract `Type`s from a worksheet.\n\npub fn extract(sheet: &Sheet) -> Vec<Type> {\n\n sheet\n\n .rows()\n\n // Ignore the header.\n\n .skip(1)\n\n // Extract the interesting rows.\n\n .filter_map(|raw| {\n\n let row = Row::from(raw);\n\n match row {\n\n Row::Header {\n\n ..\n\n } => Some(row),\n\n Row::Value {\n\n ..\n\n } => Some(row),\n\n _ => None,\n\n }\n\n })\n\n .peekable()\n\n .batching(|iter| {\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 17, "score": 55757.175764766325 }, { "content": "/// Comments are converted to rust comments, so need to\n\n/// escape things that look like markdown.\n\nfn sanitize_comment(comment: &str) -> String {\n\n comment.replace(\"[\", \"\\\\[\").replace(\"]\", \"\\\\]\")\n\n}\n", "file_path": "profile-gen/src/worksheet/messages.rs", "rank": 18, "score": 55142.30028419853 }, { "content": "fn generate_type_enum(ty: &Type) -> TokenStream {\n\n let name = Ident::new(&ty.name, Span::call_site());\n\n let comment = match ty.comment {\n\n Some(ref comment) => quote! { #[doc=#comment] },\n\n None => TokenStream::new(),\n\n };\n\n\n\n // NOTE: ty.values.len() could be 0\n\n let variants = ty.values.iter().filter_map(|val| {\n\n // Not all Types have an Unknown value, so we insert one.\n\n if val.name == \"Unknown\" {\n\n return None\n\n };\n\n\n\n let name = Ident::new(&val.name, Span::call_site());\n\n let value = Literal::u64_unsuffixed(val.value);\n\n let comment = match val.comment {\n\n Some(ref comment) => quote! { #[doc=#comment] },\n\n None => TokenStream::new(),\n\n };\n", "file_path": "profile-gen/src/worksheet/types.rs", "rank": 19, "score": 52539.18441671816 }, { "content": "/// Command line options, all in a single `struct`.\n\nstruct Options {\n\n profile_xlsx: PathBuf,\n\n output_dir: PathBuf,\n\n types_module_path: PathBuf,\n\n mesgs_module_path: PathBuf,\n\n fit_sdk_version: String,\n\n}\n\n\n\n// TODO: should this be TryFrom?\n\nimpl<'a> From<clap::ArgMatches<'a>> for Options {\n\n fn from(matches: clap::ArgMatches<'a>) -> Self {\n\n Options {\n\n profile_xlsx: PathBuf::from(\n\n matches.value_of(\"profile_xlsx\").expect(\"required argument\"),\n\n ),\n\n output_dir: PathBuf::from(\n\n matches.value_of(\"output_dir\").expect(\"required argument\"),\n\n ),\n\n types_module_path: PathBuf::from(\n\n matches.value_of(\"types_module_path\").unwrap_or(\"types.rs\"),\n", "file_path": "profile-gen/src/main.rs", "rank": 20, "score": 50130.78320260208 }, { "content": "fn main() {\n\n // CLI is specified in a yaml file. 
We load the matches and\n\n // then use the `From<clap::ArgMatches>` impl to get a neat\n\n // struct of parsed options (see `Options` type below).\n\n let yaml = load_yaml!(\"cli.yaml\");\n\n let opt = clap::App::from_yaml(yaml).get_matches().into();\n\n if let Err(err) = try_main(opt) {\n\n // Print the error, including all of its underlying causes\n\n eprintln!(\"{}\", pretty_error(&err));\n\n\n\n // If we get a non-empty backtrace (e.g., RUST_BACKTRACE=1\n\n // is set), then show it.\n\n let backtrace = err.backtrace().to_string();\n\n if !backtrace.trim().is_empty() {\n\n eprintln!(\"{}\", backtrace);\n\n }\n\n process::exit(1);\n\n }\n\n // process::exit(0);\n\n}\n\n\n", "file_path": "profile-gen/src/main.rs", "rank": 21, "score": 50035.564963471894 }, { "content": " MessageNumber(Field<profile::base::Uint16>),\n\n #[doc = \"Index of external mesg\"]\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl MemoGlob {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 250 => {\n\n Ok(MemoGlob::PartIndex(Field {\n\n value: profile::base::Uint32::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n", "file_path": "src/profile/messages.rs", "rank": 22, "score": 49117.29328681568 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl UserProfile {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(UserProfile::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 23, "score": 49117.23042173688 }, { "content": " PartNumber(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl Software {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(Software::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 3 => {\n\n Ok(Software::Version(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 24, "score": 49116.9780181883 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl SegmentLap {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(SegmentLap::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 253 => {\n", "file_path": "src/profile/messages.rs", "rank": 25, "score": 49115.74305082066 }, { "content": " }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum VideoDescription {\n\n #[doc = \"Long descriptions will be split into multiple parts\"]\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n #[doc = \"Total number of description parts\"]\n\n MessageCount(Field<profile::base::Uint16>),\n\n Text(Field<profile::base::Utf8String>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl VideoDescription {\n\n pub(crate) fn decode<T: 
ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n", "file_path": "src/profile/messages.rs", "rank": 26, "score": 49115.514088711534 }, { "content": " })\n\n },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum CadenceZone {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n HighValue(Field<profile::base::Uint8>),\n\n Name(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl CadenceZone {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(CadenceZone::MessageIndex(Field {\n", "file_path": "src/profile/messages.rs", "rank": 27, "score": 49115.414643324984 }, { "content": " data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl VideoTitle {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(VideoTitle::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(VideoTitle::MessageCount(Field {\n", "file_path": "src/profile/messages.rs", "rank": 28, "score": 49115.017732568274 }, { "content": " HeartRateSource(Field<profile::base::Uint8>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl DiveSettings {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(DiveSettings::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n", "file_path": "src/profile/messages.rs", "rank": 29, "score": 49114.92074568475 }, { "content": "pub enum SpeedZone {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n HighValue(Field<profile::base::Uint16>),\n\n Name(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl SpeedZone {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(SpeedZone::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n", "file_path": "src/profile/messages.rs", "rank": 30, "score": 49114.77599163918 }, { "content": "pub enum AntRx {\n\n Timestamp(Field<profile::types::DateTime>),\n\n FractionalTimestamp(Field<profile::base::Uint16>),\n\n MesgId(Field<profile::base::Bytes>),\n\n MesgData(Field<profile::base::Bytes>),\n\n ChannelNumber(Field<profile::base::Uint8>),\n\n Data(Field<profile::base::Bytes>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl AntRx {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(AntRx::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 31, "score": 49114.68069976156 }, { "content": " #[doc = \"Number of the frame that the timestamp and timestamp_ms \\\n\n correlate to\"]\n\n FrameNumber(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl VideoFrame {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n 
buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(VideoFrame::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n", "file_path": "src/profile/messages.rs", "rank": 32, "score": 49114.24598560122 }, { "content": " field_def_num,\n\n })\n\n },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum ExerciseTitle {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n ExerciseCategory(Field<profile::types::ExerciseCategory>),\n\n ExerciseName(Field<profile::base::Uint16>),\n\n WktStepName(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl ExerciseTitle {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n", "file_path": "src/profile/messages.rs", "rank": 33, "score": 49113.98912127358 }, { "content": " }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum FileCapabilities {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n Type(Field<profile::types::File>),\n\n Flags(Field<profile::types::FileFlags>),\n\n Directory(Field<profile::base::Utf8String>),\n\n MaxCount(Field<profile::base::Uint16>),\n\n MaxSize(Field<profile::base::Uint32>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl FileCapabilities {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n", "file_path": "src/profile/messages.rs", "rank": 34, "score": 49113.52345228783 }, { "content": "#[derive(Debug)]\n\npub enum FieldCapabilities {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n File(Field<profile::types::File>),\n\n MesgNum(Field<profile::types::MesgNum>),\n\n FieldNum(Field<profile::base::Uint8>),\n\n Count(Field<profile::base::Uint16>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl FieldCapabilities {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(FieldCapabilities::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 35, "score": 49113.4644702239 }, { "content": " HighBpm(Field<profile::base::Uint8>),\n\n Calories(Field<profile::base::Uint16>),\n\n FatCalories(Field<profile::base::Uint8>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl MetZone {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(MetZone::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n", "file_path": "src/profile/messages.rs", "rank": 36, "score": 49113.46132451892 }, { "content": " MesgNum(Field<profile::types::MesgNum>),\n\n CountType(Field<profile::types::MesgCount>),\n\n Count(Field<profile::base::Uint16>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl MesgCapabilities {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(MesgCapabilities::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: 
None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 37, "score": 49113.46132451892 }, { "content": "pub enum Capabilities {\n\n #[doc = \"Use language_bits_x types where x is index of array.\"]\n\n Languages(Field<profile::base::Uint8z>),\n\n #[doc = \"Use sport_bits_x types where x is index of array.\"]\n\n Sports(Field<profile::types::SportBits0>),\n\n WorkoutsSupported(Field<profile::types::WorkoutCapabilities>),\n\n ConnectivitySupported(Field<profile::types::ConnectivityCapabilities>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Capabilities {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(Capabilities::Languages(Field {\n", "file_path": "src/profile/messages.rs", "rank": 38, "score": 49113.40268993537 }, { "content": " HeartRateType(Field<profile::types::HrType>),\n\n Status(Field<profile::types::BpStatus>),\n\n #[doc = \"Associates this blood pressure message to a user. This \\\n\n corresponds to the index of the user profile message in the \\\n\n blood pressure file.\"]\n\n UserProfileIndex(Field<profile::types::MessageIndex>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl BloodPressure {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(BloodPressure::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 39, "score": 49112.86480168645 }, { "content": " ExerciseName(Field<profile::base::Uint16>),\n\n ExerciseWeight(Field<profile::base::Uint16>),\n\n WeightDisplayUnit(Field<profile::types::FitBaseUnit>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl WorkoutStep {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(WorkoutStep::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 40, "score": 49112.82424315258 }, { "content": " MetabolicAge(Field<profile::base::Uint8>),\n\n VisceralFatRating(Field<profile::base::Uint8>),\n\n #[doc = \"Associates this weight scale message to a user. 
This \\\n\n corresponds to the index of the user profile message in the \\\n\n weight scale file.\"]\n\n UserProfileIndex(Field<profile::types::MessageIndex>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl WeightScale {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(WeightScale::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 41, "score": 49112.67250339281 }, { "content": " data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl MagnetometerData {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(MagnetometerData::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n\n Ok(MagnetometerData::TimestampMs(Field {\n", "file_path": "src/profile/messages.rs", "rank": 42, "score": 49112.52445219259 }, { "content": " data: buffer.to_vec(),\n\n field_def_num,\n\n })\n\n },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum Sport {\n\n Sport(Field<profile::types::Sport>),\n\n SubSport(Field<profile::types::SubSport>),\n\n Name(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl Sport {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n", "file_path": "src/profile/messages.rs", "rank": 43, "score": 49112.49734093593 }, { "content": " field_def_num: u8,\n\n },\n\n}\n\nimpl Lap {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(Lap::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 253 => {\n\n Ok(Lap::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 44, "score": 49112.36252568138 }, { "content": "}\n\nimpl HrmProfile {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(HrmProfile::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(HrmProfile::Enabled(Field {\n\n value: profile::base::Bool::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 45, "score": 49112.14286865477 }, { "content": "}\n\n#[doc = \"Heart rate variability\"]\n\n#[derive(Debug)]\n\npub enum Hrv {\n\n #[doc = \"Time between beats\"]\n\n Time(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Hrv {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(Hrv::Time(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 46, "score": 49112.08974224431 }, { "content": "}\n\nimpl Goal {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> 
error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(Goal::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(Goal::Sport(Field {\n\n value: profile::types::Sport::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 47, "score": 49112.01709702904 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Set {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(Set::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 48, "score": 49111.95885420002 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl TimestampCorrelation {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(TimestampCorrelation::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 49, "score": 49111.69944292734 }, { "content": " }\n\n}\n\n#[derive(Debug)]\n\npub enum WorkoutSession {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n Sport(Field<profile::types::Sport>),\n\n SubSport(Field<profile::types::SubSport>),\n\n NumValidSteps(Field<profile::base::Uint16>),\n\n FirstStepIndex(Field<profile::base::Uint16>),\n\n PoolLength(Field<profile::base::Uint16>),\n\n PoolLengthUnit(Field<profile::types::DisplayMeasure>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl WorkoutSession {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n", "file_path": "src/profile/messages.rs", "rank": 50, "score": 49111.6852074208 }, { "content": " }\n\n}\n\n#[derive(Debug)]\n\npub enum Video {\n\n Url(Field<profile::base::Utf8String>),\n\n HostingProvider(Field<profile::base::Utf8String>),\n\n #[doc = \"Playback time of video\"]\n\n Duration(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Video {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 51, "score": 49111.58014628197 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl WeatherAlert {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(WeatherAlert::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n", "file_path": "src/profile/messages.rs", "rank": 52, "score": 49111.44360596972 }, { "content": " field_def_num: u8,\n\n },\n\n}\n\nimpl SegmentFile {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(SegmentFile::MessageIndex(Field 
{\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(SegmentFile::FileUuid(Field {\n\n value: profile::base::Utf8String::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 53, "score": 49111.242203709575 }, { "content": " Calories(Field<profile::base::Uint32>),\n\n Sport(Field<profile::types::Sport>),\n\n #[doc = \"Includes pauses\"]\n\n ElapsedTime(Field<profile::base::Uint32>),\n\n Sessions(Field<profile::base::Uint16>),\n\n ActiveTime(Field<profile::base::Uint32>),\n\n SportIndex(Field<profile::base::Uint8>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Totals {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(Totals::MessageIndex(Field {\n", "file_path": "src/profile/messages.rs", "rank": 54, "score": 49111.21833444129 }, { "content": " Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl SegmentId {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(SegmentId::Name(Field {\n\n value: profile::base::Utf8String::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n", "file_path": "src/profile/messages.rs", "rank": 55, "score": 49111.19126775596 }, { "content": " TransmissionType(Field<profile::base::Uint8z>),\n\n DeviceIndex(Field<profile::types::DeviceIndex>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl AntChannelId {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(AntChannelId::ChannelNumber(Field {\n\n value: profile::base::Uint8::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(AntChannelId::DeviceType(Field {\n", "file_path": "src/profile/messages.rs", "rank": 56, "score": 49111.1329430948 }, { "content": " }\n\n}\n\n#[derive(Debug)]\n\npub enum SlaveDevice {\n\n Manufacturer(Field<profile::types::Manufacturer>),\n\n Product(Field<profile::base::Uint16>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl SlaveDevice {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(SlaveDevice::Manufacturer(Field {\n\n value: profile::types::Manufacturer::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n", "file_path": "src/profile/messages.rs", "rank": 57, "score": 49111.069062418304 }, { "content": " EndTimestampMs(Field<profile::base::Uint16>),\n\n #[doc = \"Start of clip in video time\"]\n\n ClipStart(Field<profile::base::Uint32>),\n\n #[doc = \"End of clip in video time\"]\n\n ClipEnd(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl VideoClip {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(VideoClip::ClipNumber(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n\n scale: None,\n", "file_path": "src/profile/messages.rs", "rank": 58, "score": 49111.064749583886 }, { "content": " },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum ExdDataFieldConfiguration {\n\n 
ScreenIndex(Field<profile::base::Uint8>),\n\n ConceptField(Field<profile::base::Bytes>),\n\n FieldId(Field<profile::base::Uint8>),\n\n ConceptCount(Field<profile::base::Uint8>),\n\n DisplayType(Field<profile::types::ExdDisplayType>),\n\n Title(Field<profile::base::Utf8String>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl ExdDataFieldConfiguration {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n", "file_path": "src/profile/messages.rs", "rank": 59, "score": 49110.85620463451 }, { "content": " SerialNumber(Field<profile::base::Uint32z>),\n\n #[doc = \"Only set for files that are can be created/erased.\"]\n\n TimeCreated(Field<profile::types::DateTime>),\n\n #[doc = \"Only set for files that are not created/erased.\"]\n\n Number(Field<profile::base::Uint16>),\n\n #[doc = \"Optional free form string to indicate the devices name or model\"]\n\n ProductName(Field<profile::base::Utf8String>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl FileId {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(FileId::Type(Field {\n", "file_path": "src/profile/messages.rs", "rank": 60, "score": 49110.62577619345 }, { "content": " PoolLength(Field<profile::base::Uint16>),\n\n PoolLengthUnit(Field<profile::types::DisplayMeasure>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Workout {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 4 => {\n\n Ok(Workout::Sport(Field {\n\n value: profile::types::Sport::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n", "file_path": "src/profile/messages.rs", "rank": 61, "score": 49110.592734888785 }, { "content": " EnhancedAltitude(Field<profile::base::Uint32>),\n\n EnhancedSpeed(Field<profile::base::Uint32>),\n\n Heading(Field<profile::base::Uint16>),\n\n #[doc = \"Used to correlate UTC to system time if the timestamp of the \\\n\n message is in system time. This UTC time is derived from the \\\n\n GPS data.\"]\n\n UtcTimestamp(Field<profile::types::DateTime>),\n\n #[doc = \"velocity\\\\[0\\\\] is lon velocity. Velocity\\\\[1\\\\] is lat \\\n\n velocity. 
Velocity\\\\[2\\\\] is altitude velocity.\"]\n\n Velocity(Field<profile::base::Sint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl GpsMetadata {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n", "file_path": "src/profile/messages.rs", "rank": 62, "score": 49110.50947812039 }, { "content": " AntChannelId(AntChannelId),\n\n AntRx(AntRx),\n\n AntTx(AntTx),\n\n ExdScreenConfiguration(ExdScreenConfiguration),\n\n ExdDataFieldConfiguration(ExdDataFieldConfiguration),\n\n ExdDataConceptConfiguration(ExdDataConceptConfiguration),\n\n FieldDescription(FieldDescription),\n\n DeveloperDataId(DeveloperDataId),\n\n DiveSummary(DiveSummary),\n\n Unknown { data: Vec<u8>, mesg_num: u16, field_def_num: u8 },\n\n}\n\nimpl Message {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n mesg_num: u16,\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match mesg_num {\n\n 0 => {\n\n FileId::decode::<T>(buffer, field_def_num).map(Message::FileId)\n", "file_path": "src/profile/messages.rs", "rank": 63, "score": 49110.502671087466 }, { "content": " }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum FileCreator {\n\n SoftwareVersion(Field<profile::base::Uint16>),\n\n HardwareVersion(Field<profile::base::Uint8>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl FileCreator {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(FileCreator::SoftwareVersion(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 64, "score": 49110.45020658312 }, { "content": "impl HrZone {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(HrZone::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(HrZone::HighBpm(Field {\n\n value: profile::base::Uint8::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"bpm\"),\n", "file_path": "src/profile/messages.rs", "rank": 65, "score": 49110.42877167037 }, { "content": " Depth(Field<profile::base::Uint32>),\n\n #[doc = \"0 if above water\"]\n\n NextStopDepth(Field<profile::base::Uint32>),\n\n NextStopTime(Field<profile::base::Uint32>),\n\n TimeToSurface(Field<profile::base::Uint32>),\n\n NdlTime(Field<profile::base::Uint32>),\n\n CnsLoad(Field<profile::base::Uint8>),\n\n N2Load(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Record {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n", "file_path": "src/profile/messages.rs", "rank": 66, "score": 49110.37622513741 }, { "content": " })\n\n },\n\n }\n\n }\n\n}\n\n#[doc = \"Must be logged before field description\"]\n\n#[derive(Debug)]\n\npub enum DeveloperDataId {\n\n DeveloperId(Field<profile::base::Bytes>),\n\n ApplicationId(Field<profile::base::Bytes>),\n\n ManufacturerId(Field<profile::types::Manufacturer>),\n\n DeveloperDataIndex(Field<profile::base::Uint8>),\n\n ApplicationVersion(Field<profile::base::Uint32>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl DeveloperDataId {\n\n pub(crate) fn decode<T: 
ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n", "file_path": "src/profile/messages.rs", "rank": 67, "score": 49110.3603922306 }, { "content": "impl OhrSettings {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(OhrSettings::Enabled(Field {\n\n value: profile::types::Switch::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n _ => {\n\n Ok(OhrSettings::Unknown {\n\n data: buffer.to_vec(),\n\n field_def_num,\n\n })\n\n },\n", "file_path": "src/profile/messages.rs", "rank": 68, "score": 49110.28602891848 }, { "content": " data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl WeatherConditions {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(WeatherConditions::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(WeatherConditions::WeatherReport(Field {\n", "file_path": "src/profile/messages.rs", "rank": 69, "score": 49110.21546449462 }, { "content": " in baro_cal\"]\n\n SampleTimeOffset(Field<profile::base::Uint16>),\n\n #[doc = \"These are the raw ADC reading. The samples may span across \\\n\n seconds. A conversion will need to be done on this data once \\\n\n read.\"]\n\n BaroPres(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl BarometerData {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(BarometerData::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 70, "score": 49110.138838699284 }, { "content": " CalibratedGyroX(Field<profile::base::Float32>),\n\n #[doc = \"Calibrated gyro reading\"]\n\n CalibratedGyroY(Field<profile::base::Float32>),\n\n #[doc = \"Calibrated gyro reading\"]\n\n CalibratedGyroZ(Field<profile::base::Float32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl GyroscopeData {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(GyroscopeData::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n", "file_path": "src/profile/messages.rs", "rank": 71, "score": 49110.089233635714 }, { "content": " AvgSwimmingCadence(Field<profile::base::Uint8>),\n\n EventGroup(Field<profile::base::Uint8>),\n\n TotalCalories(Field<profile::base::Uint16>),\n\n LengthType(Field<profile::types::LengthType>),\n\n PlayerScore(Field<profile::base::Uint16>),\n\n OpponentScore(Field<profile::base::Uint16>),\n\n #[doc = \"stroke_type enum used as the index\"]\n\n StrokeCount(Field<profile::base::Uint16>),\n\n #[doc = \"zone number used as the index\"]\n\n ZoneCount(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Length {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n", "file_path": "src/profile/messages.rs", "rank": 72, "score": 49110.086261550474 }, { "content": " Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl AntTx {\n\n pub(crate) fn 
decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(AntTx::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n\n Ok(AntTx::FractionalTimestamp(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n\n scale: Some(32768.0),\n", "file_path": "src/profile/messages.rs", "rank": 73, "score": 49109.95955466659 }, { "content": " SdmAntId(Field<profile::base::Uint16z>),\n\n SdmCalFactor(Field<profile::base::Uint16>),\n\n Odometer(Field<profile::base::Uint32>),\n\n #[doc = \"Use footpod for speed source instead of GPS\"]\n\n SpeedSource(Field<profile::base::Bool>),\n\n SdmAntIdTransType(Field<profile::base::Uint8z>),\n\n #[doc = \"Rollover counter that can be used to extend the odometer\"]\n\n OdometerRollover(Field<profile::base::Uint8>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl SdmProfile {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n", "file_path": "src/profile/messages.rs", "rank": 74, "score": 49109.88986965195 }, { "content": " #[doc = \"Calibration factor divisor\"]\n\n CalibrationDivisor(Field<profile::base::Uint32>),\n\n #[doc = \"Level shift value used to shift the ADC value back into range\"]\n\n LevelShift(Field<profile::base::Uint32>),\n\n #[doc = \"Internal Calibration factor\"]\n\n OffsetCal(Field<profile::base::Sint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl OneDSensorCalibration {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(OneDSensorCalibration::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 75, "score": 49109.70994549209 }, { "content": " Descent(Field<profile::base::Uint32>),\n\n ModerateActivityMinutes(Field<profile::base::Uint16>),\n\n VigorousActivityMinutes(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Monitoring {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(Monitoring::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n", "file_path": "src/profile/messages.rs", "rank": 76, "score": 49109.65708361564 }, { "content": "#![doc = \"Generated for FIT SDK profile version: \"]\n\n#![doc = \"20.66.00\"]\n\nuse byteorder::ByteOrder;\n\nuse error;\n\nuse profile;\n\nuse types;\n\n#[doc = r\" The actual data of a `Message`.\"]\n\n#[derive(Debug)]\n\npub struct Field<T> {\n\n value: T,\n\n scale: Option<f64>,\n\n offset: Option<f64>,\n\n pub units: Option<&'static str>,\n\n}\n\nimpl types::field::Field for Field<profile::base::Float64> {\n\n type Value = f64;\n\n\n\n fn value(&self) -> Self::Value {\n\n self.value.0 / self.scale.unwrap_or(1.0) - self.offset.unwrap_or(0.0)\n\n }\n", "file_path": "src/profile/messages.rs", "rank": 77, "score": 49109.52512326726 }, { "content": " SurfaceInterval(Field<profile::base::Uint32>),\n\n StartCns(Field<profile::base::Uint8>),\n\n EndCns(Field<profile::base::Uint8>),\n\n StartN2(Field<profile::base::Uint16>),\n\n 
EndN2(Field<profile::base::Uint16>),\n\n O2Toxicity(Field<profile::base::Uint16>),\n\n DiveNumber(Field<profile::base::Uint32>),\n\n BottomTime(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl DiveSummary {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n", "file_path": "src/profile/messages.rs", "rank": 78, "score": 49109.5195598564 }, { "content": " FractionalTimestamp(Field<profile::base::Uint16>),\n\n Time256(Field<profile::base::Uint8>),\n\n FilteredBpm(Field<profile::base::Uint8>),\n\n EventTimestamp(Field<profile::base::Uint32>),\n\n EventTimestamp12(Field<profile::base::Bytes>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl Hr {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(Hr::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n", "file_path": "src/profile/messages.rs", "rank": 79, "score": 49109.14144684554 }, { "content": " AttitudeStageComplete(Field<profile::base::Uint8>),\n\n #[doc = \"Track Angle/Heading Range 0 - 2pi\"]\n\n Track(Field<profile::base::Uint16>),\n\n Validity(Field<profile::types::AttitudeValidity>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl AviationAttitude {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(AviationAttitude::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 80, "score": 49108.86878707546 }, { "content": " SourceType(Field<profile::types::SourceType>),\n\n #[doc = \"Optional free form string to indicate the devices name or model\"]\n\n ProductName(Field<profile::base::Utf8String>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl DeviceInfo {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(DeviceInfo::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n", "file_path": "src/profile/messages.rs", "rank": 81, "score": 49108.72628760907 }, { "content": "pub enum ExdDataConceptConfiguration {\n\n ScreenIndex(Field<profile::base::Uint8>),\n\n ConceptField(Field<profile::base::Bytes>),\n\n FieldId(Field<profile::base::Uint8>),\n\n ConceptIndex(Field<profile::base::Uint8>),\n\n DataPage(Field<profile::base::Uint8>),\n\n ConceptKey(Field<profile::base::Uint8>),\n\n Scaling(Field<profile::base::Uint8>),\n\n DataUnits(Field<profile::types::ExdDataUnits>),\n\n Qualifier(Field<profile::types::ExdQualifiers>),\n\n Descriptor(Field<profile::types::ExdDescriptors>),\n\n IsSigned(Field<profile::base::Bool>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl ExdDataConceptConfiguration {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n", "file_path": "src/profile/messages.rs", "rank": 82, "score": 49108.668040034805 }, { "content": " pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n 
) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(ObdiiData::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n\n Ok(ObdiiData::TimestampMs(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"ms\"),\n\n }))\n", "file_path": "src/profile/messages.rs", "rank": 83, "score": 49108.285931166145 }, { "content": " },\n\n}\n\nimpl Schedule {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(Schedule::Manufacturer(Field {\n\n value: profile::types::Manufacturer::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(Schedule::Product(Field {\n\n value: profile::base::Uint16::decode::<T>(buffer)?,\n\n scale: None,\n", "file_path": "src/profile/messages.rs", "rank": 84, "score": 49108.21155289412 }, { "content": " ActivityType(Field<profile::types::ActivityType>),\n\n #[doc = \"Indexed by activity_type\"]\n\n CyclesToDistance(Field<profile::base::Uint16>),\n\n #[doc = \"Indexed by activity_type\"]\n\n CyclesToCalories(Field<profile::base::Uint16>),\n\n RestingMetabolicRate(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl MonitoringInfo {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(MonitoringInfo::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n", "file_path": "src/profile/messages.rs", "rank": 85, "score": 49108.170533015946 }, { "content": " pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(CoursePoint::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(CoursePoint::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n", "file_path": "src/profile/messages.rs", "rank": 86, "score": 49108.14820206108 }, { "content": " _ => {\n\n Ok(DiveAlarm::Unknown {\n\n data: buffer.to_vec(),\n\n field_def_num,\n\n })\n\n },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum DiveGas {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n HeliumContent(Field<profile::base::Uint8>),\n\n OxygenContent(Field<profile::base::Uint8>),\n\n Status(Field<profile::types::DiveGasStatus>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl DiveGas {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n", "file_path": "src/profile/messages.rs", "rank": 87, "score": 49108.07303595318 }, { "content": "impl Event {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(Event::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n\n Ok(Event::Event(Field {\n\n value: profile::types::Event::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n", "file_path": "src/profile/messages.rs", "rank": 88, "score": 49107.552494712174 }, 
{ "content": " pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(SegmentLeaderboardEntry::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(SegmentLeaderboardEntry::Name(Field {\n\n value: profile::base::Utf8String::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n", "file_path": "src/profile/messages.rs", "rank": 89, "score": 49107.54068523918 }, { "content": " CourseDownloadEnabled(Field<profile::base::Bool>),\n\n WorkoutDownloadEnabled(Field<profile::base::Bool>),\n\n GpsEphemerisDownloadEnabled(Field<profile::base::Bool>),\n\n IncidentDetectionEnabled(Field<profile::base::Bool>),\n\n GrouptrackEnabled(Field<profile::base::Bool>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Connectivity {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(Connectivity::BluetoothEnabled(Field {\n\n value: profile::base::Bool::decode::<T>(buffer)?,\n\n scale: None,\n", "file_path": "src/profile/messages.rs", "rank": 90, "score": 49107.47040351582 }, { "content": " buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(FieldDescription::DeveloperDataIndex(Field {\n\n value: profile::base::Uint8::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(FieldDescription::FieldDefinitionNumber(Field {\n\n value: profile::base::Uint8::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n", "file_path": "src/profile/messages.rs", "rank": 91, "score": 49107.37296813831 }, { "content": " }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum ZonesTarget {\n\n MaxHeartRate(Field<profile::base::Uint8>),\n\n ThresholdHeartRate(Field<profile::base::Uint8>),\n\n FunctionalThresholdPower(Field<profile::base::Uint16>),\n\n HrCalcType(Field<profile::types::HrZoneCalc>),\n\n PwrCalcType(Field<profile::types::PwrZoneCalc>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl ZonesTarget {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 1 => {\n\n Ok(ZonesTarget::MaxHeartRate(Field {\n", "file_path": "src/profile/messages.rs", "rank": 92, "score": 49107.2128569081 }, { "content": "impl Activity {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(Activity::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(Activity::TotalTimerTime(Field {\n\n value: profile::base::Uint32::decode::<T>(buffer)?,\n\n scale: Some(1000.0),\n\n offset: None,\n\n units: Some(\"s\"),\n", "file_path": "src/profile/messages.rs", "rank": 93, "score": 49107.1270296072 }, { "content": " #[doc = \"Accumulated distance along the segment at the described point\"]\n\n Distance(Field<profile::base::Uint32>),\n\n #[doc = \"Accumulated altitude along the segment at the described point\"]\n\n Altitude(Field<profile::base::Uint16>),\n\n #[doc = \"Accumualted time each leader board member required to reach the \\\n\n 
described point. This value is zero for all leader board members \\\n\n at the starting point of the segment.\"]\n\n LeaderTime(Field<profile::base::Uint32>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl SegmentPoint {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n", "file_path": "src/profile/messages.rs", "rank": 94, "score": 49106.18651281357 }, { "content": "}\n\nimpl StressLevel {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 0 => {\n\n Ok(StressLevel::StressLevelValue(Field {\n\n value: profile::base::Sint16::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 1 => {\n\n Ok(StressLevel::StressLevelTime(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n", "file_path": "src/profile/messages.rs", "rank": 95, "score": 49106.108960448124 }, { "content": " MaxLevMotorPower(Field<profile::base::Uint16>),\n\n #[doc = \"lev battery consumption during session\"]\n\n LevBatteryConsumption(Field<profile::base::Uint8>),\n\n AvgVerticalRatio(Field<profile::base::Uint16>),\n\n AvgStanceTimeBalance(Field<profile::base::Uint16>),\n\n AvgStepLength(Field<profile::base::Uint16>),\n\n TotalAnaerobicTrainingEffect(Field<profile::base::Uint8>),\n\n AvgVam(Field<profile::base::Uint16>),\n\n Unknown {\n\n data: Vec<u8>,\n\n field_def_num: u8,\n\n },\n\n}\n\nimpl Session {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n", "file_path": "src/profile/messages.rs", "rank": 96, "score": 49105.588638475005 }, { "content": " _ => {\n\n Ok(Connectivity::Unknown {\n\n data: buffer.to_vec(),\n\n field_def_num,\n\n })\n\n },\n\n }\n\n }\n\n}\n\n#[derive(Debug)]\n\npub enum WatchfaceSettings {\n\n MessageIndex(Field<profile::types::MessageIndex>),\n\n Mode(Field<profile::types::WatchfaceMode>),\n\n Layout(Field<profile::base::Bytes>),\n\n Unknown { data: Vec<u8>, field_def_num: u8 },\n\n}\n\nimpl WatchfaceSettings {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n", "file_path": "src/profile/messages.rs", "rank": 97, "score": 49105.581457655746 }, { "content": "impl ThreeDSensorCalibration {\n\n pub(crate) fn decode<T: ByteOrder>(\n\n buffer: &[u8],\n\n field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 253 => {\n\n Ok(ThreeDSensorCalibration::Timestamp(Field {\n\n value: profile::types::DateTime::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"s\"),\n\n }))\n\n },\n\n 0 => {\n\n Ok(ThreeDSensorCalibration::SensorType(Field {\n\n value: profile::types::SensorType::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n", "file_path": "src/profile/messages.rs", "rank": 98, "score": 49105.33822304469 }, { "content": " field_def_num: u8,\n\n ) -> error::Result<Self> {\n\n match field_def_num {\n\n 254 => {\n\n Ok(DiveGas::MessageIndex(Field {\n\n value: profile::types::MessageIndex::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: None,\n\n }))\n\n },\n\n 0 => {\n\n Ok(DiveGas::HeliumContent(Field {\n\n value: profile::base::Uint8::decode::<T>(buffer)?,\n\n scale: None,\n\n offset: None,\n\n units: Some(\"percent\"),\n\n }))\n\n },\n\n 1 => {\n", "file_path": 
"src/profile/messages.rs", "rank": 99, "score": 49104.920937781426 } ]
Rust
src/matcher.rs
SpectralOps/service-policy-kit
a1d1b8eab9981b21b87349c5232b4c893a723933
use crate::data::{Cause, HeaderList, Response, Violation}; use fancy_regex::Regex; use std::collections::HashMap; pub struct RegexMatcher { pub kind: String, } impl RegexMatcher { pub fn new(kind: &str) -> Self { Self { kind: kind.to_string(), } } fn match_field( &self, name: &str, wire_field: &Option<String>, recorded_field: &Option<String>, ) -> Option<Violation> { if let Some(recorded_value) = recorded_field { if wire_field.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: name.to_string(), on: Some(name.to_string()), wire: None, recorded: recorded_value.to_string(), }); } let match_re = Regex::new(recorded_value).unwrap(); if !match_re.is_match(wire_field.as_ref().unwrap()).unwrap() { return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: name.to_string(), on: Some(name.to_string()), wire: wire_field.clone(), recorded: recorded_value.to_string(), }); } } None } fn match_headers( &self, wire_headers: &Option<HashMap<String, HeaderList>>, recorded_headers: &Option<HashMap<String, HeaderList>>, ) -> Option<Violation> { if let Some(recorded_headers) = recorded_headers { if wire_headers.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "headers".to_string(), on: Some("all headers".to_string()), wire: None, recorded: format!("{:?}", recorded_headers), }); } let wire_headers = wire_headers.as_ref().unwrap(); let matches_headers = recorded_headers.iter().find(|(k, vs)| { let k = k.to_lowercase(); if !wire_headers.contains_key(k.as_str()) { return true; } let wire_header_values = &wire_headers[k.as_str()]; !vs.iter().any(|v| { let v_re = Regex::new(v.as_str()).unwrap(); wire_header_values .iter() .any(|wv| v_re.is_match(wv).unwrap()) }) }); if let Some(matches_headers) = matches_headers { let (key, _) = matches_headers; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "headers".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_headers.get(key.as_str()).unwrap_or(&vec![]) )), recorded: format!("{:?}", matches_headers.1), }); } } None } fn match_vars( &self, wire_vars: &Option<HashMap<String, String>>, recorded_vars: &Option<HashMap<String, String>>, ) -> Option<Violation> { if let Some(recorded_vars) = recorded_vars { if wire_vars.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "vars".to_string(), on: Some("all vars".to_string()), wire: None, recorded: format!("{:?}", recorded_vars), }); } let wire_vars = wire_vars.as_ref().unwrap(); let badly_matched_vars = recorded_vars.iter().find(|(k, v)| { let k = k.to_lowercase(); if !wire_vars.contains_key(k.as_str()) { return true; } let wire_var = &wire_vars[k.as_str()]; let v_re = Regex::new(v.as_str()).unwrap(); !v_re.is_match(wire_var).unwrap() }); if let Some(badly_matched_vars) = badly_matched_vars { let (key, _) = badly_matched_vars; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "vars".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_vars.get(key.as_str()).unwrap_or(&"".to_string()) )), recorded: format!("{:?}", badly_matched_vars.1), }); } } None } pub fn is_match( &self, wire_response: &Response, recorded_response: Option<&Response>, ) -> Vec<Violation> { if let Some(recorded_response) = recorded_response { vec![ self.match_field("body", &wire_response.body, &recorded_response.body), self.match_field( 
"status_code", &wire_response.status_code, &recorded_response.status_code, ), self.match_headers(&wire_response.headers, &recorded_response.headers), self.match_vars(&wire_response.vars, &recorded_response.vars), ] .into_iter() .flatten() .collect::<Vec<_>>() } else { vec![Violation { kind: self.kind.clone(), cause: Cause::RecordedMissing, subject: "response".to_string(), on: None, wire: None, recorded: format!("{:?}", wire_response), }] } } }
use crate::data::{Cause, HeaderList, Response, Violation}; use fancy_regex::Regex; use std::collections::HashMap; pub struct RegexMatcher { pub kind: String, } impl RegexMatcher { pub fn new(kind: &str) -> Self { Self { kind: kind.to_string(), } } fn match_field( &self, name: &str, wire_field: &Option<String>, recorded_field: &Option<String>, ) -> Option<Violation> { if let Some(recorded_value) = recorded_field { if wire_field.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: name.to_string(), on: Some(name.to_string()), wire: None, recorded: recorded_value.to_string(), }); } let match_re = Regex::new(recorded_value).unwrap(); if !match_re.is_match(wire_field.as_ref().unwrap()).unwrap() { return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: name.to_string(), on: Some(name.to_string()), wire: wire_field.clone(), recorded: recorded_value.to_string(), }); } } None }
fn match_vars( &self, wire_vars: &Option<HashMap<String, String>>, recorded_vars: &Option<HashMap<String, String>>, ) -> Option<Violation> { if let Some(recorded_vars) = recorded_vars { if wire_vars.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "vars".to_string(), on: Some("all vars".to_string()), wire: None, recorded: format!("{:?}", recorded_vars), }); } let wire_vars = wire_vars.as_ref().unwrap(); let badly_matched_vars = recorded_vars.iter().find(|(k, v)| { let k = k.to_lowercase(); if !wire_vars.contains_key(k.as_str()) { return true; } let wire_var = &wire_vars[k.as_str()]; let v_re = Regex::new(v.as_str()).unwrap(); !v_re.is_match(wire_var).unwrap() }); if let Some(badly_matched_vars) = badly_matched_vars { let (key, _) = badly_matched_vars; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "vars".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_vars.get(key.as_str()).unwrap_or(&"".to_string()) )), recorded: format!("{:?}", badly_matched_vars.1), }); } } None } pub fn is_match( &self, wire_response: &Response, recorded_response: Option<&Response>, ) -> Vec<Violation> { if let Some(recorded_response) = recorded_response { vec![ self.match_field("body", &wire_response.body, &recorded_response.body), self.match_field( "status_code", &wire_response.status_code, &recorded_response.status_code, ), self.match_headers(&wire_response.headers, &recorded_response.headers), self.match_vars(&wire_response.vars, &recorded_response.vars), ] .into_iter() .flatten() .collect::<Vec<_>>() } else { vec![Violation { kind: self.kind.clone(), cause: Cause::RecordedMissing, subject: "response".to_string(), on: None, wire: None, recorded: format!("{:?}", wire_response), }] } } }
fn match_headers( &self, wire_headers: &Option<HashMap<String, HeaderList>>, recorded_headers: &Option<HashMap<String, HeaderList>>, ) -> Option<Violation> { if let Some(recorded_headers) = recorded_headers { if wire_headers.is_none() { return Some(Violation { kind: self.kind.clone(), cause: Cause::WireMissing, subject: "headers".to_string(), on: Some("all headers".to_string()), wire: None, recorded: format!("{:?}", recorded_headers), }); } let wire_headers = wire_headers.as_ref().unwrap(); let matches_headers = recorded_headers.iter().find(|(k, vs)| { let k = k.to_lowercase(); if !wire_headers.contains_key(k.as_str()) { return true; } let wire_header_values = &wire_headers[k.as_str()]; !vs.iter().any(|v| { let v_re = Regex::new(v.as_str()).unwrap(); wire_header_values .iter() .any(|wv| v_re.is_match(wv).unwrap()) }) }); if let Some(matches_headers) = matches_headers { let (key, _) = matches_headers; let key = key.to_lowercase(); return Some(Violation { kind: self.kind.clone(), cause: Cause::Mismatch, subject: "headers".to_string(), on: Some(key.to_string()), wire: Some(format!( "{:?}", wire_headers.get(key.as_str()).unwrap_or(&vec![]) )), recorded: format!("{:?}", matches_headers.1), }); } } None }
function_block-full_function
[ { "content": "pub fn diff_text(expected: &str, actual: &str) -> (String, String, String) {\n\n let expected = format!(\"{:?}\", expected);\n\n let expected = &expected[1..expected.len() - 1];\n\n\n\n let actual = format!(\"{:?}\", actual);\n\n let actual = &actual[1..actual.len() - 1];\n\n\n\n let Changeset { diffs, .. } = Changeset::new(expected, actual, \" \");\n\n let diff = diffs\n\n .into_iter()\n\n .map(|diff| match diff {\n\n difference::Difference::Same(s) => s,\n\n difference::Difference::Rem(s) => format!(\"{}\", style(s).bold().white().on_green()),\n\n difference::Difference::Add(s) => format!(\"{}\", style(s).bold().white().on_red()),\n\n })\n\n .filter(|s| !s.is_empty())\n\n .collect::<Vec<String>>()\n\n .join(\" != \");\n\n\n\n (expected.to_string(), actual.to_string(), diff)\n", "file_path": "src/reporters/console_output.rs", "rank": 0, "score": 127725.2712670404 }, { "content": "// fmtstring: {{var}} -> var is being replaced with real name to create the placeholder:\n\n// ?q={{host}}, -> {{'var'->host}} -> {{host}} -> ?q=v\n\nfn render_with_vars(text: String, vars: &HashMap<String, String>, fmtstring: &str) -> String {\n\n vars.iter().fold(text, |acc, (k, v)| {\n\n acc.replace(fmtstring.replace(\"var\", k).as_str(), v)\n\n })\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct VarInfo {\n\n pub expr: Option<String>,\n\n #[serde(default)]\n\n pub kind: String,\n\n #[serde(default)]\n\n pub from: String,\n\n #[serde(default)]\n\n pub default: Option<String>,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 1, "score": 95167.3185244833 }, { "content": "fn error_violation(err: String) -> Vec<Violation> {\n\n vec![Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Error,\n\n on: Some(\"response\".to_string()),\n\n subject: \"request\".to_string(),\n\n wire: Some(format!(\"error: {}\", err)),\n\n recorded: \"\".to_string(),\n\n }]\n\n}\n\nimpl Check for Cert {\n\n fn name(&self) -> &str {\n\n NAME\n\n }\n\n fn perform(&self, _context: &mut Context, inter: &Interaction) -> CheckResult {\n\n let mut violations = vec![];\n\n if inter.cert.is_some() {\n\n let now = Instant::now();\n\n let connector = TlsConnector::new().unwrap();\n\n let url = match reqwest::Url::parse(inter.request.uri.as_str()) {\n", "file_path": "src/cert.rs", "rank": 2, "score": 89863.44304723295 }, { "content": "pub fn extract(response: &Response, infos: &HashMap<String, VarInfo>) -> HashMap<String, String> {\n\n let jsonres = json!({\n\n \"body\": response.body,\n\n \"headers\": response.headers,\n\n \"status\": response.status_code,\n\n });\n\n let mut res = HashMap::new();\n\n infos.iter().for_each(|(k, v)| {\n\n res.insert(k.to_string(), extract_var(&jsonres, v));\n\n });\n\n res\n\n}\n", "file_path": "src/vars.rs", "rank": 3, "score": 84468.28846546594 }, { "content": "pub fn extract_var(v: &serde_json::Value, info: &VarInfo) -> String {\n\n let blank = json!(\"\");\n\n let v = if info.kind == \"json\" {\n\n json!({\n\n \"body\": serde_json::from_str(v.get(\"body\").unwrap().as_str().unwrap()).unwrap_or_else(|_| json!({})),\n\n \"headers\":v.get(\"headers\").unwrap(),\n\n \"status\":v.get(\"status\").unwrap(),\n\n })\n\n } else {\n\n v.clone()\n\n };\n\n\n\n let final_value = v\n\n .pointer(info.from.as_str())\n\n .cloned()\n\n .unwrap_or_else(|| info.default.as_ref().map_or(blank, |v| json!(v)));\n\n let str_value = if final_value.is_string() {\n\n final_value.as_str().unwrap().to_string()\n\n } else if final_value.is_number() {\n\n format!(\"{}\", 
final_value.as_i64().unwrap())\n", "file_path": "src/vars.rs", "rank": 4, "score": 75643.96855058258 }, { "content": "pub fn create_reporter<'a>(config: &HashMap<String, ReporterConfig>) -> Reporter<'a> {\n\n Reporter::new(config)\n\n}\n", "file_path": "src/reporters/mod.rs", "rank": 5, "score": 72119.17883248314 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct Message<'a> {\n\n request: &'a Request,\n\n responses: &'a ResponseBag,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 6, "score": 41812.86065949551 }, { "content": "fn main() {\n\n let opts = RunOptions::default();\n\n let runner = SequenceRunner::from_opts(&opts);\n\n\n\n let sequence: SequenceInteractions = serde_yaml::from_str(\n\n r#\"\n\nhttp_interactions:\n\n- request:\n\n id: step one\n\n uri: http://example.com\n\n response:\n\n status_code: \"200\"\n\n\"#,\n\n )\n\n .unwrap();\n\n let mut context = Context::new();\n\n let res = runner.run(&mut context, &sequence.http_interactions);\n\n exit(if res.ok { 0 } else { 1 })\n\n}\n", "file_path": "examples/quick-start.rs", "rank": 7, "score": 41094.99938390723 }, { "content": "fn get_vars_from_cmd(\n\n cmd: &str,\n\n request: &Request,\n\n response_bag: &ResponseBag,\n\n) -> HashMap<String, String> {\n\n let message = Message {\n\n request,\n\n responses: response_bag,\n\n };\n\n let serialized_message = serde_json::to_string(&message).unwrap();\n\n debug!(\"Executing {}\", cmd);\n\n debug!(\"with stdin:\\n{}\", serialized_message);\n\n match Popen::create(\n\n &[cmd],\n\n PopenConfig {\n\n stdout: Redirection::Pipe,\n\n stdin: Redirection::Pipe,\n\n ..Default::default()\n\n },\n\n ) {\n", "file_path": "src/data.rs", "rank": 8, "score": 39852.393330980005 }, { "content": "pub trait Sender {\n\n fn send(&self, interaction: &Interaction) -> AnyResult<Response>;\n\n}\n\n\n\npub struct PrepareOpts {\n\n pub var_placeholder_open: String,\n\n pub var_placeholder_close: String,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 9, "score": 39077.27747086436 }, { "content": "pub trait Check {\n\n fn name(&self) -> &str;\n\n fn perform(&self, _context: &mut Context, _interaction: &Interaction) -> CheckResult;\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct CheckResult {\n\n pub request: Request,\n\n pub response: Option<Response>,\n\n pub violations: Vec<Violation>,\n\n pub duration: Option<Duration>,\n\n pub error: Option<String>,\n\n pub kind: String,\n\n}\n\nimpl CheckResult {\n\n pub fn invalid_err(kind: &str, interaction: &Interaction, text: &str) -> Self {\n\n CheckResult {\n\n request: interaction.request.clone(),\n\n response: None,\n\n violations: vec![],\n", "file_path": "src/data.rs", "rank": 10, "score": 39077.27747086436 }, { "content": "pub trait ReporterOutput: Sync {\n\n fn start(&mut self, _interaction: &Interaction) {}\n\n fn report(&mut self, _interaction: &Interaction, _check_results: &CheckResult) {}\n\n fn end(&mut self, _interactions: &[Interaction], _results: &[CheckResult]) {}\n\n}\n\n\n\npub type HeaderList = Vec<String>;\n\npub type ReporterConfig = HashMap<String, String>;\n\npub type ResponseBag = HashMap<String, Response>;\n\npub type VarsBag = HashMap<String, String>;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Config {\n\n var_braces: Option<String>,\n\n}\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Context {\n\n pub vars_bag: VarsBag,\n\n pub response_bag: ResponseBag,\n\n pub config: Config,\n", "file_path": "src/data.rs", "rank": 11, "score": 34969.70822435203 }, { "content": " 
violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"issuer\".to_string(),\n\n wire: Some(issuer),\n\n recorded: match_re.to_string(),\n\n });\n\n }\n\n }\n\n\n\n if let Some(subject_expr) = inter.cert.as_ref().unwrap().subject.as_ref() {\n\n let subject = format!(\"{}\", c.tbs_certificate.subject);\n\n let match_re = Regex::new(subject_expr).unwrap();\n\n if !match_re.is_match(&subject).unwrap() {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"subject\".to_string(),\n", "file_path": "src/cert.rs", "rank": 14, "score": 24.238567863070955 }, { "content": " let p95 = h.percentile(95.0).unwrap();\n\n if p95 > benchmark.p95_ms {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"p95\".to_string(),\n\n wire: Some(p95.to_string()),\n\n recorded: benchmark.p95_ms.to_string(),\n\n })\n\n }\n\n // verify matching before considering as bench candidate\n\n let p99 = h.percentile(99.0).unwrap();\n\n if p99 > benchmark.p99_ms {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"p99\".to_string(),\n\n wire: Some(p99.to_string()),\n", "file_path": "src/bench.rs", "rank": 15, "score": 24.049039192609534 }, { "content": " recorded: benchmark.p99_ms.to_string(),\n\n })\n\n }\n\n\n\n let avg = h.mean().unwrap();\n\n if avg > benchmark.avg_ms {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"avg\".to_string(),\n\n wire: Some(avg.to_string()),\n\n recorded: benchmark.avg_ms.to_string(),\n\n })\n\n }\n\n\n\n if total > u128::from(benchmark.time_ms) {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n", "file_path": "src/bench.rs", "rank": 16, "score": 23.64519911905254 }, { "content": "use crate::data::{Cause, Check, CheckResult, Context, Interaction, Sender, Violation};\n\nuse histogram::Histogram;\n\nuse log::*;\n\nuse std::time::{Duration, Instant};\n\npub struct Bench<'a> {\n\n sender: &'a dyn Sender,\n\n}\n\nimpl<'a> Bench<'a> {\n\n pub fn new(sender: &'a dyn Sender) -> Self {\n\n Self { sender }\n\n }\n\n}\n\n\n\npub const NAME: &str = \"bench\";\n\nimpl<'a> Check for Bench<'a> {\n\n fn name(&self) -> &str {\n\n NAME\n\n }\n\n fn perform(&self, context: &mut Context, inter: &Interaction) -> CheckResult {\n\n let mut violations = vec![];\n", "file_path": "src/bench.rs", "rank": 22, "score": 21.0378778967894 }, { "content": " on: None,\n\n subject: \"time\".to_string(),\n\n wire: Some(total.to_string()),\n\n recorded: benchmark.time_ms.to_string(),\n\n })\n\n }\n\n\n\n CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request,\n\n violations,\n\n response: None,\n\n duration: None,\n\n error: None,\n\n }\n\n } else {\n\n CheckResult::invalid(NAME, inter)\n\n }\n\n }\n\n}\n", "file_path": "src/bench.rs", "rank": 23, "score": 20.617417252580406 }, { "content": " wire: Some(subject),\n\n recorded: match_re.to_string(),\n\n });\n\n }\n\n }\n\n\n\n CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request.clone(),\n\n violations,\n\n response: None,\n\n duration: Some(now.elapsed()),\n\n error: None,\n\n }\n\n } else {\n\n CheckResult::invalid(NAME, inter)\n\n }\n\n }\n\n}\n", "file_path": "src/cert.rs", "rank": 24, "score": 20.316598866406487 }, { "content": " let mut o = ConsoleOutput::new_with_buffer(buffer, true);\n\n 
o.start(&inter);\n\n assert_eq!(\n\n o.buffer.to_string(),\n\n \"• postbin:validation: \\u{1b}[35mstarted\\u{1b}[0m\\n\"\n\n );\n\n let fake_result = CheckResult {\n\n kind: \"content\".to_string(),\n\n request: inter.request.clone(),\n\n response: None,\n\n duration: Some(Duration::new(2, 0)),\n\n error: None,\n\n violations: vec![Violation {\n\n kind: \"content\".to_string(),\n\n cause: Cause::WireMissing,\n\n subject: \"content\".to_string(),\n\n on: None,\n\n wire: None,\n\n recorded: \"\".to_string(),\n\n }],\n", "file_path": "src/reporters/console_output.rs", "rank": 25, "score": 20.285467280531613 }, { "content": "use chrono::prelude::*;\n\nuse native_tls::TlsConnector;\n\n\n\nuse crate::data::{Cause, Check, CheckResult, Context, Interaction, Violation};\n\nuse std::net::TcpStream;\n\nuse std::time::Instant;\n\n\n\nuse fancy_regex::Regex;\n\nuse x509_parser::parse_x509_der;\n\npub const NAME: &str = \"cert\";\n\npub struct Cert {}\n\n\n\nimpl Default for Cert {\n\n fn default() -> Self {\n\n Self {}\n\n }\n\n}\n\nimpl Cert {\n\n pub fn new() -> Self {\n\n Cert::default()\n\n }\n\n}\n", "file_path": "src/cert.rs", "rank": 26, "score": 19.353102170755225 }, { "content": " pub cause: Cause,\n\n pub subject: String,\n\n pub on: Option<String>,\n\n pub wire: Option<String>,\n\n pub recorded: String,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub enum Cause {\n\n WireMissing,\n\n RecordedMissing,\n\n Mismatch,\n\n Error,\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 27, "score": 18.845480693093123 }, { "content": " duration: Some(Duration::new(0, 0)),\n\n error: Some(text.to_string()),\n\n kind: kind.to_string(),\n\n }\n\n }\n\n pub fn invalid(kind: &str, interaction: &Interaction) -> Self {\n\n CheckResult {\n\n request: interaction.request.clone(),\n\n response: None,\n\n violations: vec![],\n\n duration: Some(Duration::new(0, 0)),\n\n error: Some(\"Invalid check\".to_string()),\n\n kind: kind.to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub struct Violation {\n\n pub kind: String,\n", "file_path": "src/data.rs", "rank": 28, "score": 18.182794664165193 }, { "content": " + chrono::Duration::days(inter.cert.as_ref().unwrap().max_days as i64)\n\n {\n\n violations.push(Violation {\n\n kind: NAME.to_string(),\n\n cause: Cause::Mismatch,\n\n on: None,\n\n subject: \"expiry\".to_string(),\n\n wire: Some(format!(\n\n \"{:?}, ({} days left)\",\n\n dt,\n\n (dt - chrono::Utc::now()).num_days()\n\n )),\n\n recorded: format!(\"> {} days\", inter.cert.as_ref().unwrap().max_days),\n\n })\n\n }\n\n\n\n if let Some(issuer_expr) = inter.cert.as_ref().unwrap().issuer.as_ref() {\n\n let issuer = format!(\"{}\", c.tbs_certificate.issuer);\n\n let match_re = Regex::new(issuer_expr).unwrap();\n\n if !match_re.is_match(&issuer).unwrap() {\n", "file_path": "src/cert.rs", "rank": 29, "score": 16.54096654239767 }, { "content": "use std::time::Instant;\n\n\n\nuse crate::data::{Check, CheckResult, Context, Interaction, Sender};\n\nuse crate::matcher::RegexMatcher;\n\npub const NAME: &str = \"content\";\n\npub struct ContentCheck<'a> {\n\n sender: &'a dyn Sender,\n\n}\n\nimpl<'a> ContentCheck<'a> {\n\n pub fn new(sender: &'a dyn Sender) -> Self {\n\n ContentCheck { sender }\n\n }\n\n}\n\n\n\nimpl<'a> Check for ContentCheck<'a> {\n\n fn name(&self) -> &str {\n\n NAME\n\n }\n\n fn perform(&self, context: &mut Context, interaction: &Interaction) -> CheckResult {\n\n if interaction.response.is_some() {\n", "file_path": "src/content.rs", "rank": 30, "score": 15.714572252016133 }, { 
"content": "}\n\n\n\nimpl ConsoleOutput {\n\n pub fn new(config: &HashMap<String, String>) -> ConsoleOutput {\n\n let buf = \"\".to_string();\n\n ConsoleOutput::new_with_buffer(buf, config.contains_key(\"verbose\"))\n\n }\n\n pub fn new_with_buffer(buffer: String, verbose: bool) -> ConsoleOutput {\n\n ConsoleOutput { buffer, verbose }\n\n }\n\n fn _print_diff_short(&self, benchdiffs: &[Violation]) -> String {\n\n let mut out = \"\".to_string();\n\n benchdiffs.iter().for_each(|b| {\n\n out.push_str(\n\n format!(\n\n \" {} {}: {}\\n\",\n\n style(\"expected:\").green(),\n\n b.subject,\n\n b.recorded\n\n )\n", "file_path": "src/reporters/console_output.rs", "rank": 31, "score": 14.64861106649152 }, { "content": " .as_str(),\n\n );\n\n out.push_str(\n\n format!(\n\n \" {} {}: {:?}\\n\",\n\n style(\"got:\").red(),\n\n b.subject,\n\n b.wire\n\n )\n\n .as_str(),\n\n );\n\n });\n\n out\n\n }\n\n fn _print_diff(&self, filter: &str, matchdiffs: &[Violation]) -> String {\n\n let mut out = \"\".to_string();\n\n\n\n let diffs = matchdiffs\n\n .iter()\n\n .filter(|m| m.subject == filter)\n", "file_path": "src/reporters/console_output.rs", "rank": 32, "score": 14.269583075560943 }, { "content": " return CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request.clone(),\n\n violations: error_violation(format!(\"{}\", err)),\n\n response: None,\n\n duration: Some(now.elapsed()),\n\n error: None,\n\n }\n\n }\n\n };\n\n let cert = stream.peer_certificate().unwrap().unwrap();\n\n cert.to_der().unwrap()\n\n }\n\n Err(err) => {\n\n return CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request.clone(),\n\n violations: error_violation(format!(\"{}\", err)),\n\n response: None,\n\n duration: Some(now.elapsed()),\n", "file_path": "src/cert.rs", "rank": 33, "score": 13.547035123179391 }, { "content": "use crate::reporters::create_reporter;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::content::ContentCheck;\n\nuse crate::data::{Check, CheckResult, Context, Interaction, ReporterConfig, Sender};\n\nuse crate::sender::{SenderBuilder, SenderOptions};\n\n\n\npub struct RunOptions {\n\n pub sender: Box<dyn Sender>,\n\n pub flip: bool,\n\n pub reporters: HashMap<String, ReporterConfig>,\n\n}\n\nimpl Default for RunOptions {\n\n fn default() -> Self {\n\n RunOptions::build(None, false, Some(\"console\".into()), true)\n\n }\n\n}\n\n\n\nimpl RunOptions {\n\n pub fn build(\n", "file_path": "src/runner.rs", "rank": 34, "score": 13.456132580740244 }, { "content": " response: Some(resp),\n\n violations: vs,\n\n duration: Some(now.elapsed()),\n\n error: None,\n\n }\n\n }\n\n Err(err) => CheckResult {\n\n kind: NAME.to_string(),\n\n request: interaction.request.clone(),\n\n response: None,\n\n violations: vec![],\n\n duration: Some(now.elapsed()),\n\n error: Some(err.to_string()),\n\n },\n\n }\n\n } else {\n\n CheckResult::invalid(self.name(), interaction)\n\n }\n\n }\n\n}\n", "file_path": "src/content.rs", "rank": 35, "score": 13.14550228614291 }, { "content": "use crate::data::{Interaction, Request, Response};\n\nuse anyhow::Result as AnyResult;\n\nuse openapi;\n\nuse std::collections::HashMap;\n\nuse std::io::Read;\n\n\n\n// WIP: OpenAPI support\n\npub struct OpenAPI {\n\n #[allow(dead_code)]\n\n opts: HashMap<String, String>,\n\n}\n\n\n\nimpl OpenAPI {\n\n pub fn new(opts: HashMap<String, String>) -> Self {\n\n Self { opts }\n\n }\n\n // XXX 'top' is not used\n\n pub fn discover<R: Read>(&self, source: &mut R, _top: i32) -> AnyResult<Vec<Interaction>> {\n\n let interactions = match 
openapi::from_reader(source) {\n\n Ok(spec) => spec\n", "file_path": "src/discovery/open_api.rs", "rank": 36, "score": 12.675514763353345 }, { "content": "use crate::data::{Interaction, Response, Sender};\n\nuse anyhow::Result as AnyResult;\n\nuse log::*;\n\nuse reqwest::header::{HeaderMap, HeaderName, HeaderValue};\n\nuse rusoto_core::credential::AwsCredentials;\n\nuse rusoto_core::signature::SignedRequest;\n\nuse rusoto_core::Region;\n\n\n\nuse std::collections::HashMap;\n\nuse std::str::FromStr;\n\nuse std::time::Duration;\n\n\n\npub struct SenderOptions {\n\n pub dry_run: Option<String>,\n\n}\n\npub struct SenderBuilder {}\n\nimpl SenderBuilder {\n\n pub fn build(opts: SenderOptions) -> Box<dyn Sender> {\n\n let s: Box<dyn Sender> = match opts.dry_run {\n\n Some(examples_key) => Box::new(DrySender::new(&examples_key)),\n", "file_path": "src/sender.rs", "rank": 37, "score": 12.630540916082824 }, { "content": "use crate::data::{CheckResult, Interaction, ReporterOutput, Violation};\n\n\n\nuse console::style;\n\nuse console::Term;\n\nuse difference::Changeset;\n\nuse std::collections::HashMap;\n\nuse std::fmt::Write;\n\nconst FAIL_SIGN: &str = \"✗\";\n\nconst SUCCESS_SIGN: &str = \"✔\";\n\npub struct ConsoleOutput {\n\n buffer: String,\n\n verbose: bool,\n\n}\n\n\n", "file_path": "src/reporters/console_output.rs", "rank": 38, "score": 12.298848040673505 }, { "content": " Ok(url) => url,\n\n Err(err) => {\n\n return CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request.clone(),\n\n violations: error_violation(format!(\"{}\", err)),\n\n response: None,\n\n duration: Some(now.elapsed()),\n\n error: Some(err.to_string()),\n\n }\n\n }\n\n };\n\n\n\n let der = match TcpStream::connect(format!(\"{}:443\", url.host().unwrap())) {\n\n Ok(stream) => {\n\n let stream = match connector\n\n .connect(format!(\"{}\", url.host().unwrap()).as_str(), stream)\n\n {\n\n Ok(stream) => stream,\n\n Err(err) => {\n", "file_path": "src/cert.rs", "rank": 39, "score": 11.968380772022995 }, { "content": " pub examples: Option<HashMap<String, Response>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub benchmark: Option<Benchmark>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub cert: Option<CertificateDetail>,\n\n}\n\nimpl Interaction {\n\n pub fn sequence_interactions_from_yaml(content: &str) -> AnyResult<Vec<Interaction>> {\n\n let result: SequenceInteractions = serde_yaml::from_str(content)?;\n\n Ok(result.http_interactions)\n\n }\n\n pub fn from_yaml(content: &str) -> AnyResult<Interaction> {\n\n let result = serde_yaml::from_str(content)?;\n\n Ok(result)\n\n }\n\n pub fn types(&self) -> Vec<&str> {\n\n let mut v = vec![];\n\n if self.benchmark.is_some() {\n\n v.push(\"benchmark\");\n\n }\n", "file_path": "src/data.rs", "rank": 40, "score": 11.825112751069302 }, { "content": " }\n\n }\n\n None => str_value,\n\n }\n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use maplit::hashmap;\n\n\n\n #[test]\n\n fn test_vars() {\n\n let mut infos: HashMap<String, VarInfo> = HashMap::new();\n\n infos.insert(\n\n \"auth\".into(),\n\n VarInfo {\n\n expr: Some(\"Auth (.*)\".into()),\n\n kind: \"regex\".into(),\n\n from: \"/body\".into(),\n\n default: None,\n", "file_path": "src/vars.rs", "rank": 41, "score": 11.706145177033967 }, { "content": "pub struct DrySender {\n\n example: String,\n\n}\n\n\n\nimpl DrySender {\n\n pub fn new(example: &str) -> Self {\n\n DrySender {\n\n example: example.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl Sender for DrySender {\n\n fn send(&self, 
inter: &Interaction) -> AnyResult<Response> {\n\n let request = &inter.request;\n\n if let Some(examples) = &inter.examples {\n\n if let Some(ex) = examples.get(&self.example) {\n\n return Ok(ex.clone());\n\n } else {\n\n eprintln!(\"dry_send not found example: {}\", self.example);\n", "file_path": "src/sender.rs", "rank": 42, "score": 11.633886052251533 }, { "content": " #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub headers: Option<HashMap<String, HeaderList>>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub form: Option<HashMap<String, String>>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub body: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub uri_list: Option<Vec<String>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub vars_command: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub vars: Option<HashMap<String, VarInfo>>,\n\n}\n\nimpl Request {\n\n pub fn get_id(&self) -> String {\n\n self.id\n\n .as_ref()\n\n .map_or_else(|| \"request\".to_string(), std::string::ToString::to_string)\n", "file_path": "src/data.rs", "rank": 43, "score": 11.177173880095392 }, { "content": " let ok = if self.flip {\n\n results.iter().all(|r| !r.violations.is_empty())\n\n && results.iter().all(|r| r.error.is_none())\n\n } else {\n\n results.iter().all(|r| r.violations.is_empty())\n\n && results.iter().all(|r| r.error.is_none())\n\n };\n\n RunnerReport { ok, results }\n\n }\n\n}\n\n\n\npub struct RunnerReport {\n\n pub ok: bool,\n\n pub results: Vec<CheckResult>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use mockito::{mock, server_address};\n\n use super::*;\n", "file_path": "src/runner.rs", "rank": 44, "score": 11.073859096187958 }, { "content": "}\n\nimpl Default for Context {\n\n fn default() -> Self {\n\n Context {\n\n vars_bag: HashMap::new(),\n\n response_bag: HashMap::new(),\n\n config: Config { var_braces: None },\n\n }\n\n }\n\n}\n\nimpl Context {\n\n pub fn new() -> Self {\n\n Context::default()\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Runner {\n\n pub exit_on_failure: bool,\n\n}\n", "file_path": "src/data.rs", "rank": 45, "score": 11.062998248092658 }, { "content": " None => Box::new(ReqwestSender::new()),\n\n };\n\n s\n\n }\n\n}\n\n\n\npub struct ReqwestSender {}\n\nimpl Default for ReqwestSender {\n\n fn default() -> Self {\n\n ReqwestSender {}\n\n }\n\n}\n\nimpl ReqwestSender {\n\n pub fn new() -> Self {\n\n ReqwestSender::default()\n\n }\n\n}\n\nimpl Sender for ReqwestSender {\n\n fn send(&self, inter: &Interaction) -> AnyResult<Response> {\n\n let request = &inter.request;\n", "file_path": "src/sender.rs", "rank": 46, "score": 11.007529949068674 }, { "content": " let res = inter.send(self.sender);\n\n match res {\n\n Ok(_) => {}\n\n Err(err) => {\n\n return CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request,\n\n violations: vec![],\n\n response: None,\n\n duration: Some(now.elapsed()),\n\n error: Some(err.to_string()),\n\n };\n\n }\n\n }\n\n let t = now.elapsed();\n\n let res = t.as_millis();\n\n h.increment(res as u64).unwrap();\n\n total += res;\n\n }\n\n\n", "file_path": "src/bench.rs", "rank": 47, "score": 10.796846975742524 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::data::{Context, Interaction, Violation};\n\n use crate::runner::SequenceRunner;\n\n use crate::sender::{SenderBuilder, SenderOptions};\n\n\n\n use mockito::{mock, server_address};\n\n use serde_json::json;\n\n use 
std::collections::HashMap;\n\n\n\n const ITC_SIMPLE: &str = include_str!(\"fixtures/simple.yaml\");\n\n const ITC_JSON: &str = include_str!(\"fixtures/json.yaml\");\n\n const ITC_WITH_DEFAULTS: &str = include_str!(\"fixtures/with-defaults.yaml\");\n\n\n\n fn run_interactions(itc: &str) -> Vec<Violation> {\n\n let interactions = Interaction::sequence_interactions_from_yaml(itc).unwrap();\n\n let mut ctx = Context::new();\n\n ctx.vars_bag\n\n .insert(\"host\".to_string(), server_address().to_string());\n", "file_path": "src/lib.rs", "rank": 49, "score": 10.630983104177119 }, { "content": " let success = res.violations.is_empty();\n\n let test_name = format!(\"[{}] {}\", res.kind, res.request.get_id());\n\n if success {\n\n cases.push(TestCase::success(\n\n test_name.as_str(),\n\n JUnitDuration::from_std(res.duration.unwrap()).unwrap(),\n\n ));\n\n } else {\n\n cases.push(TestCase::failure(\n\n test_name.as_str(),\n\n JUnitDuration::from_std(res.duration.unwrap()).unwrap(),\n\n \"ERROR\",\n\n serde_yaml::to_string(&res.violations).unwrap().as_str(),\n\n ));\n\n }\n\n });\n\n suite.add_testcases(cases.into_iter());\n\n let mut junit_report = Report::new();\n\n junit_report.add_testsuite(suite);\n\n let mut out: Vec<u8> = Vec::new();\n", "file_path": "src/reporters/junit_output.rs", "rank": 50, "score": 10.559732685832136 }, { "content": " .collect::<Vec<_>>();\n\n if !diffs.is_empty() {\n\n out.push_str(format!(\"{}:\\n\", style(filter).bold()).as_str());\n\n diffs.iter().for_each(|m| {\n\n out.push_str(\n\n format!(\n\n \" {} {}: {}\\n\",\n\n style(\"expected:\").green(),\n\n m.on.as_ref().unwrap_or(&\"N/A\".to_string()),\n\n m.recorded\n\n )\n\n .as_str(),\n\n );\n\n out.push_str(\n\n format!(\n\n \" {} {}: {:?}\\n\",\n\n style(\"got:\").red(),\n\n m.on.as_ref().unwrap_or(&\"N/A\".to_string()),\n\n m.wire\n\n )\n", "file_path": "src/reporters/console_output.rs", "rank": 51, "score": 10.424362842500523 }, { "content": "pub use super::console_output::ConsoleOutput;\n\npub use super::json_output::JsonOutput;\n\npub use super::junit_output::JUnitOutput;\n\nuse crate::data::{CheckResult, Interaction, ReporterConfig, ReporterOutput};\n\nuse std::collections::HashMap;\n\nuse std::marker::Sync;\n\npub struct Reporter<'a> {\n\n pub outputs: Vec<Box<dyn ReporterOutput + 'a>>,\n\n}\n\nunsafe impl<'a> Sync for Reporter<'a> {}\n\n\n\nimpl<'a> Reporter<'a> {\n\n pub fn new(reporters: &HashMap<String, ReporterConfig>) -> Self {\n\n Reporter {\n\n outputs: reporters\n\n .iter()\n\n .map(|(key, cfg)| match key.as_ref() {\n\n \"json\" => Box::new(JsonOutput::new(cfg)) as Box<dyn ReporterOutput>,\n\n \"console\" => Box::new(ConsoleOutput::new(cfg)) as Box<dyn ReporterOutput>,\n\n \"junit\" => Box::new(JUnitOutput::new(cfg)) as Box<dyn ReporterOutput>,\n", "file_path": "src/reporters/reporter.rs", "rank": 52, "score": 10.411159388097138 }, { "content": " if !check_results.violations.is_empty() {\n\n writeln!(\n\n self.buffer,\n\n \"{} {}: {} {}\",\n\n style(FAIL_SIGN).red(),\n\n interaction.request.get_id(),\n\n style(\"failed\").red(),\n\n style(format!(\"{}ms\", check_results.duration.unwrap().as_millis())).dim(),\n\n )\n\n .unwrap();\n\n if self.verbose {\n\n check_results.violations.iter().for_each(|v| {\n\n let (_, _, diff) = diff_text(\n\n &v.recorded,\n\n &v.wire.clone().unwrap_or_else(|| \"\".to_string()),\n\n );\n\n writeln!(self.buffer, \" {}: {}\", v.subject, diff).unwrap();\n\n })\n\n }\n\n } else if check_results.error.is_some() {\n", "file_path": "src/reporters/console_output.rs", "rank": 54, 
"score": 9.989319177624584 }, { "content": " sender: &'a dyn Sender,\n\n flip: bool,\n\n reporters: HashMap<String, ReporterConfig>,\n\n}\n\n\n\nimpl<'a> SequenceRunner<'a> {\n\n pub fn new(\n\n sender: &'a dyn Sender,\n\n flip: bool,\n\n reporters: HashMap<String, ReporterConfig>,\n\n ) -> Self {\n\n SequenceRunner {\n\n flip,\n\n sender,\n\n reporters,\n\n }\n\n }\n\n\n\n pub fn from_opts(run_opts: &'a RunOptions) -> Self {\n\n SequenceRunner {\n", "file_path": "src/runner.rs", "rank": 55, "score": 9.972540610839719 }, { "content": " .as_str(),\n\n );\n\n let (_, _, diff) = diff_text(\n\n &m.recorded,\n\n &m.wire.clone().unwrap_or_else(|| \"\".to_string()),\n\n );\n\n out.push_str(format!(\" diff: {}\\n\", diff).as_str());\n\n })\n\n }\n\n out\n\n }\n\n\n\n fn overwrite_previous_term(&self) {\n\n let term = Term::stdout();\n\n if term.is_term() {\n\n term.clear_last_lines(1).unwrap();\n\n }\n\n }\n\n\n\n fn buffer_to_term(&self) {\n", "file_path": "src/reporters/console_output.rs", "rank": 56, "score": 9.841836419752877 }, { "content": " if let Some(benchmark) = &inter.benchmark {\n\n let mut h = Histogram::new();\n\n let mut total: u128 = 0;\n\n\n\n let prepared = inter.prepare_with(context);\n\n if prepared.is_err() {\n\n return CheckResult {\n\n kind: NAME.to_string(),\n\n request: inter.request.clone(),\n\n violations: vec![],\n\n response: None,\n\n duration: Some(Duration::new(0, 0)),\n\n error: Some(prepared.err().unwrap().to_string()),\n\n };\n\n }\n\n\n\n let inter = prepared.ok().unwrap();\n\n for _ in 0..benchmark.times {\n\n debug!(\"Bench request: {:?}\", &inter.request);\n\n let now = Instant::now();\n", "file_path": "src/bench.rs", "rank": 57, "score": 9.641728549525183 }, { "content": "pub struct Request {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub id: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub desc: Option<String>,\n\n pub uri: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub params: Option<Vec<Param>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub timeout_ms: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub method: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub basic_auth: Option<BasicAuth>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub aws_auth: Option<AWSAuth>,\n\n\n", "file_path": "src/data.rs", "rank": 58, "score": 9.41195752876445 }, { "content": " }\n\n pub fn get_desc(&self) -> String {\n\n format!(\n\n \"{} ({})\",\n\n self.desc.as_ref().unwrap_or(&\"\".to_string()),\n\n self.uri\n\n )\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\npub struct Response {\n\n pub request_id: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub headers: Option<HashMap<String, HeaderList>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub status_code: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub body: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/data.rs", "rank": 59, "score": 9.372067312090621 }, { "content": "\n\n const ITC_OK: &str = include_str!(\"fixtures/ok.yaml\");\n\n const ITC_OK_THEN_ERROR: &str = include_str!(\"fixtures/ok-then-error.yaml\");\n\n\n\n fn execute_test(seq: &str, flip: bool) -> RunnerReport {\n\n let interactions = Interaction::sequence_interactions_from_yaml(seq).unwrap();\n\n let mut ctx = Context::new();\n\n 
ctx.vars_bag\n\n .insert(\"host\".to_string(), server_address().to_string());\n\n\n\n let sender = SenderBuilder::build(SenderOptions { dry_run: None });\n\n let runner = SequenceRunner::new(sender.as_ref(), flip, HashMap::new());\n\n let report = runner.run(&mut ctx, &interactions);\n\n return report;\n\n }\n\n\n\n #[test]\n\n fn test_runner_return_status_no_violations() {\n\n let report = execute_test(ITC_OK, false);\n\n assert_eq!(report.ok, true);\n", "file_path": "src/runner.rs", "rank": 60, "score": 9.096341682269864 }, { "content": "use crate::data::{CheckResult, Interaction, ReporterConfig, ReporterOutput};\n\nuse junit_report::{Duration as JUnitDuration, Report, TestCase, TestSuite};\n\nuse std::fs;\n\npub struct JUnitOutput {\n\n config: ReporterConfig,\n\n}\n\nunsafe impl Sync for JUnitOutput {}\n\n\n\nimpl JUnitOutput {\n\n pub fn new(config: &ReporterConfig) -> JUnitOutput {\n\n JUnitOutput {\n\n config: config.clone(),\n\n }\n\n }\n\n}\n\nimpl ReporterOutput for JUnitOutput {\n\n fn end(&mut self, _interactions: &[Interaction], results: &[CheckResult]) {\n\n let mut suite = TestSuite::new(\"Violation Checks\");\n\n let mut cases = vec![];\n\n results.iter().for_each(|res| {\n", "file_path": "src/reporters/junit_output.rs", "rank": 61, "score": 8.969175010252016 }, { "content": " pub name: String,\n\n pub desc: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct BasicAuth {\n\n pub user: String,\n\n pub password: Option<String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct AWSAuth {\n\n pub region: Option<String>,\n\n pub endpoint: Option<String>,\n\n pub service: String,\n\n pub key: String,\n\n pub secret: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n", "file_path": "src/data.rs", "rank": 62, "score": 8.88734122380394 }, { "content": " }\n\n if let Some(form) = &request.form {\n\n rq_builder = rq_builder.form(form);\n\n }\n\n if let Some(aws) = &request.aws_auth {\n\n let credentials = AwsCredentials::new(aws.key.clone(), aws.secret.clone(), None, None);\n\n let default_region = \"us-east-1\".to_string();\n\n let reg_str = aws.region.as_ref().unwrap_or(&default_region);\n\n\n\n let region: Region = Region::from_str(reg_str).unwrap_or_else(|_| Region::Custom {\n\n name: reg_str.to_string(),\n\n endpoint: aws.endpoint.clone().unwrap_or_default(),\n\n });\n\n\n\n let mut headers = HeaderMap::new();\n\n\n\n // note the path is '/' because at this point we only care about checking service-level access\n\n let mut signed_request =\n\n SignedRequest::new(method.as_str(), aws.service.as_str(), &region, \"/\");\n\n\n", "file_path": "src/sender.rs", "rank": 63, "score": 8.842999679078185 }, { "content": " },\n\n );\n\n infos.insert(\n\n \"person_name\".into(),\n\n VarInfo {\n\n expr: Some(\"(.*)\".into()),\n\n from: \"/body/person/name\".into(),\n\n kind: \"json\".into(),\n\n default: None,\n\n },\n\n );\n\n infos.insert(\n\n \"token\".into(),\n\n VarInfo {\n\n expr: Some(\"Bearer (.*)\".into()),\n\n kind: \"regex\".into(),\n\n from: \"/headers/Authentication/0\".into(),\n\n default: Some(\"\".into()),\n\n },\n\n );\n", "file_path": "src/vars.rs", "rank": 64, "score": 8.496073848047459 }, { "content": "use crate::data::{CheckResult, Interaction, ReporterConfig, ReporterOutput};\n\nuse serde_json;\n\n\n\n#[derive(Serialize)]\n\npub struct EndEvent<'a> {\n\n interactions: &'a [Interaction],\n\n results: &'a [CheckResult],\n\n}\n\npub struct JsonOutput {}\n\nunsafe impl Sync for JsonOutput {}\n\n\n\nimpl 
JsonOutput {\n\n pub fn new(_config: &ReporterConfig) -> JsonOutput {\n\n JsonOutput {}\n\n }\n\n}\n\nimpl ReporterOutput for JsonOutput {\n\n fn end(&mut self, interactions: &[Interaction], results: &[CheckResult]) {\n\n println!(\n\n \"{}\",\n\n serde_json::to_value(&EndEvent {\n\n interactions,\n\n results,\n\n })\n\n .unwrap()\n\n );\n\n }\n\n}\n", "file_path": "src/reporters/json_output.rs", "rank": 65, "score": 8.171167773064907 }, { "content": "impl Default for Runner {\n\n fn default() -> Runner {\n\n Runner {\n\n exit_on_failure: false,\n\n }\n\n }\n\n}\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct SequenceInteractions {\n\n pub http_interactions: Vec<Interaction>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Interaction {\n\n pub request: Request,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub response: Option<Response>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub invalid: Option<Response>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/data.rs", "rank": 66, "score": 8.11178657447601 }, { "content": "<p align=\"center\">\n\n<br/>\n\n<br/>\n\n <img src=\"media/spk.png\" width=\"355\"/>\n\n<br/>\n\n<br/>\n\n</p>\n\n<p align=\"center\">\n\n<hr/>\n\n</p>\n\n\n\n<p align=\"center\">\n\n<img src=\"https://github.com/spectralops/service-policy-kit/actions/workflows/build.yml/badge.svg\"/>\n\n\n\n</p>\n\n\n\n# Service Policy Kit\n\n\n\n`service_policy_kit` is a Rust based toolkit for verifying HTTP services against policies. You can:\n\n\n\n* Build a complete testing framework on your own, with `service_policy_kit` taking care of all the expectation logic\n\n* Run fuzzing tests against your services\n\n* Integrate in your code to perform custom readiness/self checks\n\n* Build your own tools (CLIs) that perform service validation and security testing of different kinds\n\n\n\n## Quick Start\n\n\n\nAdd to `Cargo.toml`\n\n\n\n\n\n```ini\n\nservice_policy_kit = \"0.2.0\"\n\n```\n\n\n\n## Example\n\n\n\nHere's a full-blown policy runner that you can reuse:\n\n\n\n```rust\n\nuse serde_yaml;\n\nuse service_policy_kit::data::{Context, SequenceInteractions};\n\nuse service_policy_kit::runner::{RunOptions, SequenceRunner};\n\nuse std::process::exit;\n\n\n\nfn main() {\n\n let opts = RunOptions::default();\n\n let runner = SequenceRunner::from_opts(&opts);\n\n\n\n let sequence: SequenceInteractions = serde_yaml::from_str(\n\n r#\"\n\nhttp_interactions:\n\n- request:\n\n id: step one\n\n uri: http://example.com\n\n response:\n\n status_code: \"200\"\n\n\"#,\n\n )\n\n .unwrap();\n\n let mut context = Context::new();\n\n let res = runner.run(&mut context, &sequence.http_interactions);\n\n exit(if res.ok { 0 } else { 1 })\n\n}\n\n```\n\n\n\nYou can run it by cloning this repo, and then:\n\n\n\n```\n\ncargo run --example quick-start\n\n```\n\n\n\nYou should get:\n\n\n\n```yaml\n\n$ cargo run --examples quick-start\n\n\n\n✔ step one: ok 288ms\n\n\n\nRan 1 interactions with 1 checks in 288ms\n\n\n\nSuccess: 1\n\nFailure: 0\n\n Error: 0\n\nSkipped: 0\n\n```\n\n\n\n\n", "file_path": "README.md", "rank": 67, "score": 7.948337139994347 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Benchmark {\n\n pub times: u64,\n\n pub avg_ms: u64,\n\n pub p95_ms: u64,\n\n pub p99_ms: u64,\n\n pub time_ms: u64,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct CertificateDetail {\n\n pub max_days: u64,\n\n pub subject: Option<String>,\n\n pub 
issuer: Option<String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Param {\n", "file_path": "src/data.rs", "rank": 68, "score": 7.848566171699158 }, { "content": " request_id: None,\n\n body: Some(json!({\"person\": {\"name\": \"joe\"}}).to_string()),\n\n status_code: Some(\"200\".into()),\n\n headers: Some(HashMap::new()),\n\n vars: None,\n\n };\n\n let vars = extract(&resp, &infos);\n\n assert_eq!(vars.get(\"person_name\").unwrap(), \"joe\", \"person_name\");\n\n\n\n let resp = Response {\n\n request_id: None,\n\n body: Some(\"Auth 1337\".into()),\n\n status_code: Some(\"200\".into()),\n\n headers: Some(HashMap::new()),\n\n vars: None,\n\n };\n\n let vars = extract(&resp, &infos);\n\n assert_eq!(vars.get(\"auth\").unwrap(), \"1337\", \"auth\");\n\n\n\n let headers = hashmap! {\n", "file_path": "src/vars.rs", "rank": 69, "score": 7.669302348886047 }, { "content": " pub vars: Option<HashMap<String, String>>,\n\n}\n\n\n\nimpl Response {\n\n pub fn save_vars(&self, context: &mut Context) {\n\n let vars_bag = &mut context.vars_bag;\n\n if let Some(vars) = &self.vars {\n\n vars.iter().for_each(|(k, v)| {\n\n vars_bag.insert(k.clone(), v.clone());\n\n })\n\n }\n\n }\n\n pub fn save_response(&self, context: &mut Context) {\n\n if let Some(request_id) = self.request_id.as_ref() {\n\n let response_bag = &mut context.response_bag;\n\n response_bag.insert(request_id.clone(), self.clone());\n\n }\n\n }\n\n}\n", "file_path": "src/data.rs", "rank": 70, "score": 7.2396911436351825 }, { "content": " headers.insert(k.to_string(), vec![]);\n\n }\n\n headers\n\n .get_mut(&k)\n\n .unwrap()\n\n .push(value.to_str().unwrap().to_string());\n\n }\n\n });\n\n let resp = Response {\n\n status_code: Some(rq_resp.status().to_string()),\n\n headers: Some(headers),\n\n request_id: Some(request.get_id()),\n\n vars: None,\n\n body: Some(rq_resp.text().unwrap()),\n\n };\n\n\n\n Ok(resp)\n\n }\n\n}\n\n\n", "file_path": "src/sender.rs", "rank": 71, "score": 6.9553564258153635 }, { "content": " dry_run: Option<String>,\n\n flip: bool,\n\n reporter: Option<String>,\n\n verbose: bool,\n\n ) -> Self {\n\n let sender = SenderBuilder::build(SenderOptions { dry_run });\n\n let mut reporters = HashMap::new();\n\n let mut rc = HashMap::new();\n\n if verbose {\n\n rc.insert(\"verbose\".to_string(), \"true\".to_string());\n\n }\n\n reporters.insert(reporter.unwrap_or_else(|| \"console\".to_string()), rc);\n\n RunOptions {\n\n sender,\n\n reporters,\n\n flip,\n\n }\n\n }\n\n}\n\npub struct SequenceRunner<'a> {\n", "file_path": "src/runner.rs", "rank": 72, "score": 6.583253078433899 }, { "content": " let now = Instant::now();\n\n // main func should always return check result\n\n // match here and move err into CheckResult.err\n\n let r = interaction.send_with_context(self.sender, context);\n\n match r {\n\n Ok(resp) => {\n\n let matcher = RegexMatcher::new(NAME);\n\n let vs = matcher.is_match(&resp, interaction.invalid.as_ref());\n\n if vs.is_empty() {\n\n return CheckResult::invalid_err(\n\n self.name(),\n\n interaction,\n\n \"matched invalid response\",\n\n );\n\n }\n\n let vs = matcher.is_match(&resp, interaction.response.as_ref());\n\n\n\n CheckResult {\n\n kind: NAME.to_string(),\n\n request: interaction.request.clone(),\n", "file_path": "src/content.rs", "rank": 73, "score": 6.115854925097677 }, { "content": " cert: None,\n\n examples: None,\n\n })\n\n })\n\n .flatten()\n\n .collect::<Vec<_>>(),\n\n Err(_e) => vec![], // XXX not used\n\n };\n\n Ok(interactions)\n\n }\n\n}\n", "file_path": 
"src/discovery/open_api.rs", "rank": 74, "score": 6.099713714929905 }, { "content": " && c.violations.is_empty())\n\n .count()\n\n )\n\n .green(),\n\n style(\n\n results\n\n .iter()\n\n .filter(|c| c.error.is_none()\n\n && c.response.is_some()\n\n && !c.violations.is_empty())\n\n .count()\n\n )\n\n .red(),\n\n style(results.iter().filter(|c| c.error.is_some()).count()).red(),\n\n style(\n\n results\n\n .iter()\n\n .filter(|c| c.error.is_none() && c.response.is_none())\n\n .count()\n\n )\n", "file_path": "src/reporters/console_output.rs", "rank": 75, "score": 6.0864806172598875 }, { "content": " .dim(),\n\n )\n\n .unwrap();\n\n }\n\n }\n\n\n\n self.buffer_to_term();\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::data::Cause;\n\n use std::time::Duration;\n\n\n\n #[test]\n\n fn clicolor_behavior() {\n\n let inter = Interaction::from_yaml(\n\n r#\"\n", "file_path": "src/reporters/console_output.rs", "rank": 76, "score": 6.036360277849282 }, { "content": " fn test_failing_interaction_with_vars() {\n\n let _m1 = mock(\"GET\", \"/one\").with_body(\"next: two\").create();\n\n\n\n // note: FAILING 'four' is not what we expect in interaction.\n\n let _m2 = mock(\"GET\", \"/two\").with_body(\"four\").create();\n\n\n\n let results = run_interactions(ITC_SIMPLE);\n\n assert_eq!(results[0].wire.clone().unwrap(), \"four\");\n\n assert_eq!(results[0].recorded, \"three\");\n\n }\n\n\n\n #[test]\n\n fn test_json_api() {\n\n let _m1 = mock(\"GET\", \"/api\")\n\n .with_body(json!({\"person\":{\"name\":\"joe\"}}).to_string())\n\n .create();\n\n let _m2 = mock(\"GET\", \"/joe\").with_body(\"hello, joe\").create();\n\n let results = run_interactions(ITC_JSON);\n\n println!(\"{:?}\", results);\n\n assert_eq!(results.len(), 0);\n", "file_path": "src/lib.rs", "rank": 77, "score": 5.987357319128131 }, { "content": " if self.response.is_some() {\n\n v.push(\"verify\");\n\n }\n\n if self.cert.is_some() {\n\n v.push(\"cert\");\n\n }\n\n v\n\n }\n\n pub fn prepare_with(&self, context: &mut Context) -> AnyResult<Self> {\n\n self.ensure_requirements(context)?;\n\n let mut res = self.clone();\n\n let mut req = res.request;\n\n let fmtstring = context\n\n .config\n\n .var_braces\n\n .clone()\n\n .unwrap_or_else(|| \"{{var}}\".to_string());\n\n let responses = &context.response_bag;\n\n let response_vars = &context.vars_bag;\n\n\n", "file_path": "src/data.rs", "rank": 78, "score": 5.947462870712853 }, { "content": "mod console_output;\n\nmod json_output;\n\nmod junit_output;\n\nmod reporter;\n\npub use self::reporter::Reporter;\n\nuse crate::data::ReporterConfig;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "src/reporters/mod.rs", "rank": 79, "score": 5.838086056912822 }, { "content": " eprintln!(\"examples: {:?}\", inter.examples);\n\n }\n\n }\n\n // no example was given\n\n Ok(Response {\n\n request_id: Some(request.get_id()),\n\n headers: None,\n\n status_code: Some(\"200\".to_string()),\n\n body: Some(\"{ \\\"ok\\\": true }\".to_string()),\n\n vars: None,\n\n })\n\n }\n\n}\n", "file_path": "src/sender.rs", "rank": 80, "score": 5.701700565937941 }, { "content": " }\n\n\n\n #[test]\n\n fn test_runner_return_status_with_violations() {\n\n let _m1 = mock(\"GET\", \"/api/ok\").with_status(400).create();\n\n let report = execute_test(ITC_OK, false);\n\n assert_eq!(report.ok, false);\n\n }\n\n\n\n #[test]\n\n fn test_runner_return_status_with_some_violations() {\n\n let _m1 = mock(\"GET\", \"/api/ok\").with_status(200).create();\n\n let _m2 = mock(\"GET\", 
\"/api/error\").with_status(200).create();\n\n let report = execute_test(ITC_OK_THEN_ERROR, false);\n\n assert_eq!(report.ok, false);\n\n }\n\n\n\n #[test]\n\n fn test_runner_flip_return_status_no_violations() {\n\n let _m1 = mock(\"GET\", \"/api/ok\").create();\n", "file_path": "src/runner.rs", "rank": 81, "score": 5.662822725615816 }, { "content": " form: None,\n\n uri: format!(\"http://{{{{host}}}}{}\", path),\n\n id: None,\n\n desc: None,\n\n timeout_ms: None,\n\n headers: None,\n\n body: None,\n\n uri_list: None,\n\n vars_command: None,\n\n vars: None,\n\n },\n\n response: Some(Response {\n\n headers: None,\n\n status_code: Some(\"200\".to_string()),\n\n body: None,\n\n vars: None,\n\n request_id: None,\n\n }),\n\n invalid: None,\n\n benchmark: None,\n", "file_path": "src/discovery/open_api.rs", "rank": 82, "score": 5.648899845850592 }, { "content": " HeaderValue::from_str(v.clone().as_str()).unwrap(),\n\n );\n\n });\n\n });\n\n rq_builder = rq_builder.headers(headersmap);\n\n };\n\n\n\n if let Some(body) = &request.body {\n\n rq_builder = rq_builder.body(reqwest::blocking::Body::from(body.to_string()));\n\n }\n\n\n\n // GO!\n\n let rq_resp = rq_builder.send()?;\n\n\n\n // from_reqest -> RQResponse\n\n let mut headers: HashMap<String, Vec<String>> = HashMap::new();\n\n rq_resp.headers().iter().for_each(|(key, value)| {\n\n if value.to_str().is_ok() {\n\n let k = key.to_string();\n\n if !headers.contains_key(&k) {\n", "file_path": "src/sender.rs", "rank": 83, "score": 5.62408894261117 }, { "content": "#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde;\n\nextern crate serde_yaml;\n\n\n\nextern crate env_logger;\n\n\n\nextern crate histogram;\n\nextern crate log;\n\nextern crate reqwest;\n\npub mod bench;\n\npub mod cert;\n\npub mod content;\n\npub mod data;\n\npub mod discovery;\n\npub mod matcher;\n\npub mod reporters;\n\npub mod runner;\n\npub mod sender;\n\npub mod vars;\n", "file_path": "src/lib.rs", "rank": 84, "score": 5.545961350027163 }, { "content": " } else if final_value.is_boolean() {\n\n format!(\"{}\", final_value.as_bool().unwrap())\n\n } else if final_value.is_null() {\n\n \"\".to_string()\n\n } else if final_value.is_f64() {\n\n format!(\"{}\", final_value.as_f64().unwrap())\n\n } else if final_value.is_u64() {\n\n format!(\"{}\", final_value.as_u64().unwrap())\n\n } else {\n\n format!(\"{}\", final_value)\n\n };\n\n match &info.expr {\n\n Some(expr) => {\n\n let re = fancy_regex::Regex::new(expr).unwrap();\n\n let caps = re.captures(str_value.as_str()).unwrap();\n\n if let Some(c) = caps {\n\n let cap = if c.len() > 1 { c.get(1) } else { c.get(0) };\n\n cap.unwrap().as_str().to_string()\n\n } else {\n\n \"\".to_string()\n", "file_path": "src/vars.rs", "rank": 85, "score": 5.374958645784131 }, { "content": " .paths\n\n .iter()\n\n .map(|(path, item)| {\n\n let verb = if item.get.is_some() {\n\n \"get\"\n\n } else if item.post.is_some() {\n\n \"post\"\n\n } else if item.delete.is_some() {\n\n \"delete\"\n\n } else if item.put.is_some() {\n\n \"put\"\n\n } else {\n\n return None;\n\n };\n\n Some(Interaction {\n\n request: Request {\n\n params: None,\n\n method: Some(verb.to_string()),\n\n basic_auth: None,\n\n aws_auth: None,\n", "file_path": "src/discovery/open_api.rs", "rank": 86, "score": 5.244639557394113 }, { "content": "\n\n pub fn ensure_requirements(&self, context: &Context) -> AnyResult<()> {\n\n if let Some(params) = self.request.params.as_ref() {\n\n let missing_params = params\n\n .iter()\n\n .filter(|p| 
!context.vars_bag.contains_key(&p.name))\n\n .collect::<Vec<_>>();\n\n if !missing_params.is_empty() {\n\n return Err(anyhow!(\n\n \"Missing required params:\\n{}\",\n\n missing_params\n\n .iter()\n\n .map(|p| format!(\"name: {}\\ndescription: {}\\n\", p.name, p.desc))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n ));\n\n }\n\n }\n\n Ok(())\n\n }\n", "file_path": "src/data.rs", "rank": 87, "score": 5.097887715965012 }, { "content": " Ok(mut p) => {\n\n let (out, _err) = p.communicate(Some(&serialized_message)).unwrap();\n\n\n\n if let Some(_exit_status) = p.poll() {\n\n if let Some(output) = out {\n\n debug!(\"got:\\n{}\", output);\n\n let vars: HashMap<String, String> =\n\n serde_json::from_str(output.as_str()).unwrap();\n\n debug!(\"into vars:\\n{:?}\", vars);\n\n return vars;\n\n }\n\n // the process has finished\n\n } else {\n\n // it is still running, terminate it\n\n p.terminate().unwrap();\n\n }\n\n }\n\n Err(err) => error!(\"error executing vars command '{}': {}\", cmd, err),\n\n }\n\n HashMap::new()\n\n}\n\n\n", "file_path": "src/data.rs", "rank": 88, "score": 5.078942672769006 }, { "content": " ]\n\n .iter()\n\n .for_each(|h| {\n\n headers.insert(\n\n h.to_string().parse::<HeaderName>().unwrap(),\n\n String::from_utf8_lossy(&rh.get(*h).unwrap()[0])\n\n .parse()\n\n .unwrap(),\n\n );\n\n });\n\n\n\n rq_builder = rq_builder.headers(headers);\n\n }\n\n\n\n if let Some(headers) = &request.headers {\n\n let mut headersmap = HeaderMap::new();\n\n headers.iter().for_each(|(key, val)| {\n\n val.iter().for_each(|v| {\n\n headersmap.insert(\n\n key.to_lowercase().parse::<HeaderName>().unwrap(),\n", "file_path": "src/sender.rs", "rank": 89, "score": 5.061997615765108 }, { "content": " let report = execute_test(ITC_OK, true);\n\n assert_eq!(report.ok, false);\n\n }\n\n\n\n #[test]\n\n fn test_runner_flip_return_status_with_violations() {\n\n let _m1 = mock(\"GET\", \"/api/ok\").with_status(400).create();\n\n let report = execute_test(ITC_OK, true);\n\n assert_eq!(report.ok, true);\n\n }\n\n\n\n #[test]\n\n fn test_runner_flip_return_status_with_some_violations() {\n\n let _m1 = mock(\"GET\", \"/api/ok\").with_status(200).create();\n\n let _m2 = mock(\"GET\", \"/api/error\").with_status(200).create();\n\n let report = execute_test(ITC_OK_THEN_ERROR, true);\n\n assert_eq!(report.ok, false);\n\n }\n\n}\n", "file_path": "src/runner.rs", "rank": 90, "score": 4.79085446331145 }, { "content": " \"Authentication\".to_string() => vec![\"Bearer 000foobar000\".to_string()],\n\n };\n\n let resp = Response {\n\n request_id: None,\n\n body: Some(\"hello world\".into()),\n\n status_code: Some(\"200\".into()),\n\n headers: Some(headers),\n\n vars: None,\n\n };\n\n let vars = extract(&resp, &infos);\n\n assert_eq!(vars.get(\"token\").unwrap(), \"000foobar000\", \"token\");\n\n assert_eq!(\n\n vars.get(\"body_default\").unwrap(),\n\n \"meh-body\",\n\n \"body_default\"\n\n );\n\n assert_eq!(\n\n vars.get(\"headers_default\").unwrap(),\n\n \"meh-headers\",\n\n \"headers_default\"\n\n )\n\n }\n\n}\n", "file_path": "src/vars.rs", "rank": 91, "score": 4.780875593545083 }, { "content": " let term = Term::stdout();\n\n term.write_str(&self.buffer).unwrap();\n\n term.flush().unwrap();\n\n }\n\n}\n\nimpl ReporterOutput for ConsoleOutput {\n\n fn start(&mut self, interaction: &Interaction) {\n\n self.buffer.clear();\n\n writeln!(\n\n self.buffer,\n\n \"• {}: {}\",\n\n interaction.request.get_id(),\n\n style(\"started\").magenta()\n\n )\n\n .unwrap();\n\n\n\n self.buffer_to_term();\n\n }\n\n fn report(&mut self, 
interaction: &Interaction, check_results: &CheckResult) {\n\n self.buffer.clear();\n", "file_path": "src/reporters/console_output.rs", "rank": 92, "score": 4.014834038324562 }, { "content": "use crate::vars::extract;\n\nuse anyhow::anyhow;\n\nuse anyhow::Result as AnyResult;\n\nuse log::*;\n\nuse std::collections::HashMap;\n\nuse std::time::Duration;\n\nuse subprocess::{Popen, PopenConfig, Redirection};\n\n\n", "file_path": "src/data.rs", "rank": 93, "score": 3.9595478982696184 }, { "content": "## Capabilities\n\n\n\n* :white_check_mark: &nbsp;Flexible design: Use the runner for any purpose, sequence or individual interactions\n\n* :white_check_mark: &nbsp;Contextual flows: interactions can extract, define and pass variables to the next ones\n\n* :white_check_mark: &nbsp;Out of the box reporters: saves you some boilerplate work\n\nMultiple checks included: content, benchmark, certificates\n\n* :white_check_mark: &nbsp;Discovery (WIP): given recorded API interactions, or an API spec, automatically generate interactions.\n\n\n\n\n\n## Concepts\n\n\n\nThere are a few concepts that make up `service_policy_kit`: `Interaction`, `Expectation`, `Check`, `Violation` and `Runners`.\n\n\n\n### Interaction\n\n\n\nAn interaction is a definition of calling an external service, and the expected responses per check type.\n\n\n\n```rust\n\nInteraction {\n\n request,\n\n response,\n\n examples,\n\n benchmark,\n\n cert,\n\n}\n\n```\n\n\n\n### Expectation (Policy)\n\n\n\nAn expectation is a set of expected matchers for all of the parts that are extracted from an interaction response.\n\n\n\n\n\nEach of the fields take regular expressions and are matched against a live response accordingly.\n\n\n\n```rust\n\nResponse {\n\n headers,\n\n status_code,\n\n body,\n\n vars,\n\n}\n\n```\n\n\n\n\n\n\n\n### Check\n\n\n\nA check is an abstract action over a response. For example, running content expectation, a benchmark, or any other policy against a service.\n\n\n\n\n\n### Violation\n\n\n\nAny check can output violation. A successful check has no violations.\n\n### Runners\n\n\n\nA runner takes a set of _interactions_ and execute these. For example, the included `SequenceRunner` will always execute interactions in a sequence, extracting variables from one interaction and passing it to the next one via `Context`.\n\n\n\n# Thanks\n\n\n\nTo all [Contributors](https://github.com/spectralops/service-policy-kit/graphs/contributors) - you make this happen, thanks!\n\n\n\n\n\n# Copyright\n\n\n\nCopyright (c) 2021 [@jondot](http://twitter.com/jondot). 
See [LICENSE](LICENSE.txt) for further details.\n\n\n\n\n", "file_path": "README.md", "rank": 94, "score": 3.890805005684745 }, { "content": "use crate::data::{Response, VarInfo};\n\nuse serde_json;\n\nuse serde_json::json;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "src/vars.rs", "rank": 95, "score": 3.8057490259828497 }, { "content": "use serde_yaml;\n\nuse service_policy_kit::data::{Context, SequenceInteractions};\n\nuse service_policy_kit::runner::{RunOptions, SequenceRunner};\n\nuse std::process::exit;\n\n\n", "file_path": "examples/quick-start.rs", "rank": 96, "score": 3.707903890638611 }, { "content": "\n\n let sender = SenderBuilder::build(SenderOptions { dry_run: None });\n\n let runner = SequenceRunner::new(sender.as_ref(), false, HashMap::new());\n\n let report = runner.run(&mut ctx, &interactions);\n\n report\n\n .results\n\n .iter()\n\n .flat_map(|r| r.violations.clone())\n\n .collect::<Vec<_>>()\n\n }\n\n\n\n #[test]\n\n fn test_passing_interaction_with_vars() {\n\n let _m1 = mock(\"GET\", \"/one\").with_body(\"next: two\").create();\n\n let _m2 = mock(\"GET\", \"/two\").with_body(\"three\").create();\n\n let results = run_interactions(ITC_SIMPLE);\n\n assert_eq!(results.len(), 0);\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 97, "score": 3.2277113683425243 }, { "content": "pub mod open_api;\n", "file_path": "src/discovery/mod.rs", "rank": 98, "score": 3.0739656611922657 }, { "content": " infos.insert(\n\n \"body_default\".into(),\n\n VarInfo {\n\n expr: Some(\"(.*)\".into()),\n\n kind: \"regex\".into(),\n\n from: \"body\".into(),\n\n default: Some(\"meh-body\".into()),\n\n },\n\n );\n\n infos.insert(\n\n \"headers_default\".into(),\n\n VarInfo {\n\n expr: Some(\"(.*)\".into()),\n\n kind: \"regex\".into(),\n\n from: \"/headers/Foobar\".into(),\n\n default: Some(\"meh-headers\".into()),\n\n },\n\n );\n\n\n\n let resp = Response {\n", "file_path": "src/vars.rs", "rank": 99, "score": 3.0439090473081074 } ]
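Taken together, the service-policy-kit snippets above (Interaction, Context, SenderBuilder/SenderOptions, SequenceRunner, RunnerReport) compose into a small policy run. The following is a minimal sketch assuming the module layout visible in those snippets (service_policy_kit::data, ::runner, ::sender); the interaction id, the localhost:8080 host value, and the YAML body are illustrative placeholders, not part of the retrieved code.

// Sketch only: wiring the retrieved pieces together under the assumptions noted above.
use std::collections::HashMap;

use service_policy_kit::data::{Context, Interaction};
use service_policy_kit::runner::SequenceRunner;
use service_policy_kit::sender::{SenderBuilder, SenderOptions};

fn main() {
    // One recorded interaction; `{{host}}` is substituted from the context's vars_bag.
    let interactions = Interaction::sequence_interactions_from_yaml(
        r#"
http_interactions:
- request:
    id: health check
    uri: http://{{host}}/api/ok
  response:
    status_code: "200"
"#,
    )
    .expect("interaction YAML should parse");

    let mut ctx = Context::new();
    // Placeholder target; the crate's own tests insert a mock server address here.
    ctx.vars_bag
        .insert("host".to_string(), "localhost:8080".to_string());

    // `dry_run: None` issues real HTTP requests; `Some(key)` replays a recorded example instead.
    let sender = SenderBuilder::build(SenderOptions { dry_run: None });
    let runner = SequenceRunner::new(sender.as_ref(), false, HashMap::new());

    let report = runner.run(&mut ctx, &interactions);
    for check in &report.results {
        for violation in &check.violations {
            eprintln!("violation on {}: {:?}", violation.subject, violation.cause);
        }
    }
    std::process::exit(if report.ok { 0 } else { 1 });
}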
Rust
crates/newport_editor/src/editor.rs
PyroFlareX/newport
ca4b09e98b31d1eefed8a8a545087be8fe28913f
use crate::{
    engine, graphics, math, asset, gpu, os,
    Context, RawInput, DrawState, View, DARK,
    Layout, Panel, Style, Sizing, ColorStyle, LayoutStyle, Shape, TextStyle,
};

use engine::{ Module, Engine, EngineBuilder, InputEvent };
use graphics::{ Graphics, Texture, Pipeline };
use math::{ Rect };
use asset::{ AssetRef, AssetManager };
use os::window::WindowStyle;

use std::sync::{ Mutex, MutexGuard };

struct EditorAssets {
    _close_button: AssetRef<Texture>,
    present_pipeline: AssetRef<Pipeline>,
}

impl EditorAssets {
    fn new() -> Self {
        let asset_manager = Engine::as_ref().module::<AssetManager>().unwrap();

        Self{
            _close_button: asset_manager.find("{ce163885-9cd7-4103-b865-3e41df21ba13}").unwrap(),
            present_pipeline: asset_manager.find("{62b4ffa0-9510-4818-a6f2-7645ec304d8e}").unwrap()
        }
    }
}

#[allow(dead_code)]
struct EditorInner {
    gui: Context,
    input: Option<RawInput>,
    draw_state: DrawState,
    assets: EditorAssets,
    view: View,
}

pub struct Editor(Mutex<EditorInner>);

impl Editor {
    pub fn set_view(&self, view: View) {
        let mut editor = self.lock();
        editor.view = view;
    }

    fn lock(&self) -> MutexGuard<EditorInner> {
        self.0.lock().unwrap()
    }

    fn do_frame(&self, dt: f32) {
        let engine = Engine::as_ref();
        let graphics = engine.module::<Graphics>().unwrap();
        let device = graphics.device();
        let dpi = engine.dpi();

        let backbuffer = device.acquire_backbuffer();

        let mut editor = self.lock();
        let EditorInner {
            gui,
            input,
            draw_state,
            view,
            assets,
        } = &mut *editor;

        let canvas = {
            let mut input = input.take().unwrap_or_default();
            input.viewport = (0.0, 0.0, backbuffer.width() as f32, backbuffer.height() as f32).into();
            input.dt = dt;
            input.dpi = dpi;

            gui.begin_frame(input);

            let mut layout_style: LayoutStyle = gui.style().get();
            layout_style.padding = (12.0, 8.0, 12.0, 8.0).into();
            layout_style.margin = Rect::default();
            gui.style().push(layout_style);

            let mut color: ColorStyle = gui.style().get();
            color.inactive_background = DARK.bg;
            color.unhovered_background = DARK.bg;
            gui.style().push(color);

            let text_style: TextStyle = gui.style().get();
            let height = text_style.label_height() + layout_style.padding.min.y + layout_style.padding.max.y;

            Panel::top("menu_bar", height).build(gui, |builder| {
                let space = builder.available_rect();

                builder.button("File").clicked();
                builder.button("Edit").clicked();
                builder.button("View").clicked();
                builder.button("Run").clicked();
                builder.button("Help").clicked();

                let bounds = builder.layout.push_size(builder.layout.space_left());
                builder.layout(Layout::right_to_left(bounds), |builder| {
                    let mut color: ColorStyle = builder.style().get();
                    color.hovered_background = DARK.red0;
                    color.hovered_foreground = DARK.fg;
                    color.focused_background = DARK.red0;
                    color.focused_foreground = DARK.fg;

                    builder.scoped_style(color, |builder| {
                        if builder.button("Close").clicked() {
                            engine.shutdown();
                        }
                    });

                    if builder.button("Max").clicked() {
                        engine.maximize();
                    }

                    if builder.button("Min").clicked() {
                        engine.minimize();
                    }

                    let drag = builder.layout.available_rect();
                    let drag = Rect::from_pos_size(drag.pos() * builder.input().dpi, drag.size() * builder.input().dpi);
                    engine.set_custom_drag(drag);

                    builder.layout(Layout::left_to_right(space), |builder| {
                        let mut layout_style: LayoutStyle = builder.style().get();
                        layout_style.width_sizing = Sizing::Fill;
                        layout_style.height_sizing = Sizing::Fill;
                        builder.scoped_style(layout_style, |builder| builder.label(format!("{} - Newport Editor", Engine::as_ref().name())));
                    });
                });
            });

            gui.style().pop::<ColorStyle>();

            let bounds = gui.take_canvas();
            let mut builder = gui.builder("view", Layout::up_to_down(bounds));

            let mut color: ColorStyle = builder.style().get();
            builder.painter.push_shape(Shape::solid_rect(bounds, color.inactive_background, 0.0));
            color.inactive_background = DARK.bg;

            builder.scoped_style(color, |builder| {
                let bounds = Rect::from_min_max(bounds.min, bounds.max);
                builder.layout(Layout::up_to_down(bounds), |builder| {
                    view.build(builder);
                });
            });

            builder.finish();

            gui.end_frame()
        };

        device.update_bindless();

        let present_pipeline = assets.present_pipeline.read();

        let mut gfx = device.create_graphics_context().unwrap();
        gfx.begin();
        {
            let imgui = draw_state.record(canvas, &mut gfx, gui).unwrap();

            gfx.begin_render_pass(&graphics.backbuffer_render_pass(), &[&backbuffer]);
            gfx.bind_pipeline(&present_pipeline.gpu);

            struct Import {
                _texture: u32,
            }

            let import_buffer = device.create_buffer(
                gpu::BufferUsage::CONSTANTS,
                gpu::MemoryType::HostVisible,
                std::mem::size_of::<Import>()
            ).unwrap();
            import_buffer.copy_to(&[Import{
                _texture: imgui.bindless().unwrap(),
            }]);

            gfx.bind_constant_buffer(&import_buffer);
            gfx.draw(3, 0);

            gfx.end_render_pass();
            gfx.resource_barrier_texture(&backbuffer, gpu::Layout::ColorAttachment, gpu::Layout::Present);
        }
        gfx.end();

        let receipt = device.submit_graphics(vec![gfx], &[]);
        device.display(&[receipt]);
        device.wait_for_idle();
    }
}

impl Module for Editor {
    fn new() -> Self {
        Self(Mutex::new(EditorInner{
            gui: Context::new(),
            input: None,
            draw_state: DrawState::new(),
            assets: EditorAssets::new(),
            view: View::new("main", 1.0),
        }))
    }

    fn depends_on(builder: EngineBuilder) -> EngineBuilder {
        builder
            .module::<Graphics>()
            .module::<AssetManager>()
            .register(WindowStyle::CustomTitleBar{
                border: 5.0,
                drag: Default::default(),
            })
            .process_input(|engine: &Engine, _window: &os::window::Window, event: &InputEvent| {
                let mut editor = engine.module::<Editor>().unwrap().lock();
                if editor.input.is_none() {
                    editor.input = Some(RawInput::default());
                }
                editor.input.as_mut().unwrap().events.push_back(event.clone());
            })
            .tick(|engine: &Engine, dt: f32| {
                let editor = engine.module::<Editor>().unwrap();
                if engine.window().is_minimized() {
                    return;
                }
                editor.do_frame(dt);
            })
    }
}
use crate::{ engine, graphics, math, asset, gpu, os, Context, RawInput, DrawState, View, DARK, Layout, Panel, Style, Sizing, ColorStyle, LayoutStyle, Shape, TextStyle, }; use engine::{ Module, Engine, EngineBuilder, InputEvent }; use graphics::{ Graphics, Texture, Pipeline }; use math::{ Rect }; use asset::{ AssetRef, AssetManager }; use os::window::WindowStyle; use std::sync::{ Mutex, MutexGuard }; struct EditorAssets { _close_button: AssetRef<Texture>, present_pipeline: AssetRef<Pipeline>, } impl EditorAssets { fn new() -> Self { let asset_manager = Engine::as_ref().module::<AssetManager>().unwrap(); Self{ _close_button: asset_manager.find("{ce163885-9cd7-4103-b865-3e41df21ba13}").unwrap(), present_pipeline: asset_manager.find("{62b4ffa0-9510-4818-a6f2-7645ec304d8e}").unwrap() } } } #[allow(dead_code)] struct EditorInner { gui: Context, input: Option<RawInput>, draw_state: DrawState, assets: EditorAssets, view: View, } pub struct Editor(Mutex<EditorInner>); impl Editor { pub fn set_view(&self, view: View) { let mut editor = self.lock(); editor.view = view; } fn lock(&self) -> MutexGuard<EditorInner> { self.0.lock().unwrap() } fn do_frame(&self, dt: f32) { let engine = Engine::as_ref(); let graphics = engine.module::<Graphics>().unwrap(); let device = graphics.device(); let dpi = engine.dpi(); let backbuffer = device.acquire_backbuffer(); let mut editor = self.lock(); let EditorInner { gui, input, draw_state, view, assets, } = &mut *editor; let canvas = { let mut input = input.take().unwrap_or_default(); input.viewport = (0.0, 0.0, backbuffer.width() as f32, backbuffer.height() as f32).into(); input.dt = dt; input.dpi = dpi; gui.begin_frame(input); let mut layout_style: LayoutStyle = gui.style().get(); layout_style.padding = (12.0, 8.0, 12.0, 8.0).into(); layout_style.margin = Rect::default(); gui.style().push(layout_style); let mut color: ColorStyle = gui.style().get(); color.inactive_background = DARK.bg; color.unhovered_background = DARK.bg; gui.style().push(color); let text_style: TextStyle = gui.style().get(); let height = text_style.label_height() + layout_style.padding.min.y + layout_style.padding.max.y; Panel::top("menu_bar", height).build(gui, |builder| { let space = builder.available_rect(); builder.button("File").clicked(); builder.button("Edit").clicked(); builder.button("View").clicked(); builder.button("Run").clicked(); builder.button("Help").clicked(); let bounds = builder.layout.push_size(builder.layout.space_left()); builder.layout(Layout::right_to_left(bounds), |builder| { let mut color: ColorStyle = builder.style().get(); color.hovered_background = DARK.red0; color.hovered_foreground = DARK.fg; color.focused_background = DARK.red0; color.focused_foreground = DARK.fg; builder.scoped_style(color, |builder| { if builder.button("Close").clicked() { engine.shutdown(); } }); if builder
ct(); let drag = Rect::from_pos_size(drag.pos() * builder.input().dpi, drag.size() * builder.input().dpi); engine.set_custom_drag(drag); builder.layout(Layout::left_to_right(space), |builder| { let mut layout_style: LayoutStyle = builder.style().get(); layout_style.width_sizing = Sizing::Fill; layout_style.height_sizing = Sizing::Fill; builder.scoped_style(layout_style, |builder| builder.label(format!("{} - Newport Editor", Engine::as_ref().name()))); }); }); }); gui.style().pop::<ColorStyle>(); let bounds = gui.take_canvas(); let mut builder = gui.builder("view", Layout::up_to_down(bounds)); let mut color: ColorStyle = builder.style().get(); builder.painter.push_shape(Shape::solid_rect(bounds, color.inactive_background, 0.0)); color.inactive_background = DARK.bg; builder.scoped_style(color, |builder| { let bounds = Rect::from_min_max(bounds.min, bounds.max); builder.layout(Layout::up_to_down(bounds), |builder| { view.build(builder); }); }); builder.finish(); gui.end_frame() }; device.update_bindless(); let present_pipeline = assets.present_pipeline.read(); let mut gfx = device.create_graphics_context().unwrap(); gfx.begin(); { let imgui = draw_state.record(canvas, &mut gfx, gui).unwrap(); gfx.begin_render_pass(&graphics.backbuffer_render_pass(), &[&backbuffer]); gfx.bind_pipeline(&present_pipeline.gpu); struct Import { _texture: u32, } let import_buffer = device.create_buffer( gpu::BufferUsage::CONSTANTS, gpu::MemoryType::HostVisible, std::mem::size_of::<Import>() ).unwrap(); import_buffer.copy_to(&[Import{ _texture: imgui.bindless().unwrap(), }]); gfx.bind_constant_buffer(&import_buffer); gfx.draw(3, 0); gfx.end_render_pass(); gfx.resource_barrier_texture(&backbuffer, gpu::Layout::ColorAttachment, gpu::Layout::Present); } gfx.end(); let receipt = device.submit_graphics(vec![gfx], &[]); device.display(&[receipt]); device.wait_for_idle(); } } impl Module for Editor { fn new() -> Self { Self(Mutex::new(EditorInner{ gui: Context::new(), input: None, draw_state: DrawState::new(), assets: EditorAssets::new(), view: View::new("main", 1.0), })) } fn depends_on(builder: EngineBuilder) -> EngineBuilder { builder .module::<Graphics>() .module::<AssetManager>() .register(WindowStyle::CustomTitleBar{ border: 5.0, drag: Default::default(), }) .process_input(|engine: &Engine, _window: &os::window::Window, event: &InputEvent| { let mut editor = engine.module::<Editor>().unwrap().lock(); if editor.input.is_none() { editor.input = Some(RawInput::default()); } editor.input.as_mut().unwrap().events.push_back(event.clone()); }) .tick(|engine: &Engine, dt: f32| { let editor = engine.module::<Editor>().unwrap(); if engine.window().is_minimized() { return; } editor.do_frame(dt); }) } }
.button("Max").clicked() { engine.maximize(); } if builder.button("Min").clicked() { engine.minimize(); } let drag = builder.layout.available_re
random
[ { "content": "pub fn button_control(id: Id, bounds: Rect, builder: &mut Builder) -> ButtonResponse {\n\n let mut response = ButtonResponse::None;\n\n let is_over = builder.input().mouse_is_over(bounds);\n\n if is_over {\n\n if !builder.is_hovered(id) {\n\n response = ButtonResponse::Hovered;\n\n }\n\n builder.hover(id);\n\n\n\n if builder.input().was_primary_clicked() {\n\n builder.focus(id);\n\n }\n\n } else {\n\n builder.unhover(id);\n\n }\n\n\n\n if builder.input().was_primary_released() {\n\n if builder.unfocus(id) && is_over {\n\n response = ButtonResponse::Clicked(0);\n\n }\n", "file_path": "crates/newport_imgui/src/widgets/button.rs", "rank": 0, "score": 270054.394790991 }, { "content": "pub fn linear_to_srgb(x: f32) -> f32 {\n\n if x <= 0.0 {\n\n return 0.0;\n\n\n\n } else if x >= 1.0 {\n\n return 1.0;\n\n } else if x < 0.0031308 {\n\n return x * 12.92;\n\n } else {\n\n return x.powf(1.0 / 2.4) * 1.055 - 0.055;\n\n }\n\n}\n\n\n", "file_path": "crates/newport_math/src/color.rs", "rank": 1, "score": 259112.73550046404 }, { "content": "pub fn srgb_to_linear(x: f32) -> f32 {\n\n if x <= 0.0 {\n\n return 0.0;\n\n } else if x >= 1.0 {\n\n return 1.0;\n\n } else if x <= 0.04045 {\n\n return x / 12.92;\n\n } else {\n\n return ((x + 0.055) / 1.055).powf(2.4);\n\n }\n\n}\n", "file_path": "crates/newport_math/src/color.rs", "rank": 2, "score": 259112.73550046407 }, { "content": "pub trait Tick = Fn(&Engine, f32) + 'static;\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 3, "score": 243933.85324317665 }, { "content": "pub trait ProcessInput = Fn(&Engine, &Window, &InputEvent) + 'static;\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 4, "score": 222957.64325347578 }, { "content": "pub fn caret_blink_time() -> f32 {\n\n (unsafe{ win32::GetCaretBlinkTime() } as f32) / 1000.0\n\n}", "file_path": "crates/newport_os/src/lib.rs", "rank": 5, "score": 214790.59499108765 }, { "content": "fn layout_to_image_layout(layout: Layout) -> vk::ImageLayout {\n\n match layout {\n\n Layout::Undefined => vk::ImageLayout::UNDEFINED,\n\n Layout::General => vk::ImageLayout::GENERAL,\n\n Layout::ColorAttachment => vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,\n\n Layout::DepthAttachment => vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL,\n\n Layout::TransferSrc => vk::ImageLayout::TRANSFER_SRC_OPTIMAL,\n\n Layout::TransferDst => vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n\n Layout::ShaderReadOnly => vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n Layout::Present => vk::ImageLayout::PRESENT_SRC_KHR,\n\n }\n\n}\n\n\n\nimpl GraphicsContext {\n\n pub fn begin(&mut self) {\n\n unsafe{ self.owner.logical.reset_command_buffer(self.command_buffer, vk::CommandBufferResetFlags::default()).unwrap() };\n\n \n\n let begin_info = vk::CommandBufferBeginInfo::builder()\n\n .flags(vk::CommandBufferUsageFlags::SIMULTANEOUS_USE);\n\n\n", "file_path": "crates/newport_gpu/src/vk/context.rs", "rank": 6, "score": 204363.31520444728 }, { "content": "/// Modules are an easy way to have global immutable state\n\npub trait Module: Sized + 'static {\n\n /// Creates a module and returns as result. 
This is the initialization point for Modules\n\n fn new() -> Self;\n\n\n\n /// Takes a builder to append on other modules or elements\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `builder` - A [`EngineBuilder`] used to add dep modules or functions\n\n fn depends_on(builder: EngineBuilder) -> EngineBuilder {\n\n builder\n\n }\n\n}", "file_path": "crates/newport_engine/src/module.rs", "rank": 7, "score": 187032.4539667377 }, { "content": "type GetDpiForMonitor = extern fn(HMONITOR, MONITOR_DPI_TYPE, *mut UINT, *mut UINT) -> HRESULT;\n\n\n\n#[cfg(target_os = \"windows\")]\n\nlazy_static! {\n\n static ref SHCORE: Option<Library> = {\n\n let library = Library::new(\"shcore.dll\").ok()?;\n\n\n\n let func : Option<SetProcessDPIAwareness> = proc_address!(library, \"SetProcessDpiAwareness\");\n\n if func.is_some() {\n\n let func = func.unwrap();\n\n func(PROCESS_DPI_AWARENESS::PROCESS_SYSTEM_DPI_AWARE);\n\n }\n\n\n\n Some(library)\n\n };\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum WindowStyle {\n\n Windowed,\n", "file_path": "crates/newport_os/src/window.rs", "rank": 8, "score": 180522.4573397531 }, { "content": "pub trait PreShutdown = FnOnce(&Engine) + 'static;\n\n\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 9, "score": 177682.84444239677 }, { "content": "pub trait PostInit = FnOnce(&Engine) + 'static;\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 10, "score": 177682.84444239674 }, { "content": "pub trait Asset: Sized + 'static {\n\n fn load(bytes: &[u8], path: &Path) -> (UUID, Self);\n\n}\n\n\n\nimpl<T: Serialize + DeserializeOwned + Sized + 'static> Asset for T {\n\n fn load(bytes: &[u8], _path: &Path) -> (UUID, Self) {\n\n deserialize(bytes).expect(\"Failed to deserialize asset\")\n\n }\n\n}", "file_path": "crates/newport_asset/src/asset.rs", "rank": 11, "score": 175880.3113139116 }, { "content": "pub trait Register = Sized + Clone + 'static;\n\n\n\n/// Structure used to define engine structure and execution\n\npub struct EngineBuilder {\n\n pub(crate) entries: Vec<EngineBuilderEntry>,\n\n pub(crate) name: Option<String>,\n\n\n\n pub(crate) post_inits: Vec<Box<dyn PostInit>>,\n\n pub(crate) process_input: Vec<Box<dyn ProcessInput>>,\n\n pub(crate) tick: Vec<Box<dyn Tick>>,\n\n pub(crate) pre_shutdown: Vec<Box<dyn PreShutdown>>,\n\n\n\n pub(crate) registers: HashMap<TypeId, Box<dyn Any>>,\n\n}\n\n\n\nimpl EngineBuilder {\n\n /// Creates a new [`EngineBuilder`]\n\n pub fn new() -> Self {\n\n Self { \n\n entries: Vec::with_capacity(32),\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 12, "score": 170918.61774183766 }, { "content": "#[proc_macro_derive(Editable)]\n\npub fn derive_editable(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n expand_derive_editable(&input).into()\n\n}\n\n\n", "file_path": "crates/newport_codegen/src/lib.rs", "rank": 14, "score": 163166.66783236893 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\", rename = \"Texture\")]\n\nstruct TextureFile {\n\n raw: PathBuf,\n\n srgb: bool,\n\n}\n\n\n\nimpl Asset for Texture {\n\n fn load(bytes: &[u8], path: &Path) -> (UUID, Self) {\n\n let (id, texture): (UUID, TextureFile) = deserialize(bytes).unwrap();\n\n\n\n let raw_path = path.with_file_name(texture.raw);\n\n let raw = fs::read(&raw_path).unwrap();\n\n\n\n let raw_texture = match image::load_from_memory(&raw[..]) {\n\n LoadResult::Error(err) => {\n\n panic!(\"Failed to load texture from file due to {}\", err);\n\n },\n\n 
LoadResult::ImageU8(image) => {\n\n let engine = Engine::as_ref();\n\n let graphics = engine.module::<Graphics>().unwrap();\n\n let device = graphics.device();\n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 15, "score": 163008.7750213732 }, { "content": "struct Swapchain {\n\n // HACK: Leak the swapchain handle because it crashes when trying to free it. Probably due to it being attached to resources???\n\n // TODO: Maybe actually handle this?\n\n handle: vk::SwapchainKHR,\n\n\n\n backbuffers: Vec<Arc<Texture>>,\n\n current: Option<usize>,\n\n}\n\n\n\nimpl Swapchain {\n\n fn new(device: Arc<Device>) -> Self {\n\n assert_eq!(device.surface.is_some(), true);\n\n \n\n let swapchain_khr = khr::Swapchain::new(&device.owner.instance, &device.logical);\n\n let surface_khr = khr::Surface::new(&device.owner.entry, &device.owner.instance);\n\n \n\n unsafe{ \n\n let capabilities = surface_khr.get_physical_device_surface_capabilities(device.physical, device.surface.unwrap()).unwrap();\n\n let formats = surface_khr.get_physical_device_surface_formats(device.physical, device.surface.unwrap()).unwrap();\n\n\n", "file_path": "crates/newport_gpu/src/vk/device.rs", "rank": 16, "score": 151894.70057920952 }, { "content": "pub trait Tab {\n\n fn name(&self) -> String;\n\n\n\n fn build(&mut self, builder: &mut Builder);\n\n}\n\n\n\npub struct TestTab(pub i32);\n\nimpl Tab for TestTab {\n\n fn name(&self) -> String {\n\n format!(\"Test {}\", self.0)\n\n }\n\n\n\n fn build(&mut self, builder: &mut Builder) {\n\n builder.label(self.name());\n\n }\n\n}\n\n\n\npub struct TabLabel {\n\n id: Id,\n\n name: String,\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 17, "score": 150296.58825630107 }, { "content": "pub trait Vertex {\n\n fn attributes() -> Vec<VertexAttribute>;\n\n}\n\n\n\npub struct PipelineBuilder {\n\n desc: PipelineDescription,\n\n}\n\n\n\nimpl PipelineBuilder {\n\n pub fn new_graphics(render_pass: &RenderPass) -> Self {\n\n let desc = GraphicsPipelineDescription{\n\n render_pass: render_pass.clone(),\n\n shaders: Vec::new(),\n\n\n\n vertex_attributes: Vec::new(),\n\n\n\n draw_mode: DrawMode::Fill,\n\n line_width: 1.0,\n\n\n\n cull_mode: CullMode::empty(),\n", "file_path": "crates/newport_gpu/src/pipeline.rs", "rank": 18, "score": 149201.96459805805 }, { "content": "struct SelectableCollapsingEntry {\n\n id: Id,\n\n label: String,\n\n selected: bool,\n\n}\n\n\n\nimpl SelectableCollapsingEntry {\n\n fn new(id: impl ToId, label: impl Into<String>, selected: bool) -> Self {\n\n Self {\n\n id: id.to_id(),\n\n label: label.into(),\n\n selected,\n\n }\n\n }\n\n}\n\n\n\nimpl SelectableCollapsingEntry {\n\n fn build(self, builder: &mut Builder, has_contents: bool, contents: impl FnOnce(&mut Builder)) -> ButtonResponse {\n\n let mut retained = builder.retained::<SelectableCollapsingRetained>(self.id);\n\n\n", "file_path": "crates/newport_editor/src/asset_browser.rs", "rank": 19, "score": 145684.832742699 }, { "content": "#[derive(Clone)]\n\nstruct SelectableCollapsingRetained {\n\n is_open: bool,\n\n}\n\n\n\nimpl Default for SelectableCollapsingRetained {\n\n fn default() -> Self {\n\n Self{\n\n is_open: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Retained for SelectableCollapsingRetained { }\n\n\n", "file_path": "crates/newport_editor/src/asset_browser.rs", "rank": 20, "score": 145684.832742699 }, { "content": "pub fn vk_format(format: Format) -> vk::Format {\n\n match format {\n\n Format::Undefined => vk::Format::UNDEFINED,\n\n Format::RGB_U8 => vk::Format::R8G8B8_UINT,\n\n 
Format::RGB_U8_SRGB => vk::Format::R8G8B8_SRGB,\n\n Format::RGBA_U8 => vk::Format::R8G8B8A8_UNORM,\n\n Format::RGBA_U8_SRGB => vk::Format::R8G8B8A8_SRGB,\n\n Format::RGBA_F16 => vk::Format::R16G16B16A16_SFLOAT,\n\n Format::BGR_U8_SRGB => vk::Format::B8G8R8A8_SRGB\n\n }\n\n}", "file_path": "crates/newport_gpu/src/vk/mod.rs", "rank": 21, "score": 141340.83861425443 }, { "content": "type SetProcessDPIAwareness = extern fn(PROCESS_DPI_AWARENESS) -> HRESULT;\n\n\n", "file_path": "crates/newport_os/src/window.rs", "rank": 22, "score": 133537.10797115057 }, { "content": "pub trait Style = Default + Clone + Any + 'static;\n\n\n\npub struct StyleMap {\n\n inner: HashMap<TypeId, Box<dyn Any>>,\n\n}\n\n\n\nimpl StyleMap {\n\n pub fn new() -> Self {\n\n Self {\n\n inner: HashMap::with_capacity(16)\n\n }\n\n }\n\n\n\n pub fn get<T: Style>(&mut self) -> T {\n\n let id = TypeId::of::<T>();\n\n\n\n if !self.inner.contains_key(&id) {\n\n self.inner.insert(id, Box::new(vec![T::default()]));\n\n }\n\n self.inner.get(&id).unwrap().downcast_ref::<Vec<T>>().unwrap().last().unwrap().clone()\n", "file_path": "crates/newport_imgui/src/style.rs", "rank": 24, "score": 128212.79957632408 }, { "content": "fn expand_derive_editable(input: &DeriveInput) -> TokenStream2 {\n\n match &input.data {\n\n Data::Struct(data) => implement_struct_editable(&input.ident, &input.generics, &data.fields),\n\n _ => unimplemented!(\"Enum not supported\")\n\n }\n\n}\n\n\n", "file_path": "crates/newport_codegen/src/lib.rs", "rank": 25, "score": 127870.09941284318 }, { "content": "pub fn deserialize<'de, T: Deserialize<'de>>(bytes: &'de [u8]) -> Result<(UUID, T), ()> {\n\n let contents = str::from_utf8(bytes).map_err(|_| ())?;\n\n\n\n let t: AssetFile<T> = ron::from_str(contents).unwrap(); // .map_err(|_| ())?;\n\n Ok((t.id, t.asset))\n\n}", "file_path": "crates/newport_asset/src/de.rs", "rank": 26, "score": 123310.23837148811 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let mut path = PathBuf::from(out_dir);\n\n path.push(\"dxcompiler.dll\");\n\n fs::copy(\"bin/dxcompiler.dll\", &path).unwrap();\n\n}", "file_path": "crates/newport_gpu/build.rs", "rank": 27, "score": 119865.60458001558 }, { "content": "struct Layer {\n\n painter: Painter,\n\n}\n\n\n\n\n\npub struct Context {\n\n pub(crate) input: InputState,\n\n layers: Vec<Layer>,\n\n retained: HashMap<Id, Box<dyn Retained>>,\n\n\n\n pub(crate) hovered: Option<Id>,\n\n pub(crate) focused: Option<Id>,\n\n\n\n pub(crate) style: StyleMap,\n\n\n\n canvas: Rect,\n\n}\n\n\n\nimpl Context {\n\n pub fn new() -> Self {\n", "file_path": "crates/newport_imgui/src/context.rs", "rank": 28, "score": 117891.11696157031 }, { "content": "fn shader_variant_to_shader_stage(variant: ShaderVariant) -> vk::ShaderStageFlags {\n\n match variant {\n\n ShaderVariant::Vertex => vk::ShaderStageFlags::VERTEX,\n\n ShaderVariant::Pixel => vk::ShaderStageFlags::FRAGMENT,\n\n }\n\n}\n\n\n\npub struct Pipeline {\n\n pub owner: Arc<Device>,\n\n\n\n pub handle: vk::Pipeline,\n\n pub layout: vk::PipelineLayout,\n\n\n\n pub desc: PipelineDescription,\n\n}\n\n\n\nimpl Pipeline {\n\n pub fn new(owner: Arc<Device>, desc: PipelineDescription) -> Result<Arc<Pipeline>, ()> {\n\n match desc {\n\n PipelineDescription::Graphics(desc) => {\n", "file_path": "crates/newport_gpu/src/vk/pipeline.rs", "rank": 29, "score": 117090.3258810984 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[serde(rename = \"Mesh\", crate = \"self::serde\")]\n\nstruct MeshFile {\n\n raw: PathBuf,\n\n}\n\n\n\nimpl 
Asset for Mesh {\n\n fn load(bytes: &[u8], path: &Path) -> (UUID, Self) {\n\n let (id, mesh_file): (UUID, MeshFile) = deserialize(bytes).unwrap();\n\n\n\n let raw_path = path.with_file_name(mesh_file.raw);\n\n\n\n let (gltf, buffers, _images) = gltf::import(raw_path).unwrap();\n\n\n\n let mut vertex_count = 0;\n\n let mut index_count = 0;\n\n\n\n let mesh = gltf.meshes().nth(0).unwrap();\n\n for primitive in mesh.primitives() {\n\n let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));\n\n\n\n vertex_count += reader.read_positions().unwrap().count();\n", "file_path": "crates/newport_graphics/src/mesh.rs", "rank": 30, "score": 115021.58481079289 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[serde(rename = \"Font\", crate = \"self::serde\")]\n\nstruct FontFile {\n\n raw: PathBuf,\n\n}\n\n\n\nimpl Asset for FontCollection {\n\n fn load(bytes: &[u8], path: &Path) -> (UUID, Self) {\n\n let (id, file): (UUID, FontFile) = deserialize(bytes).unwrap();\n\n \n\n let raw_path = path.with_file_name(file.raw);\n\n\n\n let font_file = fs::read(raw_path).unwrap();\n\n (id, FontCollection::new(font_file).unwrap())\n\n }\n\n}\n\n\n\npub struct Font {\n\n pub size: u32,\n\n\n\n pub ascent: f32,\n\n pub descent: f32,\n", "file_path": "crates/newport_graphics/src/font.rs", "rank": 31, "score": 115021.58481079289 }, { "content": "pub fn compile(name: &str, source: &str, main: &str, variant: ShaderVariant) -> Result<Vec<u8>, String> { \n\n DXC_COMPILER.with(|f| {\n\n let target_profile = match variant {\n\n ShaderVariant::Pixel => \"ps_6_1\",\n\n ShaderVariant::Vertex => \"vs_6_1\"\n\n };\n\n \n\n let blob = f.library\n\n .create_blob_with_encoding_from_str(source)\n\n .unwrap();\n\n\n\n let mut args = Vec::with_capacity(4); // TODO: Temp allocator\n\n\n\n #[cfg(feature = \"vulkan\")]\n\n {\n\n args.push(\"-spirv\");\n\n args.push(\"-Zpc\"); // Column major matrices\n\n\n\n if variant == ShaderVariant::Vertex {\n\n args.push(\"-fvk-invert-y\");\n", "file_path": "crates/newport_gpu/src/shaders.rs", "rank": 32, "score": 114516.41950551902 }, { "content": "pub trait Editable {\n\n fn edit(&mut self, name: &str, ui: &mut Builder);\n\n}\n\n\n\n// impl Editable for math::Vector2 {\n\n// fn edit(&mut self, name: &str, ui: &mut Ui) {\n\n// ui.horizontal(|ui| {\n\n// ui.label(name);\n\n// ui.separator();\n\n// ui.add(DragValue::new(&mut self.x));\n\n// ui.add(DragValue::new(&mut self.y));\n\n// });\n\n// }\n\n// }\n\n\n\n// impl Editable for math::Vector3 {\n\n// fn edit(&mut self, name: &str, ui: &mut Ui) {\n\n// ui.horizontal(|ui| {\n\n// ui.label(name);\n\n// ui.separator();\n", "file_path": "crates/newport_editor/src/editable.rs", "rank": 33, "score": 112463.01034817315 }, { "content": "pub trait InterpTo {\n\n fn interp_to(self, target: Self, dt: f32, speed: f32) -> Self;\n\n}\n\n\n\nimpl InterpTo for f32 {\n\n fn interp_to(self, target: Self, dt: f32, speed: f32) -> Self {\n\n if speed <= 0.0 { return target; }\n\n\n\n let distance = target - self;\n\n if distance * distance < SMALL_NUMBER {\n\n return target;\n\n }\n\n\n\n let delta = distance * (dt * speed).max(0.0).min(1.0);\n\n self + delta\n\n }\n\n}", "file_path": "crates/newport_math/src/lib.rs", "rank": 34, "score": 112384.96793152446 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone, Default)]\n\nstruct CACHE_DESCRIPTOR {\n\n Level: BYTE,\n\n Associativity: BYTE,\n\n LineSize: WORD,\n\n Size: DWORD,\n\n Type: PROCESSOR_CACHE_TYPE,\n\n}\n\n\n", "file_path": "crates/newport_os/src/win32/kernel32.rs", "rank": 35, "score": 
112237.3671906069 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone, Default)]\n\nstruct Oem_ID {\n\n wProcessorArchitecture: WORD,\n\n wReserved: WORD,\n\n}\n\n\n\n#[repr(C)]\n\nunion Oem_Union {\n\n dwOemId: DWORD,\n\n internal: Oem_ID,\n\n}\n\n\n", "file_path": "crates/newport_os/src/win32/kernel32.rs", "rank": 36, "score": 112237.3671906069 }, { "content": "#[repr(C)]\n\nstruct SYSTEM_INFO {\n\n oem_id: Oem_Union,\n\n dwPageSize: DWORD,\n\n lpMinimumApplicationAddress: LPVOID,\n\n lpMaximumApplicationAddress: LPVOID,\n\n dwActiveProcessorMask: DWORD_PTR,\n\n dwNumberOfProcessors: DWORD,\n\n dwProcessorType: DWORD,\n\n dwAllocationGranularity: DWORD,\n\n wProcessorLevel: WORD,\n\n wProcessorRevision: WORD,\n\n}\n", "file_path": "crates/newport_os/src/win32/kernel32.rs", "rank": 37, "score": 112237.3671906069 }, { "content": "struct CompilerThreadInfo {\n\n _dxc: Dxc,\n\n compiler: DxcCompiler,\n\n library: DxcLibrary\n\n}\n\n\n\nimpl CompilerThreadInfo {\n\n fn new() -> Self {\n\n let out_dir = env!(\"OUT_DIR\");\n\n let target_index = out_dir.find(\"target\").unwrap();\n\n let (_, relative_out_dir) = out_dir.split_at(target_index);\n\n \n\n let mut library_path = PathBuf::from(relative_out_dir);\n\n library_path.push(\"dxcompiler.dll\");\n\n\n\n let dxc = Dxc::new(Some(library_path)).unwrap();\n\n\n\n let compiler = dxc.create_compiler().unwrap();\n\n let library = dxc.create_library().unwrap();\n\n\n", "file_path": "crates/newport_gpu/src/shaders.rs", "rank": 38, "score": 111786.48388164786 }, { "content": "// This is copied from utils.rs in hassle-rs\n\nstruct DefaultIncludeHandler {}\n\n\n\nimpl DxcIncludeHandler for DefaultIncludeHandler {\n\n fn load_source(&self, filename: String) -> Option<String> {\n\n use std::io::Read;\n\n match std::fs::File::open(filename) {\n\n Ok(mut f) => {\n\n let mut content = String::new();\n\n f.read_to_string(&mut content).unwrap();\n\n Some(content)\n\n }\n\n Err(_) => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/newport_gpu/src/shaders.rs", "rank": 39, "score": 111786.48388164786 }, { "content": "#[repr(C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct SYSTEM_LOGICAL_PROCESSOR_INFORMATION {\n\n ProcessorMask: ULONG_PTR,\n\n Relationship: LOGICAL_PROCESSOR_RELATIONSHIP,\n\n Union: SYSTEM_LOGICAL_PROCESSOR_INFORMATION_UNION,\n\n}\n", "file_path": "crates/newport_os/src/win32/kernel32.rs", "rank": 40, "score": 107671.62107596172 }, { "content": "type LPSYSTEM_INFO = *mut SYSTEM_INFO;\n\n\n", "file_path": "crates/newport_os/src/win32/kernel32.rs", "rank": 41, "score": 103374.35787870118 }, { "content": "enum ViewChildren {\n\n None,\n\n Views {\n\n views: Vec<View>,\n\n direction: Direction,\n\n },\n\n Tabs{\n\n tabs: Vec<Box<dyn Tab>>,\n\n selected: usize,\n\n hide_tabs: bool,\n\n hide_border: bool,\n\n }\n\n}\n\n\n\npub struct View {\n\n _id: Id,\n\n children: ViewChildren,\n\n percent: f32,\n\n}\n\n\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 42, "score": 101402.61183343746 }, { "content": "use crate::{\n\n EngineBuilder\n\n};\n\n\n\n/// Modules are an easy way to have global immutable state\n", "file_path": "crates/newport_engine/src/module.rs", "rank": 43, "score": 97067.52620946406 }, { "content": " /// \n\n /// let builder = EngineBuilder::new()\n\n /// .module::<Test>();\n\n /// ```\n\n pub fn post_init<F: PostInit>(mut self, f: F) -> Self {\n\n self.post_inits.push(Box::new(f));\n\n self\n\n }\n\n\n\n pub fn process_input<F: ProcessInput>(mut self, f: F) -> Self {\n\n self.process_input.push(Box::new(f));\n\n 
self\n\n }\n\n\n\n /// Adds a tick closure to the list\n\n pub fn tick<F: Tick>(mut self, f: F) -> Self {\n\n self.tick.push(Box::new(f));\n\n self\n\n }\n\n\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 44, "score": 96921.22239265304 }, { "content": "use crate::{\n\n Engine,\n\n Window,\n\n InputEvent,\n\n Module,\n\n};\n\n\n\nuse std::{\n\n any::{ TypeId, Any },\n\n collections::HashMap,\n\n};\n\n\n\npub(crate) struct EngineBuilderEntry {\n\n pub id: TypeId,\n\n pub spawn: fn() -> Box<dyn Any>,\n\n}\n\n\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 45, "score": 96920.95672476703 }, { "content": " /// use newport_engine::EngineBuilder;\n\n /// \n\n /// let builder = EngineBuilder::new()\n\n /// .module::<Test>();\n\n /// ```\n\n pub fn module<T: Module>(mut self) -> Self {\n\n // Don't add another module thats already added\n\n let id = TypeId::of::<T>();\n\n for it in self.entries.iter() {\n\n if it.id == id {\n\n return self;\n\n }\n\n }\n\n\n\n fn spawn<T: Module>() -> Box<dyn Any> {\n\n Box::new(T::new())\n\n }\n\n \n\n // Add dependencies to the entries list. There will be duplicates\n\n self = T::depends_on(self);\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 46, "score": 96920.86844567893 }, { "content": " /// Adds a pre shutdown closure to the list\n\n pub fn pre_shutdown<F: PreShutdown>(mut self, f: F) -> Self {\n\n self.pre_shutdown.push(Box::new(f));\n\n self\n\n }\n\n\n\n /// Sets the name of the engine runnable\n\n pub fn name(mut self, name: impl Into<String>) -> Self {\n\n self.name = Some(name.into());\n\n self\n\n }\n\n\n\n pub fn register<T: Register>(mut self, register: T) -> Self {\n\n let type_id = TypeId::of::<T>();\n\n let it = match self.registers.get_mut(&type_id) {\n\n Some(it) => it,\n\n None => {\n\n let register: Vec<T> = Vec::new();\n\n self.registers.insert(type_id, Box::new(register));\n\n self.registers.get_mut(&type_id).unwrap()\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 47, "score": 96913.82466785306 }, { "content": "\n\n // Push entry with generic spawn func and type id\n\n self.entries.push(EngineBuilderEntry{\n\n id: id,\n\n spawn: spawn::<T>,\n\n });\n\n\n\n self\n\n }\n\n\n\n /// Adds a post initialization closure to the list\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `T` - A [`Module`] that will be initialized and used at runtime\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// use newport_engine::EngineBuilder;\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 48, "score": 96909.29235397525 }, { "content": " name: None,\n\n\n\n post_inits: Vec::new(),\n\n process_input: Vec::new(),\n\n tick: Vec::new(),\n\n pre_shutdown: Vec::new(),\n\n\n\n registers: HashMap::new(),\n\n }\n\n }\n\n\n\n /// Adds a module to the list\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `T` - A [`Module`] that will be initialized and used at runtime\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 49, "score": 96907.92821691268 }, { "content": " }\n\n };\n\n\n\n let registers = it.downcast_mut::<Vec<T>>().unwrap();\n\n registers.push(register);\n\n\n\n self\n\n }\n\n}\n\n\n", "file_path": "crates/newport_engine/src/builder.rs", "rank": 50, "score": 96896.92501221588 }, { "content": "use crate::{\n\n Id,\n\n ToId,\n\n Layout,\n\n Builder,\n\n Sizing,\n\n ButtonResponse,\n\n button_control,\n\n DARK,\n\n Style,\n\n Direction,\n\n LayoutStyle,\n\n ColorStyle,\n\n TextStyle,\n\n Shape,\n\n};\n\n\n\nuse 
crate::math::{\n\n Vector2,\n\n Rect,\n\n};\n\n\n\npub const SPACING: f32 = 1.0;\n\n\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 51, "score": 96666.93573220127 }, { "content": " builder.scoped_style(layout_style, |builder| builder.label(\"Empty View\"));\n\n },\n\n ViewChildren::Tabs { tabs, selected, hide_tabs, hide_border } => {\n\n // Draw the tab space\n\n let mut layout_style: LayoutStyle = builder.style().get();\n\n layout_style.margin = Rect::default();\n\n layout_style.padding = (12.0, 6.0, 12.0, 6.0).into();\n\n if !*hide_tabs {\n\n builder.style().push(layout_style);\n\n \n\n let mut color: ColorStyle = builder.style().get();\n\n color.focused_background = color.hovered_background;\n\n color.focused_foreground = color.hovered_foreground;\n\n \n\n let height = builder.label_height_with_padding();\n\n let bounds = builder.layout.push_size(Vector2::new(0.0, height));\n\n builder.painter.push_shape(Shape::solid_rect(bounds, color.unhovered_background, 0.0));\n\n\n\n builder.style().push(color);\n\n\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 52, "score": 96660.26808231801 }, { "content": " let bounds = builder.layout.push_size(available_size);\n\n let color: ColorStyle = builder.style().get();\n\n builder.painter.push_shape(Shape::solid_rect(bounds, color.inactive_background, 0.0));\n\n\n\n let mut color: ColorStyle = builder.style().get();\n\n color.inactive_background = DARK.bg;\n\n\n\n builder.scoped_style(color, |builder| {\n\n let bounds = if *hide_border {\n\n Rect::from_min_max(bounds.min, bounds.max)\n\n } else {\n\n Rect::from_min_max(bounds.min + SPACING, bounds.max - SPACING)\n\n };\n\n builder.layout(Layout::up_to_down(bounds), |builder| {\n\n tabs[*selected].build(builder);\n\n });\n\n });\n\n\n\n builder.style().pop::<LayoutStyle>();\n\n }\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 53, "score": 96658.65366404653 }, { "content": " selected: bool,\n\n}\n\n\n\nimpl TabLabel {\n\n pub fn new(name: impl Into<String>, selected: bool) -> Self {\n\n let name = name.into();\n\n Self {\n\n id: name.to_id(),\n\n name: name,\n\n selected: selected\n\n }\n\n }\n\n\n\n pub fn build(self, builder: &mut Builder) -> ButtonResponse {\n\n let color: ColorStyle = builder.style().get();\n\n let text: TextStyle = builder.style().get();\n\n\n\n let label_rect = text.string_rect(&self.name, text.label_size, None).size();\n\n let bounds = builder.content_bounds(label_rect);\n\n \n", "file_path": "crates/newport_editor/src/view.rs", "rank": 54, "score": 96654.99002380914 }, { "content": " }\n\n }\n\n\n\n pub fn hide_border(&mut self, hide: bool) {\n\n match &mut self.children {\n\n ViewChildren::Tabs { hide_border, .. 
} => {\n\n *hide_border = hide;\n\n },\n\n _ => unreachable!()\n\n }\n\n }\n\n}\n\n\n\nimpl View {\n\n pub fn build(&mut self, builder: &mut Builder) {\n\n match &mut self.children {\n\n ViewChildren::None => {\n\n let mut layout_style = LayoutStyle::default();\n\n layout_style.width_sizing = Sizing::Fill;\n\n layout_style.height_sizing = Sizing::Fill;\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 55, "score": 96651.74729894107 }, { "content": " ViewChildren::Views { views, direction } => {\n\n let available_size = builder.available_rect().size();\n\n let bounds = builder.layout.push_size(available_size);\n\n \n\n let layout = Layout::new(bounds, *direction);\n\n\n\n builder.layout(layout, |builder| {\n\n for it in views {\n\n let size = builder.layout.bounds().size() * it.percent - SPACING / 2.0;\n\n let bounds = builder.layout.push_size(size);\n\n\n\n builder.layout(Layout::up_to_down(bounds), |builder| {\n\n it.build(builder);\n\n });\n\n builder.add_spacing(SPACING);\n\n }\n\n });\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 56, "score": 96651.3829350676 }, { "content": " let layout = Layout::left_to_right(bounds);\n\n builder.layout(layout, |builder|{\n\n for (index, it) in tabs.iter().enumerate() {\n\n if TabLabel::new(it.name(), index == *selected).build(builder).clicked() {\n\n *selected = index;\n\n }\n\n }\n\n });\n\n \n\n builder.style().pop::<LayoutStyle>();\n\n builder.style().pop::<ColorStyle>();\n\n\n\n builder.add_spacing(SPACING);\n\n }\n\n\n\n layout_style.padding = (8.0, 8.0, 8.0, 8.0).into();\n\n builder.style().push(layout_style);\n\n\n\n let available_size = builder.available_rect().size();\n\n \n", "file_path": "crates/newport_editor/src/view.rs", "rank": 57, "score": 96649.57888012285 }, { "content": " };\n\n\n\n (background_color, foreground_color)\n\n };\n\n\n\n builder.painter.push_shape(Shape::solid_rect(bounds, background_color, 0.0));\n\n \n\n let at = Rect::from_pos_size(bounds.pos(), label_rect).top_left();\n\n builder.painter.push_shape(\n\n Shape::text(\n\n self.name, \n\n at, \n\n &text.font, \n\n text.label_size, \n\n builder.input().dpi, \n\n foreground_color\n\n )\n\n );\n\n\n\n response\n\n }\n\n}", "file_path": "crates/newport_editor/src/view.rs", "rank": 58, "score": 96648.45473878738 }, { "content": "impl View {\n\n pub fn new(id: impl ToId, percent: f32) -> Self {\n\n Self {\n\n _id: id.to_id(),\n\n children: ViewChildren::None,\n\n percent: percent,\n\n }\n\n }\n\n\n\n pub fn new_views(id: impl ToId, percent: f32, views: Vec<View>, direction: Direction) -> Self {\n\n Self {\n\n _id: id.to_id(),\n\n children: ViewChildren::Views{ views, direction },\n\n percent: percent,\n\n }\n\n }\n\n\n\n pub fn add_tab(&mut self, tab: impl Tab + 'static) {\n\n match &mut self.children {\n\n ViewChildren::None => {\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 59, "score": 96644.29063872359 }, { "content": "use std::convert::From;\n\nuse crate::Vector4;\n\n\n\nuse serde::{ Serialize, Deserialize };\n\n\n\n#[derive(Copy, Clone, Default, Debug, PartialEq, PartialOrd, Serialize, Deserialize)]\n\npub struct Color {\n\n pub r: f32,\n\n pub g: f32,\n\n pub b: f32,\n\n pub a: f32,\n\n}\n\n\n\nimpl Color {\n\n pub const RED: Self = Self::new(1.0, 0.0, 0.0, 1.0);\n\n pub const GREEN: Self = Self::new(0.0, 1.0, 0.0, 1.0);\n\n pub const BLUE: Self = Self::new(0.0, 0.0, 1.0, 1.0);\n\n \n\n pub const WHITE: Self = Self::new(1.0, 1.0, 1.0, 1.0);\n\n pub const BLACK: Self = Self::new(0.0, 0.0, 0.0, 
1.0);\n", "file_path": "crates/newport_math/src/color.rs", "rank": 60, "score": 96633.3625667622 }, { "content": " let mut tabs: Vec<Box<dyn Tab>> = Vec::with_capacity(1);\n\n tabs.push(Box::new(tab));\n\n self.children = ViewChildren::Tabs{\n\n tabs: tabs,\n\n selected: 0,\n\n hide_tabs: false,\n\n hide_border: false,\n\n }\n\n },\n\n ViewChildren::Tabs { tabs, selected, .. } => {\n\n tabs.push(Box::new(tab));\n\n *selected = tabs.len() - 1;\n\n },\n\n _ => unreachable!()\n\n }\n\n }\n\n\n\n pub fn add_view(&mut self, view: View) {\n\n match &mut self.children {\n\n ViewChildren::None => {\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 61, "score": 96630.97504042981 }, { "content": " let mut views = Vec::with_capacity(1);\n\n views.push(view);\n\n self.children = ViewChildren::Views{\n\n views: views,\n\n direction: Direction::LeftToRight,\n\n }\n\n },\n\n ViewChildren::Views { views, .. } => {\n\n views.push(view);\n\n },\n\n _ => unreachable!()\n\n }\n\n }\n\n\n\n pub fn hide_tabs(&mut self, hide: bool) {\n\n match &mut self.children {\n\n ViewChildren::Tabs { hide_tabs, .. } => {\n\n *hide_tabs = hide;\n\n },\n\n _ => unreachable!()\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 62, "score": 96628.3798360334 }, { "content": " let response = button_control(self.id, bounds, builder);\n\n \n\n let is_focused = self.selected;\n\n let is_hovered = builder.is_hovered(self.id);\n\n \n\n let (background_color, foreground_color) = {\n\n let background_color = if is_focused {\n\n color.focused_background\n\n } else if is_hovered {\n\n color.hovered_background\n\n } else {\n\n color.unhovered_background\n\n };\n\n\n\n let foreground_color = if is_focused {\n\n color.focused_foreground\n\n } else if is_hovered {\n\n color.hovered_foreground\n\n } else {\n\n color.unhovered_foreground\n", "file_path": "crates/newport_editor/src/view.rs", "rank": 63, "score": 96627.99295337274 }, { "content": " Color::from_hex(color)\n\n }\n\n}\n\n\n\nimpl From<Vector4> for Color {\n\n fn from(color: Vector4) -> Self {\n\n Self{ r: color.x, g: color.y, b: color.z, a: color.w }\n\n }\n\n}\n\n\n\nimpl From<(f32, f32, f32, f32)> for Color {\n\n fn from(rgba: (f32, f32, f32, f32)) -> Self {\n\n let (r, g, b, a) = rgba;\n\n Self{ r, g, b, a }\n\n }\n\n}\n\n\n", "file_path": "crates/newport_math/src/color.rs", "rank": 64, "score": 96621.61472403137 }, { "content": "\n\n pub const CYAN: Self = Self::new(0.0, 1.0, 1.0, 1.0);\n\n pub const YELLOW: Self = Self::new(1.0, 1.0, 0.0, 1.0);\n\n pub const MAGENTA: Self = Self::new(1.0, 0.0, 1.0, 1.0);\n\n\n\n pub const fn new(r: f32, g: f32, b: f32, a: f32) -> Self {\n\n Self{ r: r, g: g, b: b, a: a }\n\n }\n\n\n\n pub const fn from_hex(hex: u32) -> Self {\n\n let r = hex >> 24 & 0xFF;\n\n let g = hex >> 16 & 0xFF;\n\n let b = hex >> 8 & 0xFF;\n\n let a = hex & 0xFF;\n\n\n\n Self{\n\n r: r as f32 / 255.0, \n\n g: g as f32 / 255.0, \n\n b: b as f32 / 255.0, \n\n a: a as f32 / 255.0, \n", "file_path": "crates/newport_math/src/color.rs", "rank": 65, "score": 96621.48546888545 }, { "content": " }\n\n }\n\n\n\n pub fn from_srgb(hex: u32) -> Self {\n\n let r = hex >> 24 & 0xFF;\n\n let g = hex >> 16 & 0xFF;\n\n let b = hex >> 8 & 0xFF;\n\n let a = hex & 0xFF;\n\n\n\n Self{\n\n r: srgb_to_linear(r as f32 / 255.0), \n\n g: srgb_to_linear(g as f32 / 255.0), \n\n b: srgb_to_linear(b as f32 / 255.0), \n\n a: srgb_to_linear(a as f32 / 255.0), \n\n }\n\n }\n\n}\n\n\n\nimpl From<u32> for Color {\n\n fn from(color: u32) -> Self {\n", "file_path": 
"crates/newport_math/src/color.rs", "rank": 66, "score": 96620.9218515145 }, { "content": " let max = Vector2::new(self.min.x, self.max.y);\n\n\n\n (min, max).into()\n\n }\n\n\n\n pub fn split_right(&mut self, size: f32) -> Rect {\n\n let max = self.max;\n\n \n\n self.max.x -= size;\n\n\n\n let min = Vector2::new(self.max.x, self.min.y);\n\n\n\n (min, max).into()\n\n }\n\n}\n\n\n\nimpl From<(Vector2, Vector2)> for Rect {\n\n fn from(min_max: (Vector2, Vector2)) -> Self {\n\n let (min, max) = min_max;\n\n Self{\n", "file_path": "crates/newport_math/src/rect.rs", "rank": 67, "score": 96611.36856537312 }, { "content": " let min = Vector2::new(self.min.x, self.max.y);\n\n\n\n (min, max).into()\n\n }\n\n\n\n pub fn split_bottom(&mut self, size: f32) -> Rect {\n\n let min = self.min;\n\n \n\n self.min.y += size;\n\n\n\n let max = Vector2::new(self.max.x, self.min.y);\n\n\n\n (min, max).into()\n\n }\n\n\n\n pub fn split_left(&mut self, size: f32) -> Rect {\n\n let min = self.min;\n\n \n\n self.min.x += size;\n\n\n", "file_path": "crates/newport_math/src/rect.rs", "rank": 68, "score": 96608.88910808795 }, { "content": "use crate::Vector2;\n\n\n\nuse std::convert::From;\n\n\n\nuse serde::{ Serialize, Deserialize };\n\n\n\n#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]\n\npub struct Rect {\n\n pub min: Vector2,\n\n pub max: Vector2,\n\n}\n\n\n\nimpl Rect {\n\n pub const INFINITY: Rect = Rect{ \n\n min: Vector2{ \n\n x: -f32::INFINITY, \n\n y: -f32::INFINITY \n\n }, \n\n max: Vector2{ \n\n x: f32::INFINITY, \n", "file_path": "crates/newport_math/src/rect.rs", "rank": 69, "score": 96608.12174676544 }, { "content": " min: min,\n\n max: max,\n\n }\n\n }\n\n}\n\n\n\nimpl From<(f32, f32, f32, f32)> for Rect {\n\n fn from(rect: (f32, f32, f32, f32)) -> Self {\n\n let (x0, y0, x1, y1) = rect;\n\n Self {\n\n min: Vector2::new(x0, y0),\n\n max: Vector2::new(x1, y1)\n\n }\n\n }\n\n}", "file_path": "crates/newport_math/src/rect.rs", "rank": 70, "score": 96605.75530297632 }, { "content": "\n\n pub fn height(self) -> f32 {\n\n self.max.y - self.min.y\n\n }\n\n\n\n pub fn size(self) -> Vector2 {\n\n Vector2::new(self.width(), self.height())\n\n }\n\n\n\n pub fn pos(self) -> Vector2 {\n\n let x = self.min.x + self.width() / 2.0;\n\n let y = self.min.y + self.height() / 2.0;\n\n Vector2::new(x, y)\n\n }\n\n\n\n pub fn bottom_left(self) -> Vector2 {\n\n self.min\n\n }\n\n\n\n pub fn top_right(self) -> Vector2 {\n", "file_path": "crates/newport_math/src/rect.rs", "rank": 71, "score": 96604.40672106374 }, { "content": " y: f32::INFINITY \n\n } \n\n };\n\n\n\n pub fn from_min_max(min: Vector2, max: Vector2) -> Self {\n\n Self{\n\n min: min,\n\n max: max,\n\n }\n\n }\n\n\n\n pub fn from_pos_size(pos: Vector2, size: Vector2) -> Self {\n\n let min = pos - size / 2.0;\n\n let max = pos + size / 2.0;\n\n Self{ min, max }\n\n }\n\n\n\n pub fn width(self) -> f32 {\n\n self.max.x - self.min.x\n\n }\n", "file_path": "crates/newport_math/src/rect.rs", "rank": 72, "score": 96602.09761487154 }, { "content": " self.max\n\n }\n\n\n\n pub fn bottom_right(self) -> Vector2 {\n\n (self.max.x, self.min.y).into()\n\n }\n\n\n\n pub fn top_left(self) -> Vector2 {\n\n (self.min.x, self.max.y).into()\n\n }\n\n\n\n pub fn point_overlap(self, point: Vector2) -> bool {\n\n self.min.x <= point.x && self.max.x >= point.x && self.min.y <= point.y && self.max.y >= point.y\n\n }\n\n\n\n pub fn split_top(&mut self, size: f32) -> Rect {\n\n let max = self.max;\n\n \n\n self.max.y -= size;\n\n\n", "file_path": 
"crates/newport_math/src/rect.rs", "rank": 73, "score": 96601.32183024174 }, { "content": "use crate::{\n\n asset,\n\n gpu,\n\n serde,\n\n\n\n engine::Engine,\n\n\n\n Graphics,\n\n};\n\n\n\nuse asset::{\n\n Asset,\n\n deserialize,\n\n UUID,\n\n};\n\n\n\nuse gpu::{ \n\n BufferUsage, \n\n MemoryType, \n\n TextureUsage, \n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 74, "score": 96594.26012527756 }, { "content": " image.width as u32,\n\n image.height as u32,\n\n 1,\n\n Wrap::Clamp,\n\n Filter::Linear,\n\n Filter::Linear\n\n ).unwrap();\n\n\n\n let mut gfx = device.create_graphics_context().unwrap();\n\n {\n\n gfx.begin();\n\n gfx.resource_barrier_texture(&gpu_texture, Layout::Undefined, Layout::TransferDst);\n\n gfx.copy_buffer_to_texture(&gpu_texture, &pixel_buffer);\n\n gfx.resource_barrier_texture(&gpu_texture, Layout::TransferDst, Layout::ShaderReadOnly);\n\n gfx.end();\n\n }\n\n \n\n let receipt = device.submit_graphics(vec![gfx], &[]);\n\n receipt.wait();\n\n\n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 75, "score": 96591.87820084191 }, { "content": "\n\n\n\npub struct Texture {\n\n srgb: bool,\n\n\n\n gpu: gpu::Texture,\n\n}\n\n\n\nimpl Texture {\n\n pub fn srgb(&self) -> bool {\n\n self.srgb\n\n }\n\n \n\n pub fn gpu(&self) -> &gpu::Texture {\n\n &self.gpu\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\", rename = \"Texture\")]\n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 76, "score": 96586.10402245962 }, { "content": "\n\n assert_eq!(image.depth, 4, \"Currently vulkan only supports 4 byte formats\");\n\n\n\n let pixel_buffer = device.create_buffer(\n\n BufferUsage::TRANSFER_SRC, \n\n MemoryType::HostVisible, \n\n image.data.len()\n\n ).unwrap();\n\n pixel_buffer.copy_to(&image.data[..]);\n\n\n\n let format = if texture.srgb {\n\n Format::RGBA_U8_SRGB\n\n } else {\n\n Format::RGBA_U8\n\n };\n\n\n\n let gpu_texture = device.create_texture(\n\n TextureUsage::TRANSFER_DST | TextureUsage::SAMPLED,\n\n MemoryType::DeviceLocal, \n\n format,\n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 77, "score": 96575.42674233312 }, { "content": " gpu_texture\n\n },\n\n _ => unimplemented!()\n\n };\n\n\n\n (id, Texture{\n\n srgb: texture.srgb,\n\n gpu: raw_texture\n\n })\n\n }\n\n}", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 78, "score": 96574.46156000554 }, { "content": " Format, \n\n Wrap, \n\n Filter, \n\n Layout \n\n};\n\n\n\n\n\nuse serde::{ \n\n Serialize, \n\n Deserialize \n\n};\n\nuse stb_image::{\n\n image,\n\n image::LoadResult,\n\n};\n\n\n\nuse std::{\n\n path::{ PathBuf, Path },\n\n fs,\n\n};\n", "file_path": "crates/newport_graphics/src/texture.rs", "rank": 79, "score": 96572.32630172337 }, { "content": " }\n\n\n\n SamplerState index_samplers(uint index) {\n\n return _all_samplers[index];\n\n }\n\n\";\n\n\n\nimpl Asset for Pipeline {\n\n fn load(bytes: &[u8], _path: &Path) -> (UUID, Self) {\n\n let engine = Engine::as_ref();\n\n let graphics: &Graphics = engine.module().unwrap();\n\n let device = graphics.device();\n\n\n\n let (id, pipeline_file): (UUID, PipelineFile) = deserialize(bytes).unwrap();\n\n\n\n let PipelineFile {\n\n render_states,\n\n \n\n color_blend,\n\n alpha_blend,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 80, "score": 96294.92474041402 }, { "content": "use crate::{\n\n gpu,\n\n asset,\n\n engine,\n\n serde,\n\n\n\n Graphics,\n\n};\n\n\n\nuse asset::{\n\n Asset,\n\n UUID,\n\n deserialize,\n\n};\n\n\n\nuse 
serde::{\n\n Serialize,\n\n Deserialize\n\n};\n\n\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 81, "score": 96291.92241358693 }, { "content": "use engine::{\n\n Engine,\n\n};\n\n\n\nuse std::path::Path;\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(rename = \"Pipeline\", crate = \"self::serde\")]\n\npub struct PipelineFile {\n\n #[serde(default)]\n\n render_states: RenderStates,\n\n \n\n #[serde(default)]\n\n color_blend: Option<BlendStates>,\n\n\n\n #[serde(default)]\n\n alpha_blend: Option<BlendStates>,\n\n\n\n #[serde(default)]\n\n depth_stencil_states: DepthStencilStates,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 82, "score": 96285.65365064937 }, { "content": " dst_color_blend_factor: color_blend.dst_blend_factor,\n\n color_blend_op: color_blend.blend_op,\n\n\n\n src_alpha_blend_factor: alpha_blend.src_blend_factor,\n\n dst_alpha_blend_factor: alpha_blend.dst_blend_factor,\n\n alpha_blend_op: alpha_blend.blend_op,\n\n\n\n depth_test: depth_stencil_states.depth_test,\n\n depth_write: depth_stencil_states.depth_write,\n\n depth_compare: depth_stencil_states.depth_compare,\n\n\n\n push_constant_size: 4,\n\n };\n\n\n\n let pipeline = device.create_pipeline(gpu::PipelineDescription::Graphics(pipeline_desc)).unwrap();\n\n\n\n (id, Pipeline{ file: pipeline_file, gpu: pipeline })\n\n }\n\n}", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 83, "score": 96284.40327093635 }, { "content": " vec![ColorMaskSerde::Red, ColorMaskSerde::Green, ColorMaskSerde::Blue, ColorMaskSerde::Alpha]\n\n }\n\n}\n\n\n\nimpl Default for RenderStates {\n\n fn default() -> Self {\n\n Self {\n\n draw_mode: Self::default_draw_mode(),\n\n line_width: Self::default_line_width(),\n\n cull_mode: Default::default(),\n\n color_mask: Self::default_color_mask(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Copy, Clone)]\n\n#[serde(crate = \"self::serde\")]\n\npub struct BlendStates {\n\n #[serde(default = \"BlendStates::default_blend_factor\")]\n\n src_blend_factor: gpu::BlendFactor,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 84, "score": 96284.2532708241 }, { "content": " Self::Vector2 => \"float2\",\n\n Self::Vector3 => \"float3\",\n\n Self::Vector4 => \"float4\",\n\n\n\n Self::Matrix4 => \"float4x4\",\n\n\n\n Self::Texture => \"uint\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n\npub enum SystemSemantics {\n\n VertexId,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n\npub struct VertexShader {\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 85, "score": 96282.18892144223 }, { "content": "pub struct Item(String, ItemVariant);\n\n\n\n#[derive(Serialize, Deserialize, Copy, Clone, PartialEq)]\n\n#[serde(crate = \"self::serde\")]\n\npub enum ItemVariant {\n\n Float32,\n\n Vector2,\n\n Vector3,\n\n Vector4,\n\n\n\n Matrix4,\n\n\n\n Texture,\n\n}\n\n\n\nimpl ItemVariant {\n\n fn into_type_string(self) -> &'static str {\n\n match self {\n\n Self::Float32 => \"float\",\n\n \n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 86, "score": 96282.05030514032 }, { "content": "}\n\n\n\nimpl DepthStencilStates {\n\n fn default_depth_compare() -> gpu::CompareOp {\n\n gpu::CompareOp::Less\n\n }\n\n}\n\n\n\nimpl Default for DepthStencilStates {\n\n fn default() -> Self {\n\n Self{\n\n depth_test: false,\n\n depth_write: false,\n\n depth_compare: Self::default_depth_compare(),\n\n }\n\n 
}\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(rename = \"CullMode\", crate = \"self::serde\")]\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 87, "score": 96281.03612002995 }, { "content": "pub enum CullModeSerde {\n\n Front,\n\n Back\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(rename = \"ColorMask\", crate = \"self::serde\")]\n\npub enum ColorMaskSerde {\n\n Red,\n\n Green,\n\n Blue,\n\n Alpha\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n\npub struct RenderStates {\n\n #[serde(default = \"RenderStates::default_draw_mode\")]\n\n draw_mode: gpu::DrawMode,\n\n\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 88, "score": 96280.71837149486 }, { "content": " #[serde(default)]\n\n pub attributes: Vec<Item>,\n\n #[serde(default)]\n\n pub system_semantics: Vec<SystemSemantics>,\n\n\n\n #[serde(default)]\n\n pub exports: Vec<Item>,\n\n pub code: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n\npub struct PixelShader {\n\n pub exports: Vec<(String, gpu::Format)>,\n\n pub code: String,\n\n}\n\n\n\npub struct Pipeline {\n\n pub file: PipelineFile,\n\n\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 89, "score": 96280.62869892495 }, { "content": " #[serde(default = \"RenderStates::default_line_width\")]\n\n line_width: f32,\n\n\n\n #[serde(default)]\n\n cull_mode: Vec<CullModeSerde>,\n\n\n\n #[serde(default = \"RenderStates::default_color_mask\")]\n\n color_mask: Vec<ColorMaskSerde>,\n\n}\n\n\n\nimpl RenderStates {\n\n fn default_draw_mode() -> gpu::DrawMode {\n\n gpu::DrawMode::Fill\n\n }\n\n\n\n fn default_line_width() -> f32 {\n\n 1.0\n\n }\n\n\n\n fn default_color_mask() -> Vec<ColorMaskSerde> {\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 90, "score": 96280.61042357102 }, { "content": " }\n\n\n\n (render_states.draw_mode, render_states.line_width, cull_mode, color_mask)\n\n };\n\n\n\n let pipeline_desc = gpu::GraphicsPipelineDescription{\n\n render_pass,\n\n shaders,\n\n\n\n vertex_attributes,\n\n\n\n draw_mode,\n\n line_width,\n\n\n\n cull_mode,\n\n color_mask,\n\n\n\n blend_enabled,\n\n\n\n src_color_blend_factor: color_blend.src_blend_factor,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 91, "score": 96280.59673943052 }, { "content": " pub gpu: gpu::Pipeline,\n\n}\n\n\n\nstatic SHADER_HEADER: &str = \"\n\n #define NULL 0\n\n ByteAddressBuffer _all_buffers[] : register(t0);\n\n Texture2D _all_textures[] : register(t1);\n\n SamplerState _all_samplers[] : register(s2);\n\n\n\n struct Constants {\n\n uint index;\n\n };\n\n [[vk::push_constant]] Constants constants;\n\n\n\n ByteAddressBuffer index_buffers(uint index) {\n\n return _all_buffers[index];\n\n }\n\n\n\n Texture2D index_textures(uint index) {\n\n return _all_textures[index];\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 92, "score": 96279.1956119346 }, { "content": " };\n\n let shaders = vec![pixel_shader, vertex_shader];\n\n\n\n let (draw_mode, line_width, cull_mode, color_mask) = {\n\n let mut cull_mode = gpu::CullMode::empty();\n\n for it in render_states.cull_mode.iter() {\n\n match it {\n\n CullModeSerde::Front => cull_mode.insert(gpu::CullMode::FRONT),\n\n CullModeSerde::Back => cull_mode.insert(gpu::CullMode::BACK),\n\n }\n\n }\n\n\n\n let mut color_mask = gpu::ColorMask::empty();\n\n for it in render_states.color_mask.iter() {\n\n match it {\n\n ColorMaskSerde::Red => 
color_mask.insert(gpu::ColorMask::RED),\n\n ColorMaskSerde::Green => color_mask.insert(gpu::ColorMask::GREEN),\n\n ColorMaskSerde::Blue => color_mask.insert(gpu::ColorMask::BLUE),\n\n ColorMaskSerde::Alpha => color_mask.insert(gpu::ColorMask::ALPHA),\n\n }\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 93, "score": 96278.79854816194 }, { "content": "\n\n let vertex_attributes = vertex_shader.attributes.iter().map(|Item(_, variant)| {\n\n match variant {\n\n ItemVariant::Float32 => gpu::VertexAttribute::Float32,\n\n ItemVariant::Vector2 => gpu::VertexAttribute::Vector2,\n\n ItemVariant::Vector3 => gpu::VertexAttribute::Vector3,\n\n ItemVariant::Vector4 => gpu::VertexAttribute::Vector4,\n\n\n\n ItemVariant::Texture => gpu::VertexAttribute::Uint32,\n\n\n\n _ => unreachable!(),\n\n }\n\n }).collect();\n\n\n\n let blend_enabled = color_blend.is_some() || alpha_blend.is_some();\n\n\n\n let color_blend = color_blend.unwrap_or(BlendStates{\n\n src_blend_factor: gpu::BlendFactor::One,\n\n dst_blend_factor: gpu::BlendFactor::One,\n\n blend_op: gpu::BlendOp::Add\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 94, "score": 96276.86087674028 }, { "content": " let line = match semantic {\n\n SystemSemantics::VertexId => \"uint vertex_id : SV_VertexID;\\n\"\n\n };\n\n source.push_str(line);\n\n }\n\n source.push_str(\"};\\n\\n\");\n\n\n\n source.push_str(\"VertexOutput main( VertexInput input ) {\\n\");\n\n source.push_str(\"VertexOutput output;\");\n\n } else {\n\n source.push_str(\"VertexOutput main( ) {\\n\");\n\n }\n\n source.push_str(&code);\n\n source.push_str(\"\\n}\\n\");\n\n\n\n // Compile to binary and then pass to device\n\n let binary = gpu::shaders::compile(\"vertex.hlsl\", &source, \"main\", gpu::ShaderVariant::Vertex).unwrap();\n\n let shader = device.create_shader(&binary, gpu::ShaderVariant::Vertex, \"main\".to_string()).unwrap();\n\n\n\n shader\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 95, "score": 96276.20435174182 }, { "content": "\n\n #[serde(default = \"BlendStates::default_blend_factor\")]\n\n dst_blend_factor: gpu::BlendFactor,\n\n\n\n #[serde(default = \"BlendStates::default_blend_op\")]\n\n blend_op: gpu::BlendOp,\n\n}\n\n\n\nimpl BlendStates {\n\n fn default_blend_factor() -> gpu::BlendFactor {\n\n gpu::BlendFactor::One\n\n }\n\n\n\n fn default_blend_op() -> gpu::BlendOp {\n\n gpu::BlendOp::Add\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 96, "score": 96276.1450790056 }, { "content": " }\n\n\n\n source.push_str(\"PixelOutput output;\"); \n\n\n\n source.push_str(&code);\n\n source.push_str(\"\\n}\\n\");\n\n\n\n // Compile to binary and then pass to device\n\n let binary = gpu::shaders::compile(\"pixel.hlsl\", &source, \"main\", gpu::ShaderVariant::Pixel).unwrap();\n\n let shader = device.create_shader(&binary, gpu::ShaderVariant::Pixel, \"main\".to_string()).unwrap();\n\n\n\n shader\n\n };\n\n\n\n // Generate the vertex shader\n\n let vertex_shader = {\n\n let VertexShader {\n\n attributes,\n\n system_semantics,\n\n exports,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 97, "score": 96273.93239051393 }, { "content": " \n\n #[serde(default)]\n\n imports: Vec<Item>,\n\n\n\n #[serde(default)]\n\n common: String,\n\n\n\n vertex_shader: Option<VertexShader>,\n\n pixel_shader: Option<PixelShader>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\n#[serde(crate = \"self::serde\")]\n\npub 
struct DepthStencilStates {\n\n #[serde(default)]\n\n depth_test: bool,\n\n #[serde(default)]\n\n depth_write: bool,\n\n #[serde(default = \"DepthStencilStates::default_depth_compare\")]\n\n depth_compare: gpu::CompareOp,\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 98, "score": 96273.58121681472 }, { "content": " source.push_str(\"struct PixelOutput {\\n\");\n\n for (index, (name, format)) in exports.iter().enumerate() {\n\n let mut name_uppercase = name.clone();\n\n name_uppercase.make_ascii_uppercase();\n\n\n\n let variant = match format {\n\n gpu::Format::BGR_U8_SRGB|gpu::Format::RGBA_F16|gpu::Format::RGB_U8|gpu::Format::RGB_U8_SRGB|gpu::Format::RGBA_U8|gpu::Format::RGBA_U8_SRGB => \"float4\",\n\n _ => unreachable!(),\n\n };\n\n\n\n let line= format!(\n\n \" {} {} : SV_TARGET{};\\n\", \n\n variant, \n\n name, \n\n index\n\n );\n\n source.push_str(&line);\n\n }\n\n source.push_str(\"};\\n\\n\");\n\n\n", "file_path": "crates/newport_graphics/src/pipeline.rs", "rank": 99, "score": 96273.08522378646 } ]
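Note: the pipeline.rs snippets above fold the serde-facing CullModeSerde/ColorMaskSerde enums into gpu bitflag values one variant at a time. The standalone sketch below reproduces that accumulation pattern with the bitflags crate so it can be run on its own; the ColorMask type, its bit values, and the to_color_mask helper are illustrative stand-ins, not the actual newport_graphics definitions.

use bitflags::bitflags;

bitflags! {
    // Illustrative stand-in for gpu::ColorMask; the real bit values are not known here.
    struct ColorMask: u32 {
        const RED   = 0b0001;
        const GREEN = 0b0010;
        const BLUE  = 0b0100;
        const ALPHA = 0b1000;
    }
}

// Mirrors the shape of the ColorMaskSerde enum deserialized from the pipeline file.
enum ColorMaskSerde {
    Red,
    Green,
    Blue,
    Alpha,
}

// Same fold-into-empty-flags pattern as in the snippet above.
fn to_color_mask(items: &[ColorMaskSerde]) -> ColorMask {
    let mut mask = ColorMask::empty();
    for it in items {
        match it {
            ColorMaskSerde::Red => mask.insert(ColorMask::RED),
            ColorMaskSerde::Green => mask.insert(ColorMask::GREEN),
            ColorMaskSerde::Blue => mask.insert(ColorMask::BLUE),
            ColorMaskSerde::Alpha => mask.insert(ColorMask::ALPHA),
        }
    }
    mask
}

fn main() {
    let mask = to_color_mask(&[ColorMaskSerde::Red, ColorMaskSerde::Alpha]);
    assert!(mask.contains(ColorMask::RED));
    assert!(!mask.contains(ColorMask::GREEN));
    println!("color mask bits: {:#06b}", mask.bits());
}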
Rust
common/lib/esp32-c3-dkc02-bsc/src/led.rs
SuGlider/espressif-trainings
cdfb39b7fea35c0aa78e177169d55e4a1cef3379
use std::ptr::{null, null_mut}; use esp_idf_sys::{ c_types::c_void, esp, rmt_config, rmt_config_t, rmt_config_t__bindgen_ty_1, rmt_driver_install, rmt_get_counter_clock, rmt_item32_t, rmt_item32_t__bindgen_ty_1, rmt_item32_t__bindgen_ty_1__bindgen_ty_1, rmt_mode_t_RMT_MODE_TX, rmt_translator_init, rmt_tx_config_t, rmt_wait_tx_done, rmt_write_sample, size_t, u_int8_t, }; pub use rgb::RGB8; const WS2812_T0H_NS: u32 = 350; const WS2812_T0L_NS: u32 = 1000; const WS2812_T1H_NS: u32 = 1000; const WS2812_T1L_NS: u32 = 350; #[derive(Debug, Default, Clone, Copy)] struct Ws2812Config { t0h_ticks: u32, t0l_ticks: u32, t1h_ticks: u32, t1l_ticks: u32, } const FREERTOS_HZ: u32 = 1000; static mut WS_CONFIG: Option<Ws2812Config> = None; unsafe extern "C" fn ws2812_to_rmt( src: *const c_void, dest: *mut rmt_item32_t, src_size: size_t, wanted_num: size_t, translated_size: *mut size_t, item_num: *mut size_t, ) { if src == null() || dest == null_mut() { *translated_size = 0; *item_num = 0; return; } let config = WS_CONFIG.unwrap(); let mut bit0: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit0.set_duration0(config.t0h_ticks); bit0.set_level0(1); bit0.set_duration1(config.t0l_ticks); bit0.set_level1(0); let bit0 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit0, }, }; let mut bit1: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit1.set_duration0(config.t1h_ticks); bit1.set_level0(1); bit1.set_duration1(config.t1l_ticks); bit1.set_level1(0); let bit1 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit1, }, }; let mut size: size_t = 0; let mut num = 0; let mut psrc = src as *const u_int8_t; let mut pdest: *mut rmt_item32_t = dest as _; while size < src_size && num < wanted_num { for i in 0..8 { if *psrc & (1 << (7 - i)) != 0 { *pdest = bit1; } else { *pdest = bit0; } num += 1; pdest = pdest.add(1); } size += 1; psrc = psrc.add(1); } *translated_size = size; *item_num = num; } pub struct WS2812RMT { config: rmt_config_t, } impl WS2812RMT { pub fn new() -> anyhow::Result<Self> { let rmt_tx_config = rmt_tx_config_t { carrier_freq_hz: 38000, carrier_level: 1, idle_level: 0, carrier_duty_percent: 33, loop_count: 1, carrier_en: false, loop_en: false, idle_output_en: true, }; let config = rmt_config_t { rmt_mode: rmt_mode_t_RMT_MODE_TX, channel: 0, gpio_num: 8, clk_div: 2, mem_block_num: 1, flags: 0, __bindgen_anon_1: rmt_config_t__bindgen_ty_1 { tx_config: rmt_tx_config, }, }; unsafe { esp!(rmt_config(&config))?; esp!(rmt_driver_install(config.channel, 0, 0))?; let mut rmt_clock = 0u32; esp!(rmt_get_counter_clock(config.channel, &mut rmt_clock))?; let ratio = rmt_clock as f64 / 1e9; WS_CONFIG = Some(Ws2812Config { t0h_ticks: (ratio * WS2812_T0H_NS as f64) as _, t0l_ticks: (ratio * WS2812_T0L_NS as f64) as _, t1h_ticks: (ratio * WS2812_T1H_NS as f64) as _, t1l_ticks: (ratio * WS2812_T1L_NS as f64) as _, }); esp!(rmt_translator_init(config.channel, Some(ws2812_to_rmt)))?; } Ok(Self { config }) } pub fn set_pixel(&mut self, color: RGB8) -> anyhow::Result<()> { let timeout_ms = 1; unsafe { esp!(rmt_write_sample( self.config.channel, &[color.g, color.r, color.b] as *const u8, 3, true, ))?; esp!(rmt_wait_tx_done( self.config.channel, (timeout_ms as u32 * FREERTOS_HZ) / 1000, ))?; } Ok(()) } }
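For orientation, a minimal usage sketch of the WS2812RMT driver defined in the file_code above, assuming it is exposed as the led module of the esp32-c3-dkc02-bsc board-support crate (as the training examples in the context items below do); the blink loop and colors are illustrative, not part of this row.

use esp32_c3_dkc02_bsc::led::{RGB8, WS2812RMT};
// If using the `binstart` feature of `esp-idf-sys`, keep this module imported.
use esp_idf_sys as _;

fn main() -> anyhow::Result<()> {
    esp_idf_sys::link_patches();

    // The driver hard-codes RMT channel 0 on GPIO 8 (see rmt_config_t in new() above).
    let mut led = WS2812RMT::new()?;

    loop {
        // set_pixel() writes the bytes in G, R, B order, as WS2812 LEDs expect.
        led.set_pixel(RGB8::new(0, 50, 0))?; // green
        std::thread::sleep(std::time::Duration::from_secs(1));
        led.set_pixel(RGB8::new(0, 0, 50))?; // blue
        std::thread::sleep(std::time::Duration::from_secs(1));
    }
}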
use std::ptr::{null, null_mut}; use esp_idf_sys::{ c_types::c_void, esp, rmt_config, rmt_config_t, rmt_config_t__bindgen_ty_1, rmt_driver_install, rmt_get_counter_clock, rmt_item32_t, rmt_item32_t__bindgen_ty_1, rmt_item32_t__bindgen_ty_1__bindgen_ty_1, rmt_mode_t_RMT_MODE_TX, rmt_translator_init, rmt_tx_config_t, rmt_wait_tx_done, rmt_write_sample, size_t, u_int8_t, }; pub use rgb::RGB8; const WS2812_T0H_NS: u32 = 350; const WS2812_T0L_NS: u32 = 1000; const WS2812_T1H_NS: u32 = 1000; const WS2812_T1L_NS: u32 = 350; #[derive(Debug, Default, Clone, Copy)] struct Ws2812Config { t0h_ticks: u32, t0l_ticks: u32, t1h_ticks: u32, t1l_ticks: u32, } const FREERTOS_HZ: u32 = 1000; static mut WS_CONFIG: Option<Ws2812Config> = None; unsafe extern "C" fn ws2812_to_rmt( src: *const c_void, dest: *mut rmt_item32_t, src_size: size_t, wanted_num: size_t, translated_size: *mut size_t, item_num: *mut size_t, ) { if src == null() || dest == null_mut() { *translated_size = 0; *item_num = 0; return; } let config = WS_CONFIG.unwrap(); let mut bit0: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit0.set_duration0(config.t0h_ticks); bit0.set_level0(1); bit0.set_duration1(config.t0l_ticks); bit0.set_level1(0); let bit0 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit0, }, }; let mut bit1: rmt_item32_t__bindgen_ty_1__bindgen_ty_1 = Default::default(); bit1.set_duration0(config.t1h_ticks); bit1.set_level0(1); bit1.set_duration1(config.t1l_ticks); bit1.set_level1(0); let bit1 = rmt_item32_t { __bindgen_anon_1: rmt_item32_t__bindgen_ty_1 { __bindgen_anon_1: bit1, }, }; let mut size: size_t = 0; let mut num = 0; let mut psrc = src as *const u_int8_t; let mut pdest: *mut rmt_item32_t = dest as _; while size < src_size && num < wanted_num { for i in 0..8 { if *psrc & (1 << (7 - i)) != 0 { *pdest = bit1; } else { *pdest = bit0; } num += 1; pdest = pdest.add(1); } size += 1; psrc = psrc.add(1); } *translated_size = size; *item_num = num; } pub struct WS2812RMT { config: rmt_config_t, } impl WS2812RMT { pub fn new() -> anyhow::Result<Self> { let rmt_tx_config = rmt_tx_config_t { carrier_freq_hz: 38000, carrier_level: 1, idle_level: 0, carrier_duty_percent: 33, loop_count: 1, carrier_en: false, loop_en: false,
pub fn set_pixel(&mut self, color: RGB8) -> anyhow::Result<()> { let timeout_ms = 1; unsafe { esp!(rmt_write_sample( self.config.channel, &[color.g, color.r, color.b] as *const u8, 3, true, ))?; esp!(rmt_wait_tx_done( self.config.channel, (timeout_ms as u32 * FREERTOS_HZ) / 1000, ))?; } Ok(()) } }
idle_output_en: true, }; let config = rmt_config_t { rmt_mode: rmt_mode_t_RMT_MODE_TX, channel: 0, gpio_num: 8, clk_div: 2, mem_block_num: 1, flags: 0, __bindgen_anon_1: rmt_config_t__bindgen_ty_1 { tx_config: rmt_tx_config, }, }; unsafe { esp!(rmt_config(&config))?; esp!(rmt_driver_install(config.channel, 0, 0))?; let mut rmt_clock = 0u32; esp!(rmt_get_counter_clock(config.channel, &mut rmt_clock))?; let ratio = rmt_clock as f64 / 1e9; WS_CONFIG = Some(Ws2812Config { t0h_ticks: (ratio * WS2812_T0H_NS as f64) as _, t0l_ticks: (ratio * WS2812_T0L_NS as f64) as _, t1h_ticks: (ratio * WS2812_T1H_NS as f64) as _, t1l_ticks: (ratio * WS2812_T1L_NS as f64) as _, }); esp!(rmt_translator_init(config.channel, Some(ws2812_to_rmt)))?; } Ok(Self { config }) }
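The middle field above derives the WS2812 bit timings by converting nanosecond constants into RMT counter ticks. Below is a plain-Rust sketch of that conversion, runnable on the host; the 40 MHz counter clock is an assumed example (80 MHz APB source with the clk_div of 2 from the config), whereas on the device the value comes from rmt_get_counter_clock.

// Timing constants copied from the file_code above (WS2812 pulse widths in ns).
const WS2812_T0H_NS: u32 = 350;
const WS2812_T0L_NS: u32 = 1000;
const WS2812_T1H_NS: u32 = 1000;
const WS2812_T1L_NS: u32 = 350;

// ticks = counter clock (ticks per second) * duration (seconds)
fn ns_to_ticks(counter_clock_hz: u32, ns: u32) -> u32 {
    let ratio = counter_clock_hz as f64 / 1e9;
    (ratio * ns as f64) as u32
}

fn main() {
    let clock_hz = 40_000_000; // assumed: 80 MHz APB clock divided by clk_div = 2
    println!("T0H = {} ticks", ns_to_ticks(clock_hz, WS2812_T0H_NS)); // 14
    println!("T0L = {} ticks", ns_to_ticks(clock_hz, WS2812_T0L_NS)); // 40
    println!("T1H = {} ticks", ns_to_ticks(clock_hz, WS2812_T1H_NS)); // 40
    println!("T1L = {} ticks", ns_to_ticks(clock_hz, WS2812_T1L_NS)); // 14
}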
function_block-function_prefix_line
[ { "content": "fn process_message(message: EspMqttMessage, led: &mut WS2812RMT) {\n\n match message.details() {\n\n Complete(token) => {\n\n info!(\"{}\", message.topic(token));\n\n let message_data: &[u8] = &message.data();\n\n if let Ok(ColorData::BoardLed(color)) = ColorData::try_from(message_data) {\n\n info!(\"{}\", color);\n\n if let Err(e) = led.set_pixel(color) {\n\n error!(\"could not set board LED: {:?}\", e)\n\n };\n\n }\n\n }\n\n _ => error!(\"could not set board LED\"),\n\n }\n\n}\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 0, "score": 190109.26135084435 }, { "content": "fn process_message(message: EspMqttMessage, led: &mut WS2812RMT) {\n\n match message.details() {\n\n Complete(token) => {\n\n info!(\"{}\", message.topic(token));\n\n let message_data: &[u8] = &message.data();\n\n if let Ok(ColorData::BoardLed(color)) = ColorData::try_from(message_data) {\n\n info!(\"{}\", color);\n\n if let Err(e) = led.set_pixel(color) {\n\n error!(\"could not set board LED: {:?}\", e)\n\n };\n\n }\n\n }\n\n _ => error!(\"could not set board LED\"),\n\n }\n\n}\n", "file_path": "intro/mqtt/solution/examples/solution1.rs", "rank": 1, "score": 168880.69723649096 }, { "content": "fn process_message(message: EspMqttMessage, inflight: &mut Vec<u8>, led: &mut WS2812RMT) {\n\n match message.details() {\n\n Complete(token) => {\n\n let topic = message.topic(token);\n\n // use `split()` to look for '{UUID}/cmd/' as leading part of `topic`\n\n // and if it matches, process the remaining part\n\n if let Some(command_str) = topic.split(&cmd_topic_fragment(UUID)).nth(1) {\n\n // try and parse the remaining path and the data sent along as `BoardLed` command\n\n let raw = RawCommandData {\n\n path: command_str,\n\n data: message.data(),\n\n };\n\n\n\n if let Ok(Command::BoardLed(color)) = Command::try_from(raw) {\n\n match led.set_pixel(color) {\n\n Err(e) => error!(\"could not set board LED: {:?}\", e),\n\n _ => {}\n\n };\n\n }\n\n }\n", "file_path": "intro/mqtt/solution/examples/solution2.rs", "rank": 2, "score": 168101.85600530458 }, { "content": "fn light(led: &mut WS2812RMT, color: RGB8) {\n\n led.set_pixel(color).unwrap();\n\n}\n", "file_path": "advanced/button-interrupt/solution/src/main_led.rs", "rank": 3, "score": 162491.07379334755 }, { "content": "/// Handles `EspMqttMessage` without MQTT hierarchy\n\n///\n\n/// Used to send ColorData(rgb)\n\npub fn color_topic(uuid: &str) -> String {\n\n format!(\"{}/color_topic\", uuid)\n\n}\n\n\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 5, "score": 115622.29118565838 }, { "content": "pub fn hello_topic(uuid: &str) -> String {\n\n format!(\"{}/hello\", uuid)\n\n}\n\n\n\npub enum Command {\n\n BoardLed(RGB8),\n\n}\n\n\n\nimpl Command {\n\n const BOARD_LED: &'static str = \"board_led\";\n\n\n\n pub fn topic(&self, uuid: &str) -> String {\n\n match self {\n\n Command::BoardLed(_) => format!(\"{}{}\", cmd_topic_fragment(uuid), Self::BOARD_LED),\n\n }\n\n }\n\n\n\n pub fn data(&self) -> &[u8] {\n\n match self {\n\n Command::BoardLed(led_data) => led_data.as_slice(),\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 6, "score": 115616.06057298803 }, { "content": "pub fn temperature_data_topic(uuid: &str) -> String {\n\n format!(\"{}/sensor_data/temperature\", uuid)\n\n}\n\n\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 7, "score": 114163.61258971355 }, { "content": "pub fn cmd_topic_fragment(uuid: &str) -> String {\n\n format!(\"{}/command/\", uuid)\n\n}\n\n\n", "file_path": 
"common/lib/mqtt-messages/src/lib.rs", "rank": 8, "score": 114163.61258971355 }, { "content": "fn templated(content: impl AsRef<str>) -> String {\n\n format!(\n\n r#\"\n\n<!DOCTYPE html>\n\n<html>\n\n <head>\n\n <meta charset=\"utf-8\">\n\n <title>esp-rs web server</title>\n\n </head>\n\n <body>\n\n {}\n\n </body>\n\n</html>\n\n\"#,\n\n content.as_ref()\n\n )\n\n}\n\n\n", "file_path": "intro/http-server/exercise/src/main.rs", "rank": 9, "score": 113898.63946070796 }, { "content": "fn templated(content: impl AsRef<str>) -> String {\n\n format!(\n\n r#\"\n\n<!DOCTYPE html>\n\n<html>\n\n <head>\n\n <meta charset=\"utf-8\">\n\n <title>esp-rs web server</title>\n\n </head>\n\n <body>\n\n {}\n\n </body>\n\n</html>\n\n\"#,\n\n content.as_ref()\n\n )\n\n}\n\n\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 10, "score": 113898.63946070796 }, { "content": "struct SensorConfig {\n\n dac_offset: DacOffset,\n\n clock_divider: u8,\n\n}\n\n\n\nimpl SensorConfig {\n\n fn new(dac_offset: DacOffset, clock_divider: u8) -> Self {\n\n Self {\n\n dac_offset,\n\n clock_divider,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for SensorConfig {\n\n fn default() -> Self {\n\n Self {\n\n clock_divider: 6,\n\n dac_offset: Default::default(),\n\n }\n", "file_path": "common/lib/esp32-c3-dkc02-bsc/src/temp_sensor.rs", "rank": 11, "score": 111677.46973222028 }, { "content": "fn get(url: impl AsRef<str>) -> anyhow::Result<()> {\n\n // 1. create a new EspHttpClient with SSL certificates enabled\n\n let mut client = EspHttpClient::new(&EspHttpClientConfiguration {\n\n use_global_ca_store: true,\n\n crt_bundle_attach: Some(esp_idf_sys::esp_crt_bundle_attach),\n\n\n\n ..Default::default()\n\n })?;\n\n\n\n // 2. open a GET request to `url`\n\n let request = client.get(url.as_ref())?;\n\n\n\n // 3. requests *may* send data to the server. Turn the request into a writer, specifying 0 bytes as write length\n\n // (since we don't send anything - but have to do the writer step anyway)\n\n //\n\n // https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/protocols/esp_http_client.html\n\n // if this were a POST request, you'd set a write length > 0 and then writer.do_write(&some_buf);\n\n let writer = request.into_writer(0)?;\n\n\n\n // 4. turn the writer into a response and check its status. Successful http status codes are in the 200..=299 range.\n", "file_path": "intro/http-client/solution/src/main.rs", "rank": 12, "score": 110894.69555867738 }, { "content": "fn get(url: impl AsRef<str>) -> anyhow::Result<()> {\n\n // 1. Create a new EspHttpClient. (Check documentation)\n\n\n\n // 2. Open a GET request to `url`\n\n\n\n // 3. Requests *may* send data to the server. Turn the request into a writer, specifying 0 bytes as write length\n\n // (since we don't send anything - but have to do the writer step anyway)\n\n //\n\n // https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/protocols/esp_http_client.html\n\n // If this were a POST request, you'd set a write length > 0 and then writer.do_write(&some_buf);\n\n\n\n // let writer = request...;\n\n\n\n // 4. Turn the writer into a response and check its status. Successful http status codes are in the 200..=299 range.\n\n\n\n // let response = writer...;\n\n // let status = ...;\n\n // println!(\"response code: {}\\n\", status);\n\n\n\n // 5. If the status is OK, read response data chunk by chunk into a buffer and print it until done.\n\n // 6. 
Try converting the bytes into a Rust (UTF-8) string and print it.\n\n\n\n Ok(())\n\n}\n", "file_path": "intro/http-client/exercise/src/main.rs", "rank": 13, "score": 110894.69555867738 }, { "content": "fn get(url: impl AsRef<str>) -> anyhow::Result<()> {\n\n // 1. Create a new EspHttpClient. (Check documentation) \n\n let mut client = EspHttpClient::new_default()?;\n\n \n\n // 2. Open a GET request to `url`\n\n let request = client.get(url)?;\n\n\n\n // 3. Requests *may* send data to the server. Turn the request into a writer, specifying 0 bytes as write length\n\n // (since we don't send anything - but have to do the writer step anyway)\n\n // https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/protocols/esp_http_client.html\n\n // If this were a POST request, you'd set a write length > 0 and then writer.do_write(&some_buf);\n\n\n\n let writer = request.into_writer(0)?;\n\n\n\n // 4. Turn the writer into a response and check its status. \n\n // Successful http status codes are in the 200..=299 range.\n\n\n\n let response = writer.into_response()?;\n\n let status = response.status();\n\n let mut total_size = 0;\n", "file_path": "intro/http-client/solution/src/solution1.rs", "rank": 14, "score": 110894.69555867738 }, { "content": "pub fn wifi(ssid: &str, psk: &str) -> anyhow::Result<Wifi> {\n\n if ssid.len() == 0 {\n\n anyhow::bail!(\"missing WiFi name\")\n\n }\n\n if psk.len() == 0 {\n\n anyhow::bail!(\"missing WiFi password\")\n\n }\n\n let netif_stack = Arc::new(EspNetifStack::new()?);\n\n let sys_loop_stack = Arc::new(EspSysLoopStack::new()?);\n\n let default_nvs = Arc::new(EspDefaultNvs::new()?);\n\n let mut wifi = EspWifi::new(\n\n netif_stack.clone(),\n\n sys_loop_stack.clone(),\n\n default_nvs.clone(),\n\n )?;\n\n\n\n info!(\"Searching for Wifi network {}\", ssid);\n\n\n\n let ap_infos = wifi.scan()?;\n\n\n", "file_path": "common/lib/esp32-c3-dkc02-bsc/src/wifi.rs", "rank": 15, "score": 102873.67429618012 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n EspLogger::initialize_default();\n\n\n\n let app_config = CONFIG;\n\n\n\n info!(\"our UUID is:\");\n\n info!(\"{}\", UUID);\n\n\n\n let mut temp_sensor = BoardTempSensor::new_taking_peripherals();\n\n\n\n let mut led = WS2812RMT::new()?;\n\n led.set_pixel(RGB8::new(1, 1, 0))?;\n\n\n\n let _wifi = wifi(app_config.wifi_ssid, app_config.wifi_psk)?;\n\n\n\n let mqtt_config = MqttClientConfiguration::default();\n\n\n\n let broker_url = if !app_config.mqtt_user.is_empty() {\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 16, "score": 69760.77637949983 }, { "content": "fn main() -> anyhow::Result<()> {\n\n\n\n // Setup \n\n esp_idf_sys::link_patches();\n\n\n\n EspLogger::initialize_default();\n\n\n\n let app_config = CONFIG;\n\n\n\n info!(\"our UUID is:\");\n\n info!(\"{}\", UUID);\n\n\n\n let mut temp_sensor = BoardTempSensor::new_taking_peripherals();\n\n\n\n let mut led = WS2812RMT::new()?;\n\n led.set_pixel(RGB8::new(1, 1, 0))?;\n\n\n\n let _wifi = wifi(app_config.wifi_ssid, app_config.wifi_psk)?;\n\n\n\n let broker_url = if app_config.mqtt_user != \"\" {\n", "file_path": "intro/mqtt/exercise/src/main.rs", "rank": 17, "score": 69760.77637949983 }, { "content": "/// Entry point to our application.\n\n///\n\n/// It sets up a Wi-Fi connection to the Access Point given in the\n\n/// configuration, then blinks the RGB LED green/blue.\n\n///\n\n/// If the LED goes solid red, then it was unable to connect to your Wi-Fi\n\n/// network.\n\nfn main() -> 
anyhow::Result<()> {\n\n use bsc::led::RGB8;\n\n\n\n esp_idf_sys::link_patches();\n\n\n\n println!(\"Hello, world!\");\n\n\n\n // Start the LED off yellow\n\n let mut led = bsc::led::WS2812RMT::new()?;\n\n led.set_pixel(RGB8::new(50, 50, 0))?;\n\n\n\n // The constant `CONFIG` is auto-generated by `toml_config`.\n\n let app_config = CONFIG;\n\n\n\n // Connect to the Wi-Fi network\n\n let _wifi = match bsc::wifi::wifi(app_config.wifi_ssid, app_config.wifi_psk) {\n\n Ok(inner) => inner,\n\n Err(err) => {\n\n // Red!\n\n led.set_pixel(RGB8::new(50, 0, 0))?;\n", "file_path": "intro/hardware-check/src/main.rs", "rank": 18, "score": 69760.77637949983 }, { "content": "fn index_html() -> String {\n\n templated(\"Hello from mcu!\")\n\n}\n\n\n", "file_path": "intro/http-server/exercise/src/main.rs", "rank": 19, "score": 69631.2335149434 }, { "content": "fn index_html() -> String {\n\n templated(\"Hello from mcu!\")\n\n}\n\n\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 20, "score": 69631.2335149434 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n let _wifi = wifi(CONFIG.wifi_ssid, CONFIG.wifi_psk)?;\n\n\n\n let server_config = Configuration::default();\n\n let mut server = EspHttpServer::new(&server_config)?;\n\n server.set_inline_handler(\"/\", Method::Get, |request, response| {\n\n let html = index_html();\n\n let mut writer = response.into_writer(request)?;\n\n writer.do_write_all(html.as_bytes())?;\n\n writer.complete()\n\n })?;\n\n\n\n let temp_sensor_main = Arc::new(Mutex::new(BoardTempSensor::new_taking_peripherals()));\n\n let temp_sensor = temp_sensor_main.clone();\n\n\n\n server.set_inline_handler(\"/temperature\", Method::Get, move |request, response| {\n\n let temp_val = temp_sensor.lock().unwrap().read_owning_peripherals();\n\n let html = temperature(temp_val);\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 21, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n const GPIO_NUM: i32 = 9;\n\n \n\n // Configures the button\n\n let io_conf = gpio_config_t {\n\n pin_bit_mask: 1 << GPIO_NUM,\n\n mode: gpio_mode_t_GPIO_MODE_INPUT,\n\n pull_up_en: true.into(),\n\n pull_down_en: false.into(),\n\n intr_type: gpio_int_type_t_GPIO_INTR_POSEDGE, // positive edge trigger = button down\n\n };\n\n\n\n // Queue configurations\n\n const QUEUE_TYPE_BASE: u8 = 0;\n\n const ITEM_SIZE: u32 = 0; // we're not posting any actual data, just notifying\n\n const QUEUE_SIZE: u32 = 1;\n\n\n\n unsafe {\n\n // Writes the button configuration to the registers\n\n esp!(gpio_config(&io_conf))?;\n", "file_path": "advanced/button-interrupt/solution/src/main.rs", "rank": 22, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n let _wifi = wifi(CONFIG.wifi_ssid, CONFIG.wifi_psk)?;\n\n\n\n get(\"http://neverssl.com/\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "intro/http-client/solution/src/solution1.rs", "rank": 23, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n let _wifi = wifi(CONFIG.wifi_ssid, CONFIG.wifi_psk)?;\n\n\n\n // TODO your code here\n\n //get(...)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "intro/http-client/exercise/src/main.rs", "rank": 24, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n let _wifi = wifi(CONFIG.wifi_ssid, CONFIG.wifi_psk)?;\n\n\n\n get(\"http://neverssl.com\")?;\n\n\n\n 
get(\"https://espressif.com\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "intro/http-client/solution/src/main.rs", "rank": 25, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n let _wifi = wifi(CONFIG.wifi_ssid, CONFIG.wifi_psk)?;\n\n\n\n let mut temp_sensor = BoardTempSensor::new_taking_peripherals();\n\n\n\n // TODO your code here:\n\n // let server_config = ...;\n\n // let mut server = EspHttpServer::new(...)?;\n\n\n\n // server.set_inline_handler(\"/\", Method::Get, |request, response| {\n\n // TODO your code here:\n\n // ...\n\n //})?;\n\n\n\n // TODO this is not true until you actually create one\n\n println!(\"server awaiting connection\");\n\n\n\n // prevent program from exiting\n\n loop {\n\n let current_temperature = temp_sensor.read_owning_peripherals();\n\n println!(\"board temperature: {:.2}\", current_temperature);\n\n sleep(Duration::from_millis(1000));\n\n }\n\n}\n\n\n", "file_path": "intro/http-server/exercise/src/main.rs", "rank": 26, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n link_patches();\n\n\n\n let peripherals = Peripherals::take().unwrap();\n\n\n\n let sda = peripherals.pins.gpio4;\n\n let scl = peripherals.pins.gpio5;\n\n\n\n let i2c = Master::<I2C0, _, _>::new(\n\n peripherals.i2c0,\n\n MasterPins { sda, scl },\n\n <MasterConfig as Default>::default().baudrate(400.kHz().into()),\n\n )?;\n\n\n\n let mut sensor = IMC42670P::new(i2c, SlaveAddr::AD1)?;\n\n println!(\"Sensor init\");\n\n let device_id = sensor.read_device_id_register()?;\n\n\n\n assert_eq!(device_id, 103_u16);\n\n println!(\"Hello, world, I am sensor {}\", device_id);\n\n\n\n loop {};\n\n\n\n\n\n}\n\n\n", "file_path": "advanced/i2c-driver/solution/src/main.rs", "rank": 27, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n const GPIO_NUM: i32 = 9;\n\n\n\n // 1. Add GPIO configuration c struct\n\n // let io_conf = gpio_config_t {\n\n // ...\n\n // };\n\n\n\n unsafe {\n\n\n\n // 2. write the GPIO configuration into the register\n\n // esp!(...)?;\n\n\n\n\n\n // 3. 
Install the global GPIO interrupt handler\n\n // esp!(...)?;\n\n\n\n // Queue configurations\n\n const QUEUE_TYPE_BASE: u8 = 0;\n\n const ITEM_SIZE: u32 = 0; \n", "file_path": "advanced/button-interrupt/exercise/src/main.rs", "rank": 28, "score": 68705.07535306274 }, { "content": "fn main() -> anyhow::Result<()> {\n\n link_patches();\n\n\n\n let peripherals = Peripherals::take().unwrap();\n\n\n\n let sda = peripherals.pins.gpio4;\n\n let scl = peripherals.pins.gpio5;\n\n\n\n let i2c = Master::<I2C0, _, _>::new(\n\n peripherals.i2c0,\n\n MasterPins { sda, scl },\n\n <MasterConfig as Default>::default().baudrate(400.kHz().into()),\n\n )?;\n\n\n\n\n\n let mut sht = shtcx::shtc3(i2c);\n\n let device_id = sht.device_identifier().unwrap();\n\n \n\n\n\n println!(\"Device ID: {}\", device_id);\n", "file_path": "advanced/i2c-sensor-reading/solution/src/main.rs", "rank": 29, "score": 67688.98374502246 }, { "content": "fn main() -> anyhow::Result<()> {\n\n\n\n let mut led = WS2812RMT::new()?;\n\n const GPIO_NUM: i32 = 9;\n\n \n\n // Configures the button\n\n let io_conf = gpio_config_t {\n\n pin_bit_mask: 1 << GPIO_NUM,\n\n mode: gpio_mode_t_GPIO_MODE_INPUT,\n\n pull_up_en: true.into(),\n\n pull_down_en: false.into(),\n\n intr_type: gpio_int_type_t_GPIO_INTR_POSEDGE, // positive edge trigger = button down\n\n };\n\n\n\n // Queue configurations\n\n const QUEUE_TYPE_BASE: u8 = 0;\n\n const ITEM_SIZE: u32 = 0; // we're not posting any actual data, just notifying\n\n const QUEUE_SIZE: u32 = 1;\n\n\n\n unsafe {\n", "file_path": "advanced/button-interrupt/solution/src/main_led.rs", "rank": 30, "score": 67688.98374502246 }, { "content": "fn main() -> anyhow::Result<()> {\n\n link_patches();\n\n\n\n let peripherals = Peripherals::take().unwrap();\n\n\n\n let sda = peripherals.pins.gpio4;\n\n let scl = peripherals.pins.gpio5;\n\n\n\n let i2c = Master::<I2C0, _, _>::new(\n\n peripherals.i2c0,\n\n MasterPins { sda, scl },\n\n <MasterConfig as Default>::default().baudrate(400.kHz().into()),\n\n )?;\n\n\n\n let bus = shared_bus::BusManagerSimple::new(i2c);\n\n\n\n let proxy_1 =bus.acquire_i2c();\n\n let proxy_2 =bus.acquire_i2c();\n\n\n\n let mut imu = IMC42670P::new(proxy_1, SlaveAddr::B110_1001)?;\n", "file_path": "advanced/i2c-sensor-reading/solution/src/main_both.rs", "rank": 31, "score": 67688.98374502246 }, { "content": "fn temperature(val: f32) -> String {\n\n templated(format!(\"chip temperature: {:.2}°C\", val))\n\n}\n", "file_path": "intro/http-server/exercise/src/main.rs", "rank": 32, "score": 66867.46942740623 }, { "content": "fn temperature(val: f32) -> String {\n\n templated(format!(\"chip temperature: {:.2}°C\", val))\n\n}\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 33, "score": 66867.46942740623 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n dbg!(CONFIG);\n\n let client_id = UUID;\n\n dbg!(UUID);\n\n let mut mqttoptions = MqttOptions::new(client_id, CONFIG.mqtt_host, 1883);\n\n mqttoptions.set_credentials(CONFIG.mqtt_user, CONFIG.mqtt_pass);\n\n\n\n mqttoptions.set_keep_alive(Duration::from_secs(5));\n\n\n\n let (mut client, mut connection) = Client::new(mqttoptions, 10);\n\n\n\n client.subscribe(temperature_data_topic(UUID), QoS::AtMostOnce)?;\n\n client.subscribe(hello_topic(UUID), QoS::AtMostOnce)?;\n\n thread::spawn(move || {\n\n let mut rng = rand::thread_rng();\n\n loop {\n\n let r = rng.gen();\n\n let g = rng.gen();\n\n let b = rng.gen();\n\n let color = RGB8::new(r, g, b);\n", "file_path": "intro/mqtt/host-client/src/main.rs", "rank": 34, "score": 
65155.141871949476 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/hardware-check/build.rs", "rank": 35, "score": 46675.93776264169 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/mqtt/solution/build.rs", "rank": 36, "score": 46675.93776264169 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/mqtt/exercise/build.rs", "rank": 37, "score": 46675.93776264169 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n EspLogger::initialize_default();\n\n\n\n let app_config = CONFIG;\n\n\n\n info!(\"our UUID is:\");\n\n info!(\"{}\", UUID);\n\n\n\n let mut temp_sensor = BoardTempSensor::new_taking_peripherals();\n\n\n\n let mut led = WS2812RMT::new()?;\n\n led.set_pixel(RGB8::new(1, 1, 0))?;\n\n\n\n let _wifi = wifi(app_config.wifi_ssid, app_config.wifi_psk)?;\n\n\n\n let mqtt_config = MqttClientConfiguration::default();\n\n\n\n let broker_url = if !app_config.mqtt_user.is_empty() {\n", "file_path": "intro/mqtt/solution/examples/solution1.rs", "rank": 38, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n esp_idf_sys::link_patches();\n\n\n\n EspLogger::initialize_default();\n\n\n\n let app_config = CONFIG;\n\n\n\n info!(\"our UUID is:\");\n\n info!(\"{}\", UUID);\n\n\n\n let mut temp_sensor = BoardTempSensor::new_taking_peripherals();\n\n\n\n let mut led = WS2812RMT::new()?;\n\n led.set_pixel(RGB8::new(1, 1, 0))?;\n\n\n\n let _wifi = wifi(app_config.wifi_ssid, app_config.wifi_psk)?;\n\n\n\n let mqtt_config = MqttClientConfiguration::default();\n\n\n\n let broker_url = if app_config.mqtt_user != \"\" {\n", "file_path": "intro/mqtt/solution/examples/solution2.rs", "rank": 39, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/http-server/exercise/build.rs", "rank": 40, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n if let Ok(_already_exists) = File::open(\"uuid.toml\") {\n\n return Ok(());\n\n }\n\n\n\n let mut uuid_file = File::create(\"uuid.toml\")?;\n\n uuid_file.write_all(\"[get-uuid]\\n\".as_bytes())?;\n\n let uuid_val = Uuid::new_v4().to_string();\n\n uuid_file.write_fmt(format_args!(\"uuid = \\\"{}\\\"\\n\", uuid_val))?;\n\n\n\n let package_root = env!(\"CARGO_MANIFEST_DIR\");\n\n let uuid_rs = format!(\"{}/_uuid.rs\", package_root);\n\n let mut uuid_file = File::create(uuid_rs)?;\n\n uuid_file.write_fmt(format_args!(\n\n \"const UUID: &'static str = \\\"{}\\\";\\n\",\n\n uuid_val\n\n ))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "common/lib/get-uuid/build.rs", "rank": 41, "score": 46049.6081239984 }, { "content": "fn 
main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "advanced/i2c-driver/solution/build.rs", "rank": 42, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "advanced/button-interrupt/exercise/build.rs", "rank": 43, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/http-client/solution/build.rs", "rank": 44, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/http-server/solution/build.rs", "rank": 45, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "advanced/button-interrupt/solution/build.rs", "rank": 46, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "intro/http-client/exercise/build.rs", "rank": 47, "score": 46049.6081239984 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Necessary because of this issue: https://github.com/rust-lang/cargo/issues/9641\n\n embuild::build::CfgArgs::output_propagated(\"ESP_IDF\")?;\n\n embuild::build::LinkArgs::output_propagated(\"ESP_IDF\")\n\n}\n", "file_path": "advanced/i2c-sensor-reading/solution/build.rs", "rank": 48, "score": 45447.227361607016 }, { "content": "def get_config():\n\n d = toml.load(open(\"../../intro/mqtt/exercise/cfg.toml\", \"r\"))\n", "file_path": "extra/mqtt-python-client/mqtt.py", "rank": 49, "score": 30613.076970411064 }, { "content": "//! # Hardware Check\n\n//!\n\n//! 
This `libstd` program is for the ESP32-C3-DevKitC-02 board.\n\n\n\n// Logging macros\n\n\n\nuse log::*;\n\n\n\n// And alias for our Board Support Crate\n\nuse esp32_c3_dkc02_bsc as bsc;\n\n\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\nuse esp_idf_sys as _;\n\n\n\n/// This configuration is picked up at compile time by `build.rs` from the\n\n/// file `cfg.toml`.\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n", "file_path": "intro/hardware-check/src/main.rs", "rank": 50, "score": 25201.200636492158 }, { "content": "#![deny(unsafe_code)]\n\n#![no_std]\n\n\n\nuse embedded_hal::blocking::i2c;\n\n\n\n/// IMC42670P device driver.\n\n/// Datasheet: https://3cfeqx1hf82y3xcoull08ihx-wpengine.netdna-ssl.com/wp-content/uploads/2021/07/DS-000451-ICM-42670-P-v1.0.pdf\n\n/// \n\n#[derive(Debug)]\n\npub struct IMC42670P<I2C> {\n\n /// The concrete I²C device implementation.\n\n i2c: I2C,\n\n\n\n /// Device address\n\n address: SlaveAddr,\n\n}\n\n\n\n/// see Table 3.3.2\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum SlaveAddr {\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 51, "score": 25200.813606410113 }, { "content": " /// ADP_AD0 = 0\n\n B110_1000 = 0b110_1000,\n\n /// ADP_AD0 = 1\n\n B110_1001 = 0b110_1001,\n\n}\n\n\n\nimpl<I2C, E>IMC42670P<I2C>\n\nwhere\n\n I2C: i2c::WriteRead<Error = E> + i2c::Write<Error = E>,\n\n{\n\n /// Create a new instance of the IMC42670P.\n\n pub fn new(i2c: I2C, address: SlaveAddr) -> Result<Self, E> {\n\n\n\n let imc42670p = IMC42670P { i2c, address };\n\n\n\n Ok(imc42670p)\n\n }\n\n\n\n /// Reads device ID.\n\n /// Should return `0x67`. (if it doesn't, something is amiss)\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 52, "score": 25196.198279448436 }, { "content": "\n\n// imported message topics\n\nuse mqtt_messages::{cmd_topic_fragment, hello_topic, Command, RawCommandData};\n\n\n\nconst UUID: &'static str = get_uuid::uuid();\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"localhost\")]\n\n mqtt_host: &'static str,\n\n #[default(\"\")]\n\n mqtt_user: &'static str,\n\n #[default(\"\")]\n\n mqtt_pass: &'static str,\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/mqtt/exercise/src/main.rs", "rank": 53, "score": 25196.194264677288 }, { "content": "\n\npub struct Data {\n\n pub x: i16,\n\n pub y: i16,\n\n pub z: i16,\n\n}\n\n\n\n// Table 14.1\n\n#[derive(Clone, Copy)]\n\npub enum Register {\n\n GyroDataX1 = 0x11,\n\n GyroDataX0 = 0x12,\n\n GyroDataY1 = 0x13,\n\n GyroDataY0 = 0x14,\n\n GyroDataZ1 = 0x15,\n\n GyroDataZ0 = 0x16,\n\n PwrMgmt0 = 0x1F,\n\n WhoAmI = 0x75,\n\n}\n\n\n\nimpl Register {\n\n fn address(&self) -> u8 {\n\n *self as u8\n\n }\n\n}\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 54, "score": 25196.048446747005 }, { "content": " format!(\n\n \"mqtt://{}:{}@{}\",\n\n app_config.mqtt_user, app_config.mqtt_pass, app_config.mqtt_host\n\n )\n\n } else {\n\n format!(\"mqtt://{}\", app_config.mqtt_host)\n\n };\n\n\n\n let mut client =\n\n EspMqttClient::new_with_callback(broker_url, &mqtt_config, move |message_event| {\n\n if let Some(Ok(Received(message))) = message_event {\n\n process_message(message, &mut led);\n\n }\n\n })?;\n\n\n\n let payload: &[u8] = &[];\n\n client.publish(hello_topic(UUID), QoS::AtLeastOnce, true, payload)?;\n\n\n\n client.subscribe(mqtt_messages::color_topic(UUID), 
QoS::AtLeastOnce)?;\n\n\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 55, "score": 25195.467711429148 }, { "content": " format!(\n\n \"mqtt://{}:{}@{}\",\n\n app_config.mqtt_user, app_config.mqtt_pass, app_config.mqtt_host\n\n )\n\n } else {\n\n format!(\"mqtt://{}\", app_config.mqtt_host)\n\n };\n\n\n\n // Your Code:\n\n\n\n // 1. Create a client with default configuration and empty handler\n\n // let mut client = EspMqttClient::new_with_callback( ... )?;\n\n\n\n // 2. publish an empty hello message\n\n\n\n\n\n loop {\n\n sleep(Duration::from_secs(1));\n\n let temp = temp_sensor.read_owning_peripherals();\n\n\n\n // 3. publish CPU temperature\n\n // client.publish( ... )?;\n\n }\n\n}\n", "file_path": "intro/mqtt/exercise/src/main.rs", "rank": 56, "score": 25195.151681187115 }, { "content": "const UUID: &'static str = get_uuid::uuid();\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"localhost\")]\n\n mqtt_host: &'static str,\n\n #[default(\"\")]\n\n mqtt_user: &'static str,\n\n #[default(\"\")]\n\n mqtt_pass: &'static str,\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 57, "score": 25194.390467163652 }, { "content": "use std::{convert::TryFrom, thread::sleep, time::Duration};\n\n\n\nuse bsc::{\n\n led::{RGB8, WS2812RMT},\n\n temp_sensor::BoardTempSensor,\n\n wifi::wifi,\n\n};\n\nuse embedded_svc::mqtt::client::{\n\n Client, Details::Complete, Event::Received, Message, Publish, QoS,\n\n};\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::{\n\n log::EspLogger,\n\n mqtt::client::{EspMqttClient, EspMqttMessage, MqttClientConfiguration},\n\n};\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\nuse esp_idf_sys as _;\n\nuse log::{error, info};\n\nuse mqtt_messages::{hello_topic, ColorData};\n\n\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 58, "score": 25193.677488494635 }, { "content": "use bsc::{\n\n led::{RGB8, WS2812RMT},\n\n temp_sensor::BoardTempSensor,\n\n wifi::wifi,\n\n};\n\nuse embedded_svc::mqtt::client::{\n\n Client,\n\n Details::{Complete, InitialChunk, SubsequentChunk},\n\n Event::{self, Received},\n\n Message, Publish, QoS,\n\n};\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::{\n\n log::EspLogger,\n\n mqtt::client::{EspMqttClient, EspMqttMessage, MqttClientConfiguration},\n\n};\n\nuse std::{borrow::Cow, convert::TryFrom, thread::sleep, time::Duration};\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\nuse esp_idf_sys as _;\n\nuse log::{error, info};\n", "file_path": "intro/mqtt/exercise/src/main.rs", "rank": 59, "score": 25193.643545534393 }, { "content": " self.write_register(Register::PwrMgmt0, value)\n\n }\n\n\n\n fn write_register(&mut self, register: Register, value: u8) -> Result<(), E> {\n\n let byte = value as u8;\n\n self.i2c\n\n .write(self.address as u8, &[register.address(), byte])\n\n }\n\n\n\n fn read_register(&mut self, register: Register) -> Result<u8, E> {\n\n let mut data = [0];\n\n self.i2c\n\n .write_read(self.address as u8, &[register.address()], &mut data)?;\n\n Ok(u8::from_le_bytes(data))\n\n }\n\n}\n\n\n\npub struct PowerManagement {\n\n pub bits: u8,\n\n}\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 60, "score": 25190.920912544858 }, { "content": " pub fn read_device_id_register(&mut self) -> Result<u8, E> {\n\n self.read_register(Register::WhoAmI)\n\n }\n\n\n\n /// Starts 
gyroscope sensor in low noise mode.\n\n pub fn gyro_ln(&mut self) -> Result<(), E> {\n\n let value: u8 = 0b11 << 2;\n\n self. write_pwr_mgmt(value)\n\n }\n\n\n\n /// Reads gyroscope sensor values.\n\n /// This may need some rework.\n\n pub fn read_gyro(&mut self) -> Result<Data, E> {\n\n \n\n\n\n let x0 = self.read_register(Register::GyroDataX0)?;\n\n let x1 = self.read_register(Register::GyroDataX1)?;\n\n let y0 = self.read_register(Register::GyroDataY0)?;\n\n let y1 = self.read_register(Register::GyroDataY1)?;\n\n let z0 = self.read_register(Register::GyroDataZ0)?;\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 61, "score": 25190.603644351027 }, { "content": " let z1 = self.read_register(Register::GyroDataZ1)?;\n\n\n\n let gyro_data = Data {\n\n x: i16::from_be_bytes([x1, x0]),\n\n y: i16::from_be_bytes([y1, y0]),\n\n z: i16::from_be_bytes([z1, z0]),\n\n };\n\n\n\n Ok(gyro_data)\n\n \n\n }\n\n\n\n /// Read PwrMgmt0 configuration\n\n pub fn read_pwr_configuration(&mut self) -> Result<PowerManagement, E> {\n\n let bits = self.read_register(Register::PwrMgmt0)?;\n\n Ok(PowerManagement { bits })\n\n }\n\n\n\n /// Write in PwrMgmt0 Register\n\n fn write_pwr_mgmt(&mut self, value: u8) -> Result<(), E> {\n", "file_path": "common/lib/imc42670p/src/lib.rs", "rank": 62, "score": 25189.41511670378 }, { "content": " anyhow::bail!(\"could not connect to Wi-Fi network: {:?}\", err)\n\n }\n\n };\n\n\n\n loop {\n\n // Blue!\n\n led.set_pixel(RGB8::new(0, 0, 50))?;\n\n // Wait...\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n info!(\"Hello, world!\");\n\n\n\n // Green!\n\n led.set_pixel(RGB8::new(0, 50, 0))?;\n\n // Wait...\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n }\n\n}\n", "file_path": "intro/hardware-check/src/main.rs", "rank": 63, "score": 25187.304934115517 }, { "content": " #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n\n/// Entry point to our application.\n\n///\n\n/// It sets up a Wi-Fi connection to the Access Point given in the\n\n/// configuration, then blinks the RGB LED green/blue.\n\n///\n\n/// If the LED goes solid red, then it was unable to connect to your Wi-Fi\n\n/// network.\n", "file_path": "intro/hardware-check/src/main.rs", "rank": 64, "score": 25186.75610635219 }, { "content": " loop {\n\n sleep(Duration::from_secs(1));\n\n let temp = temp_sensor.read_owning_peripherals();\n\n client.publish(\n\n mqtt_messages::temperature_data_topic(UUID),\n\n QoS::AtLeastOnce,\n\n false,\n\n &temp.to_be_bytes() as &[u8],\n\n )?;\n\n }\n\n}\n\n\n", "file_path": "intro/mqtt/solution/src/main.rs", "rank": 65, "score": 25183.742247217444 }, { "content": "// reference:\n\n// https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/system/freertos.html\n\nuse std::ptr;\n\n\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported (`self as _`)\n\nuse esp_idf_sys::{\n\n self as _, c_types::c_void, esp, gpio_config, gpio_config_t, gpio_install_isr_service,\n\n gpio_int_type_t_GPIO_INTR_POSEDGE, gpio_isr_handler_add, gpio_mode_t_GPIO_MODE_INPUT,\n\n xQueueGenericCreate, xQueueGiveFromISR, xQueueReceive, QueueHandle_t,ESP_INTR_FLAG_IRAM, esp_random,\n\n};\n\n\n\n// These imports are needed for part 2.\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse bsc::led::{RGB8, WS2812RMT};\n\n\n\n// 4. Create a `static mut` that holds the queue handle.\n\nstatic mut EVENT_QUEUE: Option<QueueHandle_t> = None;\n\n\n\n// 6. Define what the interrupt handler does, once the button is pushed. 
Button_interrupt sends a message into the queue. \n\n#[link_section = \".iram0.text\"]\n\nunsafe extern \"C\" fn button_interrupt(_: *mut c_void) {\n\n xQueueGiveFromISR(EVENT_QUEUE.unwrap(), std::ptr::null_mut());\n\n}\n\n\n", "file_path": "advanced/button-interrupt/exercise/src/main.rs", "rank": 66, "score": 24698.96542835892 }, { "content": "// reference:\n\n// https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/system/freertos.html\n\n\n\nuse std::ptr;\n\n\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported (`self as _`)\n\nuse esp_idf_sys::{\n\n self as _, c_types::c_void, esp, gpio_config, gpio_config_t, gpio_install_isr_service,\n\n gpio_int_type_t_GPIO_INTR_POSEDGE, gpio_isr_handler_add, gpio_mode_t_GPIO_MODE_INPUT,\n\n xQueueGenericCreate, xQueueGiveFromISR, xQueueReceive, QueueHandle_t,ESP_INTR_FLAG_IRAM,\n\n};\n\n\n\n// This `static mut` holds the queue handle we are going to get from `xQueueGenericCreate`.\n\n// This is unsafe, but we are careful not to enable our GPIO interrupt handler until after this value has been initialised, and then never modify it again\n\nstatic mut EVENT_QUEUE: Option<QueueHandle_t> = None;\n\n\n\n#[link_section = \".iram0.text\"]\n\nunsafe extern \"C\" fn button_interrupt(_: *mut c_void) {\n\n xQueueGiveFromISR(EVENT_QUEUE.unwrap(), std::ptr::null_mut());\n\n}\n\n\n", "file_path": "advanced/button-interrupt/solution/src/main.rs", "rank": 67, "score": 24698.277427282497 }, { "content": "#![deny(unsafe_code)]\n\n#![no_std]\n\n\n\nuse embedded_hal::blocking::i2c;\n\n\n\n/// IMC42670P device driver.\n\n/// Datasheet: https://3cfeqx1hf82y3xcoull08ihx-wpengine.netdna-ssl.com/wp-content/uploads/2021/07/DS-000451-ICM-42670-P-v1.0.pdf\n\n#[derive(Debug)]\n\npub struct IMC42670P<I2C> {\n\n /// The concrete I²C device implementation.\n\n i2c: I2C,\n\n\n\n /// Device address\n\n address: SlaveAddr,\n\n}\n\n\n\n// see Table 3.3.2\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum SlaveAddr {\n\n\n", "file_path": "advanced/i2c-driver/solution/src/imc42670p.rs", "rank": 68, "score": 24690.04474956514 }, { "content": "\n\n \n\n // Installs the generic GPIO interrupt handler\n\n esp!(gpio_install_isr_service(ESP_INTR_FLAG_IRAM as i32))?;\n\n\n\n // Instantiates the event queue\n\n EVENT_QUEUE = Some(xQueueGenericCreate(QUEUE_SIZE, ITEM_SIZE, QUEUE_TYPE_BASE));\n\n\n\n // Registers our function with the generic GPIO interrupt handler we installed earlier.\n\n esp!(gpio_isr_handler_add(\n\n GPIO_NUM,\n\n Some(button_interrupt),\n\n std::ptr::null_mut()\n\n ))?;\n\n }\n\n\n\n // Reads the queue in a loop.\n\n loop {\n\n unsafe {\n\n // maximum delay\n", "file_path": "advanced/button-interrupt/solution/src/main.rs", "rank": 69, "score": 24690.00263576891 }, { "content": "use core::str;\n\n\n\nuse bsc::wifi::wifi;\n\nuse embedded_svc::{\n\n http::{\n\n client::{Client, Response, Request, RequestWrite},\n\n Status,\n\n }, io::Read,\n\n};\n\n\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::http::client::{EspHttpClient};\n\nuse esp_idf_sys as _; \n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/http-client/solution/src/solution1.rs", "rank": 70, "score": 24689.4411620663 }, { "content": "use core::str;\n\n\n\nuse bsc::wifi::wifi;\n\nuse embedded_svc::{\n\n http::{\n\n client::{Client, Request, RequestWrite, Response},\n\n Status,\n\n },\n\n io::Read,\n\n};\n\nuse 
esp_idf_svc::http::client::{EspHttpClient, EspHttpClientConfiguration};\n\n\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_sys as _; // If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/http-client/solution/src/main.rs", "rank": 71, "score": 24689.22784953856 }, { "content": "use core::str;\n\n\n\nuse bsc::wifi::wifi;\n\nuse embedded_svc::{\n\n http::{\n\n client::{Client, Request, RequestWrite, Response},\n\n Headers, Status,\n\n },\n\n io::Read,\n\n};\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::http::client::{EspHttpClient, EspHttpClientConfiguration};\n\nuse esp_idf_sys as _; // If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/http-client/exercise/src/main.rs", "rank": 72, "score": 24689.181283114893 }, { "content": "use core::str;\n\nuse std::{\n\n sync::{Arc, Mutex},\n\n thread::sleep,\n\n time::Duration,\n\n};\n\n\n\nuse bsc::{temp_sensor::BoardTempSensor, wifi::wifi};\n\nuse embedded_svc::{\n\n http::{\n\n server::{registry::Registry, Response, ResponseWrite},\n\n Method,\n\n },\n\n io::Write,\n\n};\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::http::server::{Configuration, EspHttpServer};\n\nuse esp_idf_sys as _; // If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/http-server/exercise/src/main.rs", "rank": 73, "score": 24688.636280262683 }, { "content": "use core::str;\n\nuse std::{\n\n sync::{Arc, Mutex},\n\n thread::sleep,\n\n time::Duration,\n\n};\n\n\n\nuse bsc::{temp_sensor::BoardTempSensor, wifi::wifi};\n\nuse embedded_svc::{\n\n http::{\n\n server::{registry::Registry, Response, ResponseWrite},\n\n Method,\n\n },\n\n io::Write,\n\n};\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse esp_idf_svc::http::server::{Configuration, EspHttpServer};\n\nuse esp_idf_sys as _; // If using the `binstart` feature of `esp-idf-sys`, always keep this module imported\n\n\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"\")]\n\n wifi_ssid: &'static str,\n\n #[default(\"\")]\n\n wifi_psk: &'static str,\n\n}\n\n\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 74, "score": 24688.636280262683 }, { "content": " AD0 = 0b110_1000,\n\n AD1 = 0b110_1001,\n\n}\n\n\n\nimpl<I2C, E>IMC42670P<I2C>\n\nwhere\n\n I2C: i2c::WriteRead<Error = E> + i2c::Write<Error = E>,\n\n{\n\n /// Creates a new instance of the sensor, taking ownership of the i2c peripheral\n\n pub fn new(i2c: I2C, address: SlaveAddr) -> Result<Self, E> {\n\n\n\n let imc42670p = IMC42670P { i2c, address };\n\n\n\n Ok(imc42670p)\n\n }\n\n\n\n /// Should return `0x67 (if it doesn't, something is amiss)\n\n /// Public method that can be accessed from outside this file\n\n pub fn read_device_id_register(&mut self) -> Result<u16, E> {\n\n self.read_register(Register::WhoAmI)\n", "file_path": "advanced/i2c-driver/solution/src/imc42670p.rs", "rank": 75, "score": 24687.837785333497 }, { "content": "// Table 14.1\n\n#[derive(Clone, Copy)]\n\npub enum 
Register {\n\n WhoAmI = 0x75,\n\n}\n\n\n\nimpl Register {\n\n fn address(&self) -> u8 {\n\n *self as u8\n\n }\n\n}\n", "file_path": "advanced/i2c-driver/solution/src/imc42670p.rs", "rank": 76, "score": 24686.736580206198 }, { "content": "use mqtt_messages::{hello_topic, temperature_data_topic, ColorData, Command, RGB8};\n\nuse rand::Rng;\n\nuse rumqttc::{Client, MqttOptions, Packet, QoS};\n\nuse std::error::Error;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nconst UUID: &'static str = get_uuid::uuid();\n\n\n\n#[derive(Debug)]\n\n#[toml_cfg::toml_config]\n\npub struct Config {\n\n #[default(\"localhost\")]\n\n mqtt_host: &'static str,\n\n #[default(\"\")]\n\n mqtt_user: &'static str,\n\n #[default(\"\")]\n\n mqtt_pass: &'static str,\n\n}\n\n\n", "file_path": "intro/mqtt/host-client/src/main.rs", "rank": 77, "score": 24686.21282124245 }, { "content": " const QUEUE_SIZE: u32 = 1;\n\n\n\n // 5. Create an event queue\n\n // EVENT_QUEUE = Some(...);\n\n \n\n \n\n // 7. Add the button GPIO and the function to the interrupt handler\n\n // esp!(...)?;\n\n }\n\n\n\n // The loop in main waits until it gets a message through the rx (\"receiver\") part of the channel\n\n loop {\n\n unsafe {\n\n // maximum delay\n\n const QUEUE_WAIT_TICKS: u32 = 1000;;\n\n\n\n // 8. Receive the event from the queue.\n\n // let res = ...;\n\n \n\n // If the event has the value 0, nothing happens. if it has a different value, the button was pressed. \n\n match res {\n\n 1 => println!(\"button pressed!\"),\n\n _ => {},\n\n };\n\n }\n\n }\n\n}\n", "file_path": "advanced/button-interrupt/exercise/src/main.rs", "rank": 78, "score": 24684.35034697045 }, { "content": "use anyhow;\n\nuse esp_idf_hal::{\n\n i2c::{config::MasterConfig, Master, MasterPins, I2C0},\n\n peripherals::Peripherals,\n\n prelude::*,\n\n};\n\nuse esp_idf_sys::*;\n\nuse i2c_driver_exercise::imc42670p::{IMC42670P, SlaveAddr};\n\n\n\n// Dont change this file. Work in the lib.rs and modify it so main.rs runs.\n\n\n", "file_path": "advanced/i2c-driver/solution/src/main.rs", "rank": 79, "score": 24683.441434754284 }, { "content": "use std::borrow::{Borrow, Cow};\n\n\n\nuse rgb::ComponentSlice;\n\npub use rgb::RGB8;\n\n\n\n/// Handles `EspMqttMessage` with MQTT hierarchy\n\n///\n\n/// Can be used to send ColorData(rgb) with `Command`\n\n\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 80, "score": 24682.688926549177 }, { "content": " const QUEUE_WAIT_TICKS: u32 = 1000;\n\n\n\n // Reads the event item out of the queue\n\n let res = xQueueReceive(EVENT_QUEUE.unwrap(), ptr::null_mut(), QUEUE_WAIT_TICKS);\n\n \n\n // If the event has the value 0, nothing happens. if it has a different value, the button was pressed. \n\n match res {\n\n 1 => println!(\"button pressed!\"),\n\n _ => {},\n\n };\n\n }\n\n }\n\n}\n", "file_path": "advanced/button-interrupt/solution/src/main.rs", "rank": 81, "score": 24681.84588828389 }, { "content": "\n\n println!(\"response code: {}\\n\", status);\n\n\n\n match status {\n\n 200..=299 => {\n\n // 5. if the status is OK, read response data chunk by chunk into a buffer and print it until done\n\n let mut buf = [0_u8;256];\n\n let mut reader = response.reader();\n\n loop {\n\n if let Ok(size) = Read::do_read(&mut reader, &mut buf){\n\n if size == 0 { break; }\n\n total_size += size;\n\n // 6. 
try converting the bytes into a Rust (UTF-8) string and print it\n\n let response_text = str::from_utf8(&buf[..size])?;\n\n println!(\"{}\", response_text);\n\n }\n\n } \n\n }\n\n _ => anyhow::bail!(\"unexpected response code: {}\", status),\n\n }\n\n \n\n \n\n Ok(())\n\n}\n", "file_path": "intro/http-client/solution/src/solution1.rs", "rank": 82, "score": 24680.97021826228 }, { "content": " let response = writer.into_response()?;\n\n let status = response.status();\n\n println!(\"response code: {}\\n\", status);\n\n match status {\n\n 200..=299 => {\n\n // 5. if the status is OK, read response data chunk by chunk into a buffer and print it until done\n\n let mut buf = [0u8; 256];\n\n let mut total_size = 0;\n\n let mut reader = response.reader();\n\n loop {\n\n let size = reader.do_read(&mut buf)?;\n\n if size == 0 {\n\n break;\n\n }\n\n total_size += size;\n\n // strictly speaking, we should check the response's encoding...\n\n\n\n // 6. try converting the bytes into a Rust (UTF-8) string and print it\n\n let response_text = str::from_utf8(&buf)?;\n\n print!(\"{}\", response_text);\n", "file_path": "intro/http-client/solution/src/main.rs", "rank": 83, "score": 24680.922792956997 }, { "content": "}\n\n\n\n/// Handles `.data()` from EspMqttMessage\n\n///\n\n// The message is a slice containing 3 values, and is cast into a ColorData(rgb)\n\nimpl<'a> TryFrom<&[u8]> for ColorData {\n\n type Error = ConvertError;\n\n\n\n fn try_from(message: &[u8]) -> Result<Self, Self::Error> {\n\n if message.len() == 3 {\n\n let rgb = RGB8::new(message[0], message[1], message[2]);\n\n Ok(ColorData::BoardLed(rgb))\n\n } else {\n\n Err(ConvertError::Length(message.len()))\n\n }\n\n }\n\n}\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 84, "score": 24680.905866655132 }, { "content": "pub enum ConvertError {\n\n Length(usize),\n\n InvalidPath,\n\n}\n\n\n\nimpl<'a> TryFrom<RawCommandData<'a>> for Command {\n\n type Error = ConvertError;\n\n\n\n fn try_from(value: RawCommandData) -> Result<Self, Self::Error> {\n\n if value.path == Command::BOARD_LED {\n\n let data: &[u8] = value.data.borrow();\n\n let data: [u8; 3] = data\n\n .try_into()\n\n .map_err(|_| ConvertError::Length(data.len()))?;\n\n let rgb = RGB8::new(data[0], data[1], data[2]);\n\n Ok(Command::BoardLed(rgb))\n\n } else {\n\n Err(ConvertError::InvalidPath)\n\n }\n\n }\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 85, "score": 24680.468000644327 }, { "content": "#![deny(unsafe_code)]\n\n#![no_std]\n\n\n\npub mod imc42670p;", "file_path": "advanced/i2c-driver/solution/src/lib.rs", "rank": 86, "score": 24680.01531381742 }, { "content": " }\n\n }\n\n}\n\n\n\n/// `ColorData` is a simplified `Command`\n\n\n\npub enum ColorData {\n\n BoardLed(RGB8),\n\n}\n\nimpl ColorData {\n\n pub fn topic(&self, uuid: &str) -> String {\n\n match self {\n\n ColorData::BoardLed(_) => color_topic(uuid),\n\n }\n\n }\n\n pub fn data(&self) -> &[u8] {\n\n match self {\n\n ColorData::BoardLed(led_data) => led_data.as_slice(),\n\n }\n\n }\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 87, "score": 24679.710475768832 }, { "content": "include!(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/_uuid.rs\"));\n\n\n\npub const fn uuid() -> &'static str {\n\n UUID\n\n}\n", "file_path": "common/lib/get-uuid/src/lib.rs", "rank": 88, "score": 24679.689308595807 }, { "content": "}\n\n\n\npub struct RawCommandData<'a> {\n\n pub path: &'a str,\n\n pub data: Cow<'a, [u8]>,\n\n}\n\n\n\nimpl<'a> TryFrom<Command> for RawCommandData<'a> {\n\n type Error = 
();\n\n\n\n fn try_from(value: Command) -> Result<Self, Self::Error> {\n\n match value {\n\n Command::BoardLed(rgb) => Ok(RawCommandData {\n\n data: Cow::Owned(vec![rgb.r, rgb.g, rgb.b]),\n\n path: Command::BOARD_LED,\n\n }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/lib/mqtt-messages/src/lib.rs", "rank": 89, "score": 24679.54046852114 }, { "content": " }\n\n\n\n /// Writes into a register\n\n /// This method is not public as it is only needed inside this file\n\n fn write_register(&mut self, register: Register, value: u8) -> Result<(), E> {\n\n let byte = value as u8;\n\n self.i2c\n\n .write(self.address as u8, &[register.address(), byte])\n\n }\n\n\n\n /// Reads a register using a `write_read` method.\n\n /// this method is not public as it is only needed inside this file\n\n fn read_register(&mut self, register: Register) -> Result<u16, E> {\n\n let mut data = [0; 2];\n\n self.i2c\n\n .write_read(self.address as u8, &[register.address()], &mut data)?;\n\n Ok(u16::from_le_bytes(data))\n\n }\n\n}\n\n\n", "file_path": "advanced/i2c-driver/solution/src/imc42670p.rs", "rank": 90, "score": 24679.077783886092 }, { "content": " println!(\"setting new color: {}\", color);\n\n let color = ColorData::BoardLed(color);\n\n //let command = Command::BoardLed(color);\n\n client\n\n .publish(\n\n color.topic(UUID),\n\n //command.topic(UUID),\n\n QoS::AtLeastOnce,\n\n false,\n\n color.data(),\n\n //command.data().clone(),\n\n )\n\n .unwrap();\n\n thread::sleep(Duration::from_secs(1));\n\n }\n\n });\n\n\n\n // Iterate to poll the eventloop for connection progress\n\n for (_, notification) in connection.iter().enumerate() {\n\n // if you want to see *everything*, uncomment:\n", "file_path": "intro/mqtt/host-client/src/main.rs", "rank": 91, "score": 24678.282658018892 }, { "content": " }\n\n\n\n println!(\"\\n\\nDone! 
read {} bytes:\", total_size);\n\n }\n\n _ => anyhow::bail!(\"unexpected response code: {}\", status),\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "intro/http-client/solution/src/main.rs", "rank": 92, "score": 24676.400673489476 }, { "content": " let mut writer = response.into_writer(request)?;\n\n writer.do_write_all(html.as_bytes())?;\n\n writer.complete()\n\n })?;\n\n\n\n println!(\"server awaiting connection\");\n\n\n\n loop {\n\n sleep(Duration::from_millis(1000));\n\n }\n\n}\n\n\n", "file_path": "intro/http-server/solution/src/main.rs", "rank": 93, "score": 24676.121772385686 }, { "content": " // println!(\"Notification = {:#?}\", notification);\n\n\n\n if let Ok(rumqttc::Event::Incoming(Packet::Publish(publish_data))) = notification {\n\n if publish_data.topic == hello_topic(UUID) {\n\n println!(\"board says hi!\");\n\n }\n\n\n\n if publish_data.topic == temperature_data_topic(UUID) {\n\n let data: &[u8] = &publish_data.payload;\n\n let data: Result<[u8; 4], _> = data.try_into();\n\n\n\n if let Ok(data) = data {\n\n let temp: f32 = f32::from_be_bytes(data);\n\n println!(\"board temperature: {:.2}°C\", temp)\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "intro/mqtt/host-client/src/main.rs", "rank": 94, "score": 24675.543834406562 }, { "content": "// reference:\n\n// https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-reference/system/freertos.html\n\n\n\nuse std::ptr;\n\nuse esp32_c3_dkc02_bsc as bsc;\n\nuse bsc::led::{RGB8, WS2812RMT};\n\n\n\n// If using the `binstart` feature of `esp-idf-sys`, always keep this module imported (`self as _`)\n\nuse esp_idf_sys::{\n\n self as _, c_types::c_void, esp, gpio_config, gpio_config_t, gpio_install_isr_service,\n\n gpio_int_type_t_GPIO_INTR_POSEDGE, gpio_isr_handler_add, gpio_mode_t_GPIO_MODE_INPUT,\n\n xQueueGenericCreate, xQueueGiveFromISR, xQueueReceive, QueueHandle_t,ESP_INTR_FLAG_IRAM, esp_random,\n\n};\n\n\n\n// This `static mut` holds the queue handle we are going to get from `xQueueGenericCreate`.\n\n// This is unsafe, but we are careful not to enable our GPIO interrupt handler until after this value has been initialised, and then never modify it again\n\nstatic mut EVENT_QUEUE: Option<QueueHandle_t> = None;\n\n\n\n#[link_section = \".iram0.text\"]\n\nunsafe extern \"C\" fn button_interrupt(_: *mut c_void) {\n\n xQueueGiveFromISR(EVENT_QUEUE.unwrap(), std::ptr::null_mut());\n\n}\n\n\n", "file_path": "advanced/button-interrupt/solution/src/main_led.rs", "rank": 95, "score": 24209.33609376934 }, { "content": " // Writes the button configuration to the registers\n\n esp!(gpio_config(&io_conf))?;\n\n\n\n \n\n // Installs the generic GPIO interrupt handler\n\n esp!(gpio_install_isr_service(ESP_INTR_FLAG_IRAM as i32))?;\n\n\n\n // Instantiates the event queue\n\n EVENT_QUEUE = Some(xQueueGenericCreate(QUEUE_SIZE, ITEM_SIZE, QUEUE_TYPE_BASE));\n\n\n\n // Registers our function with the generic GPIO interrupt handler we installed earlier.\n\n esp!(gpio_isr_handler_add(\n\n GPIO_NUM,\n\n Some(button_interrupt),\n\n std::ptr::null_mut()\n\n ))?;\n\n }\n\n\n\n // Reads the queue in a loop.\n\n loop {\n", "file_path": "advanced/button-interrupt/solution/src/main_led.rs", "rank": 96, "score": 24199.20681487025 }, { "content": " \n\n \n\n unsafe {\n\n // maximum delay\n\n const QUEUE_WAIT_TICKS: u32 = 1000;\n\n\n\n // Reads the event item out of the queue\n\n let res = xQueueReceive(EVENT_QUEUE.unwrap(), ptr::null_mut(), QUEUE_WAIT_TICKS);\n\n \n\n // If the event has the value 0, nothing happens. 
if it has a different value, the button was pressed. \n\n // If the button was pressed, a function that changes the state of the LED is called. \n\n \n\n match res {\n\n 1 => {\n\n // Generates random rgb values\n\n let r = esp_random() as u8;\n\n let g = esp_random() as u8;\n\n let b = esp_random() as u8;\n\n\n\n let color = RGB8::new(r, g, b);\n", "file_path": "advanced/button-interrupt/solution/src/main_led.rs", "rank": 97, "score": 24198.831354877304 }, { "content": "use anyhow;\n\nuse embedded_hal::blocking::delay::DelayMs;\n\nuse esp_idf_hal::{\n\n delay::FreeRtos,\n\n i2c::{config::MasterConfig, Master, MasterPins, I2C0},\n\n peripherals::Peripherals,\n\n prelude::*,\n\n};\n\nuse esp_idf_sys::*;\n\nuse imc42670p::{IMC42670P, SlaveAddr};\n\n\n\nuse shtcx::{self, PowerMode};\n\n\n\nuse shared_bus;\n\n\n\n// goals of this exercise:\n\n// instantiate i2c peripheral\n\n// implement one sensor, print sensor values\n\n// implement second sensor on same bus to solve an ownership problem\n\n\n", "file_path": "advanced/i2c-sensor-reading/solution/src/main.rs", "rank": 98, "score": 24192.584560083324 }, { "content": "use anyhow;\n\nuse embedded_hal::blocking::delay::DelayMs;\n\nuse esp_idf_hal::{\n\n delay::FreeRtos,\n\n i2c::{config::MasterConfig, Master, MasterPins, I2C0},\n\n peripherals::Peripherals,\n\n prelude::*,\n\n};\n\nuse esp_idf_sys::*;\n\nuse imc42670p::{IMC42670P, SlaveAddr};\n\n\n\nuse shtcx::{self, PowerMode};\n\n\n\nuse shared_bus;\n\n\n\n// goals of this exercise:\n\n// instantiate i2c peripheral\n\n// implement one sensor, print sensor values\n\n// implement second sensor on same bus to solve an ownership problem\n\n\n", "file_path": "advanced/i2c-sensor-reading/solution/src/main_both.rs", "rank": 99, "score": 24192.584560083324 } ]